Repository: locationtech/geowave Branch: master Commit: 998f3e4af31a Files: 3163 Total size: 13.7 MB Directory structure: gitextract_mqqhxs54/ ├── .gitattributes ├── .github/ │ └── workflows/ │ ├── publish.yml │ └── test.yml ├── .gitignore ├── .utility/ │ ├── .maven.xml │ ├── build-dev-resources.sh │ ├── build-python-docs.sh │ ├── publish-artifacts.sh │ ├── publish-docs.sh │ ├── retry │ ├── run-python-tests.sh │ └── run-tests.sh ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE ├── README.md ├── analytics/ │ ├── api/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── analytic/ │ │ │ │ ├── AdapterWithObjectWritable.java │ │ │ │ ├── AnalyticFeature.java │ │ │ │ ├── AnalyticItemWrapper.java │ │ │ │ ├── AnalyticItemWrapperFactory.java │ │ │ │ ├── AnalyticPersistableRegistry.java │ │ │ │ ├── GeoObjectDimensionValues.java │ │ │ │ ├── GeometryCalculations.java │ │ │ │ ├── GeometryDataSetGenerator.java │ │ │ │ ├── GeometryHullTool.java │ │ │ │ ├── IndependentJobRunner.java │ │ │ │ ├── Projection.java │ │ │ │ ├── PropertyManagement.java │ │ │ │ ├── ScopedJobConfiguration.java │ │ │ │ ├── SerializableAdapterStore.java │ │ │ │ ├── ShapefileTool.java │ │ │ │ ├── SimpleFeatureItemWrapperFactory.java │ │ │ │ ├── SimpleFeatureProjection.java │ │ │ │ ├── clustering/ │ │ │ │ │ ├── CentroidItemWrapperFactory.java │ │ │ │ │ ├── CentroidManager.java │ │ │ │ │ ├── CentroidManagerGeoWave.java │ │ │ │ │ ├── CentroidPairing.java │ │ │ │ │ ├── ClusteringUtils.java │ │ │ │ │ ├── DistortionGroupManagement.java │ │ │ │ │ ├── LongCentroid.java │ │ │ │ │ ├── NeighborData.java │ │ │ │ │ ├── NestedGroupCentroidAssignment.java │ │ │ │ │ └── exception/ │ │ │ │ │ └── MatchingCentroidNotFoundException.java │ │ │ │ ├── distance/ │ │ │ │ │ ├── CoordinateCircleDistanceFn.java │ │ │ │ │ ├── CoordinateCosineDistanceFn.java │ │ │ │ │ ├── CoordinateEuclideanDistanceFn.java │ │ │ │ │ ├── DistanceFn.java │ │ │ │ │ ├── 
FeatureCentroidDistanceFn.java │ │ │ │ │ ├── FeatureCentroidOrthodromicDistanceFn.java │ │ │ │ │ ├── FeatureDistanceFn.java │ │ │ │ │ ├── FeatureGeometryDistanceFn.java │ │ │ │ │ └── GeometryCentroidDistanceFn.java │ │ │ │ ├── extract/ │ │ │ │ │ ├── CentroidExtractor.java │ │ │ │ │ ├── DimensionExtractor.java │ │ │ │ │ ├── EmptyDimensionExtractor.java │ │ │ │ │ ├── SimpleFeatureCentroidExtractor.java │ │ │ │ │ ├── SimpleFeatureGeometryExtractor.java │ │ │ │ │ ├── SimpleFeatureInteriorPointExtractor.java │ │ │ │ │ └── TimeDimensionExtractor.java │ │ │ │ ├── kmeans/ │ │ │ │ │ ├── AssociationNotification.java │ │ │ │ │ ├── CentroidAssociationFn.java │ │ │ │ │ └── serial/ │ │ │ │ │ ├── AnalyticStats.java │ │ │ │ │ ├── KMeansParallelInitialize.java │ │ │ │ │ └── StatsMap.java │ │ │ │ ├── kryo/ │ │ │ │ │ ├── FeatureSerializer.java │ │ │ │ │ ├── GridCoverageWritableSerializer.java │ │ │ │ │ └── PersistableSerializer.java │ │ │ │ ├── model/ │ │ │ │ │ ├── IndexModelBuilder.java │ │ │ │ │ └── SpatialIndexModelBuilder.java │ │ │ │ ├── nn/ │ │ │ │ │ ├── DefaultNeighborList.java │ │ │ │ │ ├── DistanceProfile.java │ │ │ │ │ ├── DistanceProfileGenerateFn.java │ │ │ │ │ ├── NNProcessor.java │ │ │ │ │ ├── NeighborIndex.java │ │ │ │ │ ├── NeighborList.java │ │ │ │ │ ├── NeighborListFactory.java │ │ │ │ │ ├── NullList.java │ │ │ │ │ └── TypeConverter.java │ │ │ │ ├── param/ │ │ │ │ │ ├── BasicParameterHelper.java │ │ │ │ │ ├── CentroidParameters.java │ │ │ │ │ ├── ClusteringParameters.java │ │ │ │ │ ├── CommonParameters.java │ │ │ │ │ ├── ExtractParameters.java │ │ │ │ │ ├── FormatConfiguration.java │ │ │ │ │ ├── GlobalParameters.java │ │ │ │ │ ├── GroupParameterEnum.java │ │ │ │ │ ├── HullParameters.java │ │ │ │ │ ├── InputParameters.java │ │ │ │ │ ├── InputStoreParameterHelper.java │ │ │ │ │ ├── JumpParameters.java │ │ │ │ │ ├── MapReduceParameters.java │ │ │ │ │ ├── OutputParameters.java │ │ │ │ │ ├── OutputStoreParameterHelper.java │ │ │ │ │ ├── ParameterEnum.java │ │ │ │ │ ├── 
ParameterHelper.java │ │ │ │ │ ├── PartitionParameters.java │ │ │ │ │ ├── SampleParameters.java │ │ │ │ │ ├── StoreParameters.java │ │ │ │ │ └── annotations/ │ │ │ │ │ ├── CentroidParameter.java │ │ │ │ │ ├── ClusteringParameter.java │ │ │ │ │ ├── CommonParameter.java │ │ │ │ │ ├── ExtractParameter.java │ │ │ │ │ ├── GlobalParameter.java │ │ │ │ │ ├── HullParameter.java │ │ │ │ │ ├── InputParameter.java │ │ │ │ │ ├── JumpParameter.java │ │ │ │ │ ├── MapReduceParameter.java │ │ │ │ │ ├── OutputParameter.java │ │ │ │ │ ├── PartitionParameter.java │ │ │ │ │ └── SampleParameter.java │ │ │ │ ├── partitioner/ │ │ │ │ │ ├── AbstractPartitioner.java │ │ │ │ │ ├── BoundaryPartitioner.java │ │ │ │ │ ├── OrthodromicDistancePartitioner.java │ │ │ │ │ └── Partitioner.java │ │ │ │ ├── sample/ │ │ │ │ │ ├── BahmanEtAlSampleProbabilityFn.java │ │ │ │ │ ├── RandomProbabilitySampleFn.java │ │ │ │ │ ├── SampleNotification.java │ │ │ │ │ ├── SampleProbabilityFn.java │ │ │ │ │ ├── Sampler.java │ │ │ │ │ └── function/ │ │ │ │ │ ├── CentroidDistanceBasedSamplingRankFunction.java │ │ │ │ │ ├── RandomSamplingRankFunction.java │ │ │ │ │ └── SamplingRankFunction.java │ │ │ │ └── store/ │ │ │ │ └── PersistableStore.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── analytic/ │ │ ├── AnalyticFeatureTest.java │ │ ├── GeometryCalculationsTest.java │ │ ├── GeometryDataSetGeneratorTest.java │ │ ├── GeometryGenerator.java │ │ ├── GeometryHullToolTest.java │ │ ├── PropertyManagementTest.java │ │ ├── SerializableAdapterStoreTest.java │ │ ├── SimpleFeatureCentroidExractorTest.java │ │ ├── clustering/ │ │ │ ├── CentroidManagerTest.java │ │ │ ├── DistortionGroupManagementTest.java │ │ │ └── NestedGroupCentroidAssignmentTest.java │ │ ├── distance/ │ │ │ ├── CoordinateCircleDistanceFnTest.java │ │ │ └── 
FeatureDistanceFnTest.java │ │ ├── kmeans/ │ │ │ ├── CentroidAssociationFnTest.java │ │ │ └── KMeansParallelInitializeTest.java │ │ ├── kryo/ │ │ │ └── FeatureSerializationTest.java │ │ ├── nn/ │ │ │ └── NNProcessorTest.java │ │ └── partitioner/ │ │ ├── BoundaryDistancePartitionerTest.java │ │ └── OrthodromicDistancePartitionerTest.java │ ├── mapreduce/ │ │ ├── .gitignore │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── analytic/ │ │ │ │ └── mapreduce/ │ │ │ │ ├── CountofDoubleWritable.java │ │ │ │ ├── DoubleOutputFormat.java │ │ │ │ ├── GeoWaveAnalyticJobRunner.java │ │ │ │ ├── GeoWaveInputFormatConfiguration.java │ │ │ │ ├── GeoWaveOutputFormatConfiguration.java │ │ │ │ ├── GroupIDText.java │ │ │ │ ├── HadoopOptions.java │ │ │ │ ├── MapReduceIntegration.java │ │ │ │ ├── MapReduceJobController.java │ │ │ │ ├── MapReduceJobRunner.java │ │ │ │ ├── SequenceFileInputFormatConfiguration.java │ │ │ │ ├── SequenceFileOutputFormatConfiguration.java │ │ │ │ ├── ToolRunnerMapReduceIntegration.java │ │ │ │ ├── clustering/ │ │ │ │ │ ├── ConvexHullMapReduce.java │ │ │ │ │ ├── GroupAssignmentMapReduce.java │ │ │ │ │ ├── InputToOutputKeyReducer.java │ │ │ │ │ ├── SimpleFeatureOutputReducer.java │ │ │ │ │ └── runner/ │ │ │ │ │ ├── AnalyticJobRunner.java │ │ │ │ │ ├── ClusteringRunner.java │ │ │ │ │ ├── ConvexHullJobRunner.java │ │ │ │ │ ├── GeoWaveAnalyticExtractJobRunner.java │ │ │ │ │ ├── GeoWaveInputLoadJobRunner.java │ │ │ │ │ ├── GroupAssigmentJobRunner.java │ │ │ │ │ ├── MultiLevelClusteringJobRunner.java │ │ │ │ │ ├── MultiLevelJumpKMeansClusteringJobRunner.java │ │ │ │ │ └── MultiLevelKMeansClusteringJobRunner.java │ │ │ │ ├── dbscan/ │ │ │ │ │ ├── Cluster.java │ │ │ │ │ ├── ClusterItem.java │ │ │ │ │ ├── ClusterItemDistanceFn.java │ │ │ │ │ ├── ClusterNeighborList.java │ │ │ │ │ ├── ClusterUnionList.java │ │ │ │ │ ├── DBScanClusterList.java │ │ │ │ │ ├── DBScanIterationsJobRunner.java │ 
│ │ │ │ ├── DBScanJobRunner.java │ │ │ │ │ ├── DBScanMapReduce.java │ │ │ │ │ ├── PreProcessSingleItemClusterList.java │ │ │ │ │ └── SingleItemClusterList.java │ │ │ │ ├── kde/ │ │ │ │ │ ├── CellCounter.java │ │ │ │ │ ├── CellSummationCombiner.java │ │ │ │ │ ├── CellSummationReducer.java │ │ │ │ │ ├── DoubleLevelPartitioner.java │ │ │ │ │ ├── GaussianCellMapper.java │ │ │ │ │ ├── GaussianFilter.java │ │ │ │ │ ├── IdentityMapper.java │ │ │ │ │ ├── KDECommandLineOptions.java │ │ │ │ │ ├── KDEJobRunner.java │ │ │ │ │ ├── KDEReducer.java │ │ │ │ │ ├── LevelPartitioner.java │ │ │ │ │ ├── MapContextCellCounter.java │ │ │ │ │ └── compare/ │ │ │ │ │ ├── ComparisonAccumuloStatsReducer.java │ │ │ │ │ ├── ComparisonCellData.java │ │ │ │ │ ├── ComparisonCellDataReducer.java │ │ │ │ │ ├── ComparisonCellLevelPartitioner.java │ │ │ │ │ ├── ComparisonCellSummationReducer.java │ │ │ │ │ ├── ComparisonCombinedLevelPartitioner.java │ │ │ │ │ ├── ComparisonCombiningStatsMapper.java │ │ │ │ │ ├── ComparisonCombiningStatsReducer.java │ │ │ │ │ ├── ComparisonCommandLineOptions.java │ │ │ │ │ ├── ComparisonDoubleLevelPartitioner.java │ │ │ │ │ ├── ComparisonGaussianCellMapper.java │ │ │ │ │ ├── ComparisonIdentityMapper.java │ │ │ │ │ ├── ComparisonLevelPartitioner.java │ │ │ │ │ ├── ComparisonStatsJobRunner.java │ │ │ │ │ └── NegativeCellIdCounter.java │ │ │ │ ├── kmeans/ │ │ │ │ │ ├── KMeansDistortionMapReduce.java │ │ │ │ │ ├── KMeansMapReduce.java │ │ │ │ │ ├── KSamplerMapReduce.java │ │ │ │ │ ├── UpdateCentroidCostMapReduce.java │ │ │ │ │ └── runner/ │ │ │ │ │ ├── IterationCountCalculateRunner.java │ │ │ │ │ ├── KMeansDistortionJobRunner.java │ │ │ │ │ ├── KMeansIterationsJobRunner.java │ │ │ │ │ ├── KMeansJobRunner.java │ │ │ │ │ ├── KMeansJumpJobRunner.java │ │ │ │ │ ├── KMeansParallelJobRunner.java │ │ │ │ │ ├── KMeansSingleSampleJobRunner.java │ │ │ │ │ ├── KSamplerJobRunner.java │ │ │ │ │ ├── RankSamplerJobRunner.java │ │ │ │ │ ├── SampleMultipleSetsJobRunner.java │ │ │ │ │ ├── 
StripWeakCentroidsRunner.java │ │ │ │ │ └── UpdateCentroidCostJobRunner.java │ │ │ │ ├── nn/ │ │ │ │ │ ├── GeoWaveExtractNNJobRunner.java │ │ │ │ │ ├── NNData.java │ │ │ │ │ ├── NNJobRunner.java │ │ │ │ │ └── NNMapReduce.java │ │ │ │ └── operations/ │ │ │ │ ├── AnalyticOperationCLIProvider.java │ │ │ │ ├── AnalyticSection.java │ │ │ │ ├── DBScanCommand.java │ │ │ │ ├── KdeCommand.java │ │ │ │ ├── KmeansJumpCommand.java │ │ │ │ ├── KmeansParallelCommand.java │ │ │ │ ├── NearestNeighborCommand.java │ │ │ │ └── options/ │ │ │ │ ├── CommonOptions.java │ │ │ │ ├── DBScanOptions.java │ │ │ │ ├── KMeansCommonOptions.java │ │ │ │ ├── KMeansJumpOptions.java │ │ │ │ ├── KMeansParallelOptions.java │ │ │ │ ├── NearestNeighborOptions.java │ │ │ │ ├── PropertyManagementConverter.java │ │ │ │ └── QueryOptionsCommand.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── analytic/ │ │ │ └── mapreduce/ │ │ │ ├── TestMapReducePersistableRegistry.java │ │ │ ├── clustering/ │ │ │ │ └── runner/ │ │ │ │ ├── ConvexHullJobRunnerTest.java │ │ │ │ └── GroupAssigmentJobRunnerTest.java │ │ │ ├── dbscan/ │ │ │ │ └── DBScanMapReduceTest.java │ │ │ ├── kmeans/ │ │ │ │ ├── KMeansDistortionMapReduceTest.java │ │ │ │ ├── KSamplerMapReduceTest.java │ │ │ │ ├── SimpleFeatureImplSerialization.java │ │ │ │ ├── TestObject.java │ │ │ │ ├── TestObjectDataAdapter.java │ │ │ │ ├── TestObjectDimExtractor.java │ │ │ │ ├── TestObjectDistanceFn.java │ │ │ │ ├── TestObjectExtractor.java │ │ │ │ ├── TestObjectItemWrapperFactory.java │ │ │ │ ├── TestObjectSerialization.java │ │ │ │ ├── TestObjectWritable.java │ │ │ │ └── runner/ │ │ │ │ ├── KMeansIterationsJobRunnerTest.java │ │ │ │ └── StripWeakCentroidsRunnerTest.java │ │ │ ├── nn/ │ │ │ │ ├── NNJobRunnerTest.java │ │ │ │ └── NNMapReduceTest.java │ │ │ └── operations/ │ │ │ └── options/ 
│ │ │ └── PropertyManagementConverterTest.java │ │ └── resources/ │ │ ├── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ └── log4j.properties │ ├── pom.xml │ ├── pyspark/ │ │ ├── .gitignore │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── python/ │ │ ├── geowave_pyspark/ │ │ │ ├── __init__.py │ │ │ └── types.py │ │ └── setup.py │ └── spark/ │ ├── pom.xml │ └── src/ │ └── main/ │ ├── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── analytic/ │ │ └── spark/ │ │ ├── AnalyticOperationCLIProvider.java │ │ ├── GeoWaveIndexedRDD.java │ │ ├── GeoWaveRDD.java │ │ ├── GeoWaveRDDLoader.java │ │ ├── GeoWaveRasterRDD.java │ │ ├── GeoWaveRegistrator.java │ │ ├── GeoWaveSparkConf.java │ │ ├── RDDOptions.java │ │ ├── RDDUtils.java │ │ ├── kde/ │ │ │ ├── KDERunner.java │ │ │ └── operations/ │ │ │ ├── KDESparkCommand.java │ │ │ └── KDESparkOptions.java │ │ ├── kmeans/ │ │ │ ├── KMeansHullGenerator.java │ │ │ ├── KMeansRunner.java │ │ │ ├── KMeansUtils.java │ │ │ └── operations/ │ │ │ ├── KMeansSparkOptions.java │ │ │ └── KmeansSparkCommand.java │ │ ├── resize/ │ │ │ ├── RasterTileResizeSparkRunner.java │ │ │ └── ResizeSparkCommand.java │ │ ├── sparksql/ │ │ │ ├── GeoWaveSpatialEncoders.java │ │ │ ├── SimpleFeatureDataFrame.java │ │ │ ├── SimpleFeatureDataType.java │ │ │ ├── SimpleFeatureMapper.java │ │ │ ├── SqlQueryRunner.java │ │ │ ├── SqlResultsWriter.java │ │ │ ├── operations/ │ │ │ │ ├── SparkSqlCommand.java │ │ │ │ └── SparkSqlOptions.java │ │ │ ├── udf/ │ │ │ │ ├── BufferOperation.java │ │ │ │ ├── GeomContains.java │ │ │ │ ├── GeomCovers.java │ │ │ │ ├── GeomCrosses.java │ │ │ │ ├── GeomDisjoint.java │ │ │ │ ├── GeomDistance.java │ │ │ │ ├── GeomEquals.java │ │ │ │ ├── GeomFromWKT.java │ │ │ │ ├── GeomFunction.java │ │ │ │ ├── GeomFunctionRegistry.java │ │ │ │ ├── GeomIntersects.java │ │ │ │ ├── GeomOverlaps.java │ │ │ │ ├── GeomTouches.java │ │ │ │ ├── GeomWithin.java │ │ │ │ ├── 
GeomWithinDistance.java │ │ │ │ └── UDFRegistrySPI.java │ │ │ ├── udt/ │ │ │ │ ├── AbstractGeometryUDT.java │ │ │ │ ├── GeometryUDT.java │ │ │ │ ├── LineStringUDT.java │ │ │ │ ├── MultiLineStringUDT.java │ │ │ │ ├── MultiPointUDT.java │ │ │ │ ├── MultiPolygonUDT.java │ │ │ │ ├── PointUDT.java │ │ │ │ └── PolygonUDT.java │ │ │ └── util/ │ │ │ ├── GeomReader.java │ │ │ ├── GeomWriter.java │ │ │ └── SchemaConverter.java │ │ └── spatial/ │ │ ├── JoinOptions.java │ │ ├── JoinStrategy.java │ │ ├── SpatialJoin.java │ │ ├── SpatialJoinRunner.java │ │ ├── TieredSpatialJoin.java │ │ └── operations/ │ │ ├── SpatialJoinCmdOptions.java │ │ └── SpatialJoinCommand.java │ └── resources/ │ └── META-INF/ │ └── services/ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ├── core/ │ ├── cli/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── core/ │ │ │ │ └── cli/ │ │ │ │ ├── Constants.java │ │ │ │ ├── GeoWaveMain.java │ │ │ │ ├── VersionUtils.java │ │ │ │ ├── annotations/ │ │ │ │ │ ├── GeowaveOperation.java │ │ │ │ │ └── PrefixParameter.java │ │ │ │ ├── api/ │ │ │ │ │ ├── Command.java │ │ │ │ │ ├── DefaultOperation.java │ │ │ │ │ ├── DefaultPluginOptions.java │ │ │ │ │ ├── Operation.java │ │ │ │ │ ├── OperationParams.java │ │ │ │ │ ├── PluginOptions.java │ │ │ │ │ ├── ServiceEnabledCommand.java │ │ │ │ │ └── ServiceStatus.java │ │ │ │ ├── converters/ │ │ │ │ │ ├── GeoWaveBaseConverter.java │ │ │ │ │ ├── OptionalPasswordConverter.java │ │ │ │ │ ├── PasswordConverter.java │ │ │ │ │ └── RequiredFieldConverter.java │ │ │ │ ├── exceptions/ │ │ │ │ │ ├── DuplicateEntryException.java │ │ │ │ │ └── TargetNotFoundException.java │ │ │ │ ├── operations/ │ │ │ │ │ ├── ExplainCommand.java │ │ │ │ │ ├── GeoWaveTopLevelSection.java │ │ │ │ │ ├── HelpCommand.java │ │ │ │ │ ├── TopLevelOperationProvider.java │ │ │ │ │ ├── config/ │ │ │ │ │ │ ├── ConfigOperationProvider.java │ │ │ │ │ │ ├── 
ConfigSection.java │ │ │ │ │ │ ├── ListCommand.java │ │ │ │ │ │ ├── SetCommand.java │ │ │ │ │ │ ├── options/ │ │ │ │ │ │ │ └── ConfigOptions.java │ │ │ │ │ │ └── security/ │ │ │ │ │ │ ├── NewTokenCommand.java │ │ │ │ │ │ ├── crypto/ │ │ │ │ │ │ │ ├── BaseEncryption.java │ │ │ │ │ │ │ └── GeoWaveEncryption.java │ │ │ │ │ │ └── utils/ │ │ │ │ │ │ └── SecurityUtils.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── UtilOperationProvider.java │ │ │ │ │ └── UtilSection.java │ │ │ │ ├── parser/ │ │ │ │ │ ├── CommandLineOperationParams.java │ │ │ │ │ ├── ManualOperationParams.java │ │ │ │ │ └── OperationParser.java │ │ │ │ ├── prefix/ │ │ │ │ │ ├── JCommanderPrefixTranslator.java │ │ │ │ │ ├── JCommanderPropertiesTransformer.java │ │ │ │ │ ├── JCommanderTranslationMap.java │ │ │ │ │ ├── JavassistUtils.java │ │ │ │ │ ├── PrefixedJCommander.java │ │ │ │ │ └── TranslationEntry.java │ │ │ │ ├── spi/ │ │ │ │ │ ├── CLIOperationProviderSpi.java │ │ │ │ │ ├── DefaultConfigProviderSpi.java │ │ │ │ │ ├── OperationEntry.java │ │ │ │ │ └── OperationRegistry.java │ │ │ │ └── utils/ │ │ │ │ ├── ConsoleTablePrinter.java │ │ │ │ ├── FileUtils.java │ │ │ │ ├── FirstElementListComparator.java │ │ │ │ ├── JCommanderParameterUtils.java │ │ │ │ ├── PropertiesUtils.java │ │ │ │ ├── URLUtils.java │ │ │ │ └── ValueConverter.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── core/ │ │ └── cli/ │ │ ├── VersionUtilsTest.java │ │ ├── api/ │ │ │ └── ServiceEnableCommandTest.java │ │ ├── operations/ │ │ │ ├── ExplainCommandTest.java │ │ │ ├── HelpCommandTest.java │ │ │ └── config/ │ │ │ ├── SetCommandTest.java │ │ │ ├── options/ │ │ │ │ └── ConfigOptionsTest.java │ │ │ └── security/ │ │ │ └── SecurityUtilsTest.java │ │ ├── parser/ │ │ │ └── OperationParserTest.java │ │ ├── prefix/ │ │ │ ├── JCommanderPrefixTranslatorTest.java │ │ │ ├── 
JCommanderPropertiesTransformerTest.java │ │ │ ├── JCommanderTranslationMapTest.java │ │ │ ├── JavassistUtilsTest.java │ │ │ ├── PrefixedJCommanderTest.java │ │ │ └── TranslationEntryTest.java │ │ └── spi/ │ │ └── OperationRegistryTest.java │ ├── geotime/ │ │ ├── .gitignore │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── core/ │ │ │ │ └── geotime/ │ │ │ │ ├── GeoTimePersistableRegistry.java │ │ │ │ ├── adapter/ │ │ │ │ │ ├── GeometryFieldMapper.java │ │ │ │ │ ├── GeotimeRegisteredIndexFieldMappers.java │ │ │ │ │ ├── LatLonFieldMapper.java │ │ │ │ │ ├── SpatialFieldDescriptor.java │ │ │ │ │ ├── SpatialFieldDescriptorBuilder.java │ │ │ │ │ ├── SpatialFieldMapper.java │ │ │ │ │ ├── TemporalFieldDescriptor.java │ │ │ │ │ ├── TemporalFieldDescriptorBuilder.java │ │ │ │ │ ├── TemporalIntervalFieldMapper.java │ │ │ │ │ ├── TemporalLongFieldMapper.java │ │ │ │ │ ├── TimeInstantFieldMapper.java │ │ │ │ │ ├── TimeRangeFieldMapper.java │ │ │ │ │ └── annotation/ │ │ │ │ │ ├── GeoWaveSpatialField.java │ │ │ │ │ ├── GeoWaveTemporalField.java │ │ │ │ │ ├── SpatialAnnotatedFieldDescriptorBuilder.java │ │ │ │ │ └── TemporalAnnotatedFieldDescriptorBuilder.java │ │ │ │ ├── binning/ │ │ │ │ │ ├── ComplexGeometryBinningOption.java │ │ │ │ │ ├── GeohashBinningHelper.java │ │ │ │ │ ├── H3BinningHelper.java │ │ │ │ │ ├── S2BinningHelper.java │ │ │ │ │ ├── SpatialBinningHelper.java │ │ │ │ │ └── SpatialBinningType.java │ │ │ │ ├── index/ │ │ │ │ │ ├── CommonSpatialOptions.java │ │ │ │ │ ├── SpatialAttributeIndexProvider.java │ │ │ │ │ ├── SpatialDimensionalityTypeProvider.java │ │ │ │ │ ├── SpatialIndexFilter.java │ │ │ │ │ ├── SpatialOptions.java │ │ │ │ │ ├── SpatialTemporalDimensionalityTypeProvider.java │ │ │ │ │ ├── SpatialTemporalOptions.java │ │ │ │ │ ├── TemporalAttributeIndexProvider.java │ │ │ │ │ ├── TemporalDimensionalityTypeProvider.java │ │ │ │ │ ├── TemporalOptions.java │ │ │ │ │ ├── api/ 
│ │ │ │ │ │ ├── SpatialIndexBuilder.java │ │ │ │ │ │ ├── SpatialTemporalIndexBuilder.java │ │ │ │ │ │ └── TemporalIndexBuilder.java │ │ │ │ │ └── dimension/ │ │ │ │ │ ├── LatitudeDefinition.java │ │ │ │ │ ├── LongitudeDefinition.java │ │ │ │ │ ├── SimpleTimeDefinition.java │ │ │ │ │ ├── SimpleTimeIndexStrategy.java │ │ │ │ │ ├── TemporalBinningStrategy.java │ │ │ │ │ └── TimeDefinition.java │ │ │ │ ├── store/ │ │ │ │ │ ├── GeotoolsFeatureDataAdapter.java │ │ │ │ │ ├── InternalGeotoolsDataAdapterWrapper.java │ │ │ │ │ ├── InternalGeotoolsFeatureDataAdapter.java │ │ │ │ │ ├── dimension/ │ │ │ │ │ │ ├── BaseCustomCRSSpatialDimension.java │ │ │ │ │ │ ├── CustomCRSBoundedSpatialDimension.java │ │ │ │ │ │ ├── CustomCRSBoundedSpatialDimensionX.java │ │ │ │ │ │ ├── CustomCRSBoundedSpatialDimensionY.java │ │ │ │ │ │ ├── CustomCRSSpatialDimension.java │ │ │ │ │ │ ├── CustomCRSSpatialField.java │ │ │ │ │ │ ├── CustomCRSUnboundedSpatialDimension.java │ │ │ │ │ │ ├── CustomCRSUnboundedSpatialDimensionX.java │ │ │ │ │ │ ├── CustomCRSUnboundedSpatialDimensionY.java │ │ │ │ │ │ ├── CustomCrsIndexModel.java │ │ │ │ │ │ ├── LatitudeField.java │ │ │ │ │ │ ├── LongitudeField.java │ │ │ │ │ │ ├── SpatialField.java │ │ │ │ │ │ └── TimeField.java │ │ │ │ │ ├── field/ │ │ │ │ │ │ ├── CalendarArraySerializationProvider.java │ │ │ │ │ │ ├── CalendarSerializationProvider.java │ │ │ │ │ │ ├── DateArraySerializationProvider.java │ │ │ │ │ │ ├── DateSerializationProvider.java │ │ │ │ │ │ ├── GeometryArraySerializationProvider.java │ │ │ │ │ │ ├── GeometrySerializationProvider.java │ │ │ │ │ │ ├── IntervalArraySerializationProvider.java │ │ │ │ │ │ └── IntervalSerializationProvider.java │ │ │ │ │ ├── query/ │ │ │ │ │ │ ├── AbstractVectorConstraints.java │ │ │ │ │ │ ├── BaseVectorQueryBuilder.java │ │ │ │ │ │ ├── ExplicitCQLQuery.java │ │ │ │ │ │ ├── ExplicitSpatialQuery.java │ │ │ │ │ │ ├── ExplicitSpatialTemporalQuery.java │ │ │ │ │ │ ├── ExplicitTemporalQuery.java │ │ │ │ │ │ ├── 
IndexOnlySpatialQuery.java │ │ │ │ │ │ ├── OptimalCQLQuery.java │ │ │ │ │ │ ├── ScaledTemporalRange.java │ │ │ │ │ │ ├── SpatialQuery.java │ │ │ │ │ │ ├── SpatialTemporalConstraintsBuilderImpl.java │ │ │ │ │ │ ├── SpatialTemporalQuery.java │ │ │ │ │ │ ├── TemporalConstraints.java │ │ │ │ │ │ ├── TemporalConstraintsSet.java │ │ │ │ │ │ ├── TemporalQuery.java │ │ │ │ │ │ ├── TemporalRange.java │ │ │ │ │ │ ├── VectorQueryBuilderImpl.java │ │ │ │ │ │ ├── VectorQueryConstraintsFactoryImpl.java │ │ │ │ │ │ ├── aggregate/ │ │ │ │ │ │ │ ├── BaseOptimalVectorAggregation.java │ │ │ │ │ │ │ ├── BoundingBoxAggregation.java │ │ │ │ │ │ │ ├── CommonIndexBoundingBoxAggregation.java │ │ │ │ │ │ │ ├── CommonIndexTimeRangeAggregation.java │ │ │ │ │ │ │ ├── OptimalVectorBoundingBoxAggregation.java │ │ │ │ │ │ │ ├── OptimalVectorTimeRangeAggregation.java │ │ │ │ │ │ │ ├── SpatialBinningStrategy.java │ │ │ │ │ │ │ ├── SpatialCommonIndexedBinningStrategy.java │ │ │ │ │ │ │ ├── SpatialFieldBinningStrategy.java │ │ │ │ │ │ │ ├── SpatialSimpleFeatureBinningStrategy.java │ │ │ │ │ │ │ ├── TimeRangeAggregation.java │ │ │ │ │ │ │ ├── VectorAggregationQueryBuilderImpl.java │ │ │ │ │ │ │ ├── VectorBoundingBoxAggregation.java │ │ │ │ │ │ │ └── VectorTimeRangeAggregation.java │ │ │ │ │ │ ├── api/ │ │ │ │ │ │ │ ├── SpatialTemporalConstraintsBuilder.java │ │ │ │ │ │ │ ├── VectorAggregationQueryBuilder.java │ │ │ │ │ │ │ ├── VectorQueryBuilder.java │ │ │ │ │ │ │ └── VectorQueryConstraintsFactory.java │ │ │ │ │ │ ├── filter/ │ │ │ │ │ │ │ ├── CQLQueryFilter.java │ │ │ │ │ │ │ ├── SpatialQueryFilter.java │ │ │ │ │ │ │ └── expression/ │ │ │ │ │ │ │ ├── CQLToGeoWaveConversionException.java │ │ │ │ │ │ │ ├── CQLToGeoWaveFilterVisitor.java │ │ │ │ │ │ │ ├── spatial/ │ │ │ │ │ │ │ │ ├── BBox.java │ │ │ │ │ │ │ │ ├── BinarySpatialPredicate.java │ │ │ │ │ │ │ │ ├── Crosses.java │ │ │ │ │ │ │ │ ├── Disjoint.java │ │ │ │ │ │ │ │ ├── FilterGeometry.java │ │ │ │ │ │ │ │ ├── Intersects.java │ │ │ │ │ │ │ │ ├── 
Overlaps.java │ │ │ │ │ │ │ │ ├── PreparedFilterGeometry.java │ │ │ │ │ │ │ │ ├── SpatialContains.java │ │ │ │ │ │ │ │ ├── SpatialEqualTo.java │ │ │ │ │ │ │ │ ├── SpatialExpression.java │ │ │ │ │ │ │ │ ├── SpatialFieldValue.java │ │ │ │ │ │ │ │ ├── SpatialLiteral.java │ │ │ │ │ │ │ │ ├── SpatialNotEqualTo.java │ │ │ │ │ │ │ │ ├── TextToSpatialExpression.java │ │ │ │ │ │ │ │ ├── Touches.java │ │ │ │ │ │ │ │ ├── UnpreparedFilterGeometry.java │ │ │ │ │ │ │ │ └── Within.java │ │ │ │ │ │ │ └── temporal/ │ │ │ │ │ │ │ ├── After.java │ │ │ │ │ │ │ ├── Before.java │ │ │ │ │ │ │ ├── BeforeOrDuring.java │ │ │ │ │ │ │ ├── BinaryTemporalPredicate.java │ │ │ │ │ │ │ ├── During.java │ │ │ │ │ │ │ ├── DuringOrAfter.java │ │ │ │ │ │ │ ├── TemporalBetween.java │ │ │ │ │ │ │ ├── TemporalEqualTo.java │ │ │ │ │ │ │ ├── TemporalExpression.java │ │ │ │ │ │ │ ├── TemporalFieldValue.java │ │ │ │ │ │ │ ├── TemporalLiteral.java │ │ │ │ │ │ │ ├── TemporalNotEqualTo.java │ │ │ │ │ │ │ └── TimeOverlaps.java │ │ │ │ │ │ └── gwql/ │ │ │ │ │ │ ├── BboxFunction.java │ │ │ │ │ │ ├── DateCastableType.java │ │ │ │ │ │ ├── GWQLSpatialTemporalExtensions.java │ │ │ │ │ │ ├── GeometryCastableType.java │ │ │ │ │ │ ├── SpatialPredicates.java │ │ │ │ │ │ ├── TemporalOperators.java │ │ │ │ │ │ └── TemporalPredicates.java │ │ │ │ │ └── statistics/ │ │ │ │ │ ├── AbstractBoundingBoxValue.java │ │ │ │ │ ├── AbstractTimeRangeValue.java │ │ │ │ │ ├── BoundingBoxStatistic.java │ │ │ │ │ ├── GeotimeRegisteredStatistics.java │ │ │ │ │ ├── SpatialTemporalStatisticQueryBuilder.java │ │ │ │ │ ├── TimeRangeStatistic.java │ │ │ │ │ └── binning/ │ │ │ │ │ ├── SpatialFieldValueBinningStrategy.java │ │ │ │ │ └── TimeRangeFieldValueBinningStrategy.java │ │ │ │ └── util/ │ │ │ │ ├── DWithinFilterVisitor.java │ │ │ │ ├── ExtractAttributesFilter.java │ │ │ │ ├── ExtractGeometryFilterVisitor.java │ │ │ │ ├── ExtractGeometryFilterVisitorResult.java │ │ │ │ ├── ExtractTimeFilterVisitor.java │ │ │ │ ├── FilterToCQLTool.java │ │ │ │ 
├── GeometryUtils.java │ │ │ │ ├── HasDWithinFilterVisitor.java │ │ │ │ ├── IndexOptimizationUtils.java │ │ │ │ ├── SimpleFeatureUserDataConfiguration.java │ │ │ │ ├── SpatialIndexUtils.java │ │ │ │ ├── TWKBReader.java │ │ │ │ ├── TWKBUtils.java │ │ │ │ ├── TWKBWriter.java │ │ │ │ ├── TimeDescriptors.java │ │ │ │ └── TimeUtils.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ ├── org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi │ │ │ ├── org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi │ │ │ ├── org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI │ │ │ ├── org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi │ │ │ ├── org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi │ │ │ └── org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── core/ │ │ │ └── geotime/ │ │ │ ├── TestGeoTimePersistableRegistry.java │ │ │ ├── adapter/ │ │ │ │ ├── SpatialFieldDescriptorTest.java │ │ │ │ ├── SpatialFieldMapperTest.java │ │ │ │ └── annotation/ │ │ │ │ └── SpatialTemporalAnnotationsTest.java │ │ │ ├── binning/ │ │ │ │ └── SpatialBinningTypeTest.java │ │ │ ├── index/ │ │ │ │ ├── dimension/ │ │ │ │ │ ├── LongitudeDefinitionTest.java │ │ │ │ │ ├── TemporalBinningStrategyTest.java │ │ │ │ │ └── TimeDefinitionTest.java │ │ │ │ └── sfc/ │ │ │ │ └── hilbert/ │ │ │ │ ├── HilbertSFCTest.java │ │ │ │ ├── PrimitiveHilbertSFCTest.java │ │ │ │ └── tiered/ │ │ │ │ └── TieredSFCIndexStrategyTest.java │ │ │ ├── store/ │ │ │ │ ├── data/ │ │ │ │ │ └── PersistenceEncodingTest.java │ │ │ │ ├── field/ │ │ │ │ │ └── GeoTimeReaderWriterTest.java │ │ │ │ ├── query/ │ │ │ │ │ ├── BasicQueryTest.java │ │ │ │ │ ├── SpatialQueryTest.java │ │ │ │ │ ├── SpatialTemporalQueryTest.java │ │ │ │ │ ├── 
TemporalConstraintsTest.java │ │ │ │ │ ├── TemporalRangeTest.java │ │ │ │ │ ├── aggregate/ │ │ │ │ │ │ ├── AbstractVectorAggregationTest.java │ │ │ │ │ │ ├── CompositeAggregationTest.java │ │ │ │ │ │ ├── GeohashBinningStrategyTest.java │ │ │ │ │ │ ├── VectorBoundingBoxAggregationTest.java │ │ │ │ │ │ ├── VectorCountAggregationTest.java │ │ │ │ │ │ └── VectorMathAggregationTest.java │ │ │ │ │ ├── filter/ │ │ │ │ │ │ └── expression/ │ │ │ │ │ │ ├── CQLToGeoWaveFilterTest.java │ │ │ │ │ │ └── SpatialTemporalFilterExpressionTest.java │ │ │ │ │ └── gwql/ │ │ │ │ │ └── GWQLParserTest.java │ │ │ │ └── statistics/ │ │ │ │ └── BoundingBoxStatisticTest.java │ │ │ └── util/ │ │ │ ├── GeometryUtilsTest.java │ │ │ └── TWKBTest.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ ├── index/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── core/ │ │ │ │ └── index/ │ │ │ │ ├── ByteArray.java │ │ │ │ ├── ByteArrayRange.java │ │ │ │ ├── ByteArrayUtils.java │ │ │ │ ├── CompoundIndexStrategy.java │ │ │ │ ├── Coordinate.java │ │ │ │ ├── CoordinateRange.java │ │ │ │ ├── CustomIndexStrategy.java │ │ │ │ ├── FloatCompareUtils.java │ │ │ │ ├── GeoWaveSerializationException.java │ │ │ │ ├── HierarchicalNumericIndexStrategy.java │ │ │ │ ├── IndexConstraints.java │ │ │ │ ├── IndexData.java │ │ │ │ ├── IndexDimensionHint.java │ │ │ │ ├── IndexMetaData.java │ │ │ │ ├── IndexPersistableRegistry.java │ │ │ │ ├── IndexStrategy.java │ │ │ │ ├── IndexUtils.java │ │ │ │ ├── InsertionIds.java │ │ │ │ ├── Mergeable.java │ │ │ │ ├── MultiDimensionalCoordinateRanges.java │ │ │ │ ├── MultiDimensionalCoordinateRangesArray.java │ │ │ │ ├── MultiDimensionalCoordinates.java │ │ │ │ ├── MultiDimensionalIndexData.java │ │ │ │ ├── NullNumericIndexStrategy.java │ │ │ │ ├── NumericIndexStrategy.java │ │ │ │ ├── NumericIndexStrategyWrapper.java │ │ 
│ │ ├── PartitionIndexStrategy.java │ │ │ │ ├── PartitionIndexStrategyWrapper.java │ │ │ │ ├── QueryRanges.java │ │ │ │ ├── SPIServiceRegistry.java │ │ │ │ ├── SinglePartitionInsertionIds.java │ │ │ │ ├── SinglePartitionQueryRanges.java │ │ │ │ ├── SortedIndexStrategy.java │ │ │ │ ├── StringUtils.java │ │ │ │ ├── VarintUtils.java │ │ │ │ ├── dimension/ │ │ │ │ │ ├── BasicDimensionDefinition.java │ │ │ │ │ ├── NumericDimensionDefinition.java │ │ │ │ │ └── UnboundedDimensionDefinition.java │ │ │ │ ├── lexicoder/ │ │ │ │ │ ├── ByteLexicoder.java │ │ │ │ │ ├── DoubleLexicoder.java │ │ │ │ │ ├── FloatLexicoder.java │ │ │ │ │ ├── IntegerLexicoder.java │ │ │ │ │ ├── Lexicoders.java │ │ │ │ │ ├── LongLexicoder.java │ │ │ │ │ ├── NumberLexicoder.java │ │ │ │ │ └── ShortLexicoder.java │ │ │ │ ├── numeric/ │ │ │ │ │ ├── BasicNumericDataset.java │ │ │ │ │ ├── BinnedNumericDataset.java │ │ │ │ │ ├── MultiDimensionalNumericData.java │ │ │ │ │ ├── NumericData.java │ │ │ │ │ ├── NumericRange.java │ │ │ │ │ └── NumericValue.java │ │ │ │ ├── persist/ │ │ │ │ │ ├── InternalPersistableRegistry.java │ │ │ │ │ ├── Persistable.java │ │ │ │ │ ├── PersistableFactory.java │ │ │ │ │ ├── PersistableList.java │ │ │ │ │ ├── PersistableRegistrySpi.java │ │ │ │ │ └── PersistenceUtils.java │ │ │ │ ├── sfc/ │ │ │ │ │ ├── BasicSFCIndexStrategy.java │ │ │ │ │ ├── RangeDecomposition.java │ │ │ │ │ ├── SFCDimensionDefinition.java │ │ │ │ │ ├── SFCFactory.java │ │ │ │ │ ├── SpaceFillingCurve.java │ │ │ │ │ ├── binned/ │ │ │ │ │ │ └── BinnedSFCUtils.java │ │ │ │ │ ├── hilbert/ │ │ │ │ │ │ ├── HilbertSFC.java │ │ │ │ │ │ ├── HilbertSFCOperations.java │ │ │ │ │ │ ├── PrimitiveHilbertSFCOperations.java │ │ │ │ │ │ └── UnboundedHilbertSFCOperations.java │ │ │ │ │ ├── tiered/ │ │ │ │ │ │ ├── SingleTierSubStrategy.java │ │ │ │ │ │ ├── TieredSFCIndexFactory.java │ │ │ │ │ │ └── TieredSFCIndexStrategy.java │ │ │ │ │ ├── xz/ │ │ │ │ │ │ ├── XZHierarchicalIndexFactory.java │ │ │ │ │ │ ├── 
XZHierarchicalIndexStrategy.java │ │ │ │ │ │ └── XZOrderSFC.java │ │ │ │ │ └── zorder/ │ │ │ │ │ ├── ZOrderSFC.java │ │ │ │ │ └── ZOrderUtils.java │ │ │ │ ├── simple/ │ │ │ │ │ ├── HashKeyIndexStrategy.java │ │ │ │ │ ├── RoundRobinKeyIndexStrategy.java │ │ │ │ │ ├── SimpleByteIndexStrategy.java │ │ │ │ │ ├── SimpleDoubleIndexStrategy.java │ │ │ │ │ ├── SimpleFloatIndexStrategy.java │ │ │ │ │ ├── SimpleIntegerIndexStrategy.java │ │ │ │ │ ├── SimpleLongIndexStrategy.java │ │ │ │ │ ├── SimpleNumericIndexStrategy.java │ │ │ │ │ └── SimpleShortIndexStrategy.java │ │ │ │ └── text/ │ │ │ │ ├── BasicTextDataset.java │ │ │ │ ├── CaseSensitivity.java │ │ │ │ ├── EnumIndexStrategy.java │ │ │ │ ├── EnumSearch.java │ │ │ │ ├── ExplicitTextSearch.java │ │ │ │ ├── MultiDimensionalTextData.java │ │ │ │ ├── TextConstraints.java │ │ │ │ ├── TextData.java │ │ │ │ ├── TextIndexEntryConverter.java │ │ │ │ ├── TextIndexStrategy.java │ │ │ │ ├── TextIndexType.java │ │ │ │ ├── TextIndexUtils.java │ │ │ │ ├── TextRange.java │ │ │ │ ├── TextSearch.java │ │ │ │ ├── TextSearchPredicate.java │ │ │ │ ├── TextSearchType.java │ │ │ │ └── TextValue.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── core/ │ │ │ └── index/ │ │ │ ├── ByteArrayRangeTest.java │ │ │ ├── ByteArrayUtilsTest.java │ │ │ ├── CompoundIndexStrategyTest.java │ │ │ ├── PersistenceUtilsTest.java │ │ │ ├── StringUtilsTest.java │ │ │ ├── TestIndexPersistableRegistry.java │ │ │ ├── VarintUtilsTest.java │ │ │ ├── dimension/ │ │ │ │ └── BasicDimensionDefinitionTest.java │ │ │ ├── lexicoder/ │ │ │ │ ├── AbstractLexicoderTest.java │ │ │ │ ├── ByteLexicoderTest.java │ │ │ │ ├── DoubleLexicoderTest.java │ │ │ │ ├── FloatLexicoderTest.java │ │ │ │ ├── IntegerLexicoderTest.java │ │ │ │ ├── LongLexicoderTest.java │ │ │ │ └── ShortLexicoderTest.java │ │ │ 
├── sfc/ │ │ │ │ ├── data/ │ │ │ │ │ ├── BasicNumericDatasetTest.java │ │ │ │ │ ├── NumericRangeTest.java │ │ │ │ │ └── NumericValueTest.java │ │ │ │ ├── xz/ │ │ │ │ │ └── XZOrderSFCTest.java │ │ │ │ └── zorder/ │ │ │ │ └── ZOrderSFCTest.java │ │ │ └── simple/ │ │ │ ├── HashKeyIndexStrategyTest.java │ │ │ ├── RoundRobinKeyIndexStrategyTest.java │ │ │ └── SimpleNumericIndexStrategyTest.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ ├── ingest/ │ │ ├── .gitignore │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── avro/ │ │ │ │ └── wholefile.avsc │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── core/ │ │ │ │ └── ingest/ │ │ │ │ ├── HdfsIngestHandler.java │ │ │ │ ├── S3IngestHandler.java │ │ │ │ ├── URLIngestUtils.java │ │ │ │ ├── avro/ │ │ │ │ │ ├── AbstractStageWholeFileToAvro.java │ │ │ │ │ ├── GenericAvroSerializer.java │ │ │ │ │ ├── GeoWaveAvroFormatPlugin.java │ │ │ │ │ ├── GeoWaveAvroPluginBase.java │ │ │ │ │ └── GeoWaveAvroSchemaProvider.java │ │ │ │ ├── hdfs/ │ │ │ │ │ ├── HdfsFile.java │ │ │ │ │ ├── StageRunData.java │ │ │ │ │ ├── StageToHdfsDriver.java │ │ │ │ │ └── mapreduce/ │ │ │ │ │ ├── AbstractLocalIngestWithMapper.java │ │ │ │ │ ├── AbstractMapReduceIngest.java │ │ │ │ │ ├── ByteBufferBackedInputStream.java │ │ │ │ │ ├── IngestFromHdfsDriver.java │ │ │ │ │ ├── IngestFromHdfsPlugin.java │ │ │ │ │ ├── IngestMapper.java │ │ │ │ │ ├── IngestPersistableRegistry.java │ │ │ │ │ ├── IngestReducer.java │ │ │ │ │ ├── IngestWithMapper.java │ │ │ │ │ ├── IngestWithMapperJobRunner.java │ │ │ │ │ ├── IngestWithReducer.java │ │ │ │ │ ├── IngestWithReducerJobRunner.java │ │ │ │ │ ├── IntermediateKeyValueMapper.java │ │ │ │ │ ├── KeyValueData.java │ │ │ │ │ └── MapReduceCommandLineOptions.java │ │ │ │ ├── kafka/ │ │ │ │ │ ├── IngestFromKafkaDriver.java │ │ │ │ │ ├── KafkaCommandLineArgument.java │ │ │ │ │ ├── 
KafkaCommandLineOptions.java │ │ │ │ │ ├── KafkaConsumerCommandLineOptions.java │ │ │ │ │ ├── KafkaIngestRunData.java │ │ │ │ │ ├── KafkaProducerCommandLineOptions.java │ │ │ │ │ ├── PropertyReference.java │ │ │ │ │ ├── StageKafkaData.java │ │ │ │ │ └── StageToKafkaDriver.java │ │ │ │ ├── local/ │ │ │ │ │ └── LocalFileIngestCLIDriver.java │ │ │ │ ├── operations/ │ │ │ │ │ ├── AddTypeCommand.java │ │ │ │ │ ├── ConfigAWSCommand.java │ │ │ │ │ ├── IngestOperationProvider.java │ │ │ │ │ ├── IngestSection.java │ │ │ │ │ ├── KafkaToGeoWaveCommand.java │ │ │ │ │ ├── ListIngestPluginsCommand.java │ │ │ │ │ ├── LocalToGeoWaveCommand.java │ │ │ │ │ ├── LocalToHdfsCommand.java │ │ │ │ │ ├── LocalToKafkaCommand.java │ │ │ │ │ ├── LocalToMapReduceToGeoWaveCommand.java │ │ │ │ │ ├── MapReduceToGeoWaveCommand.java │ │ │ │ │ ├── SparkToGeoWaveCommand.java │ │ │ │ │ └── options/ │ │ │ │ │ └── IngestFormatPluginOptions.java │ │ │ │ ├── spark/ │ │ │ │ │ ├── SparkCommandLineOptions.java │ │ │ │ │ └── SparkIngestDriver.java │ │ │ │ └── spi/ │ │ │ │ ├── IngestFormatPluginProviderSpi.java │ │ │ │ └── IngestFormatPluginRegistry.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ ├── java.nio.file.spi.FileSystemProvider │ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ ├── org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi │ │ │ └── org.locationtech.geowave.core.store.ingest.LocalFileIngestPluginRegistrySpi │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── ingest/ │ │ └── s3/ │ │ └── DefaultGeoWaveAWSCredentialsProviderTest.java │ ├── mapreduce/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── mapreduce/ │ │ │ ├── AbstractGeoWaveJobRunner.java │ │ │ ├── BaseMapReduceDataStore.java │ │ │ ├── GeoWaveConfiguratorBase.java │ │ │ ├── 
GeoWaveKey.java │ │ │ ├── GeoWaveMapper.java │ │ │ ├── GeoWaveReducer.java │ │ │ ├── GeoWaveWritableInputMapper.java │ │ │ ├── GeoWaveWritableInputReducer.java │ │ │ ├── GeoWaveWritableOutputMapper.java │ │ │ ├── GeoWaveWritableOutputReducer.java │ │ │ ├── HadoopDataAdapter.java │ │ │ ├── HadoopWritableSerializationTool.java │ │ │ ├── HadoopWritableSerializer.java │ │ │ ├── JobContextAdapterIndexMappingStore.java │ │ │ ├── JobContextAdapterStore.java │ │ │ ├── JobContextIndexStore.java │ │ │ ├── JobContextInternalAdapterStore.java │ │ │ ├── MapReduceDataStore.java │ │ │ ├── MapReduceDataStoreOperations.java │ │ │ ├── MapReduceUtils.java │ │ │ ├── NativeMapContext.java │ │ │ ├── NativeReduceContext.java │ │ │ ├── URLClassloaderUtils.java │ │ │ ├── VFSClassLoaderTransformer.java │ │ │ ├── copy/ │ │ │ │ ├── StoreCopyJobRunner.java │ │ │ │ ├── StoreCopyMapper.java │ │ │ │ └── StoreCopyReducer.java │ │ │ ├── dedupe/ │ │ │ │ ├── GeoWaveDedupeCombiner.java │ │ │ │ ├── GeoWaveDedupeJobRunner.java │ │ │ │ ├── GeoWaveDedupeMapper.java │ │ │ │ └── GeoWaveDedupeReducer.java │ │ │ ├── hdfs/ │ │ │ │ └── HdfsUrlStreamHandlerFactory.java │ │ │ ├── input/ │ │ │ │ ├── AsyncInputFormatIteratorWrapper.java │ │ │ │ ├── GeoWaveInputConfigurator.java │ │ │ │ ├── GeoWaveInputFormat.java │ │ │ │ ├── GeoWaveInputKey.java │ │ │ │ └── InputFormatIteratorWrapper.java │ │ │ ├── operations/ │ │ │ │ ├── ConfigHDFSCommand.java │ │ │ │ ├── CopyCommand.java │ │ │ │ ├── CopyCommandOptions.java │ │ │ │ ├── HdfsHostPortConverter.java │ │ │ │ └── MapReduceOperationProvider.java │ │ │ ├── output/ │ │ │ │ ├── GeoWaveOutputFormat.java │ │ │ │ └── GeoWaveOutputKey.java │ │ │ ├── s3/ │ │ │ │ ├── DefaultGeoWaveAWSCredentialsProvider.java │ │ │ │ ├── GeoWaveAmazonS3Factory.java │ │ │ │ ├── S3Params.java │ │ │ │ ├── S3ParamsExtractor.java │ │ │ │ ├── S3URLConnection.java │ │ │ │ ├── S3URLStreamHandler.java │ │ │ │ └── S3URLStreamHandlerFactory.java │ │ │ └── splits/ │ │ │ ├── GeoWaveInputSplit.java │ │ │ ├── 
GeoWaveRecordReader.java │ │ │ ├── GeoWaveRowRange.java │ │ │ ├── IntermediateSplitInfo.java │ │ │ ├── RangeLocationPair.java │ │ │ ├── RecordReaderParams.java │ │ │ ├── SplitInfo.java │ │ │ └── SplitsProvider.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi │ ├── pom.xml │ └── store/ │ ├── .gitignore │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── antlr4/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── core/ │ │ │ └── store/ │ │ │ └── query/ │ │ │ └── gwql/ │ │ │ └── parse/ │ │ │ └── GWQL.g4 │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── core/ │ │ │ └── store/ │ │ │ ├── AdapterMapping.java │ │ │ ├── AdapterToIndexMapping.java │ │ │ ├── BaseDataStoreFactory.java │ │ │ ├── BaseDataStoreFamily.java │ │ │ ├── BaseDataStoreOptions.java │ │ │ ├── BaseStoreFactory.java │ │ │ ├── CloseableIterator.java │ │ │ ├── CloseableIteratorWrapper.java │ │ │ ├── DataStoreOptions.java │ │ │ ├── DataStoreProperty.java │ │ │ ├── EntryVisibilityHandler.java │ │ │ ├── GenericFactory.java │ │ │ ├── GenericStoreFactory.java │ │ │ ├── GeoWaveStoreFinder.java │ │ │ ├── PropertyStore.java │ │ │ ├── StoreFactoryFamilySpi.java │ │ │ ├── StoreFactoryHelper.java │ │ │ ├── StoreFactoryOptions.java │ │ │ ├── StorePersistableRegistry.java │ │ │ ├── adapter/ │ │ │ │ ├── AbstractAdapterPersistenceEncoding.java │ │ │ │ ├── AbstractDataTypeAdapter.java │ │ │ │ ├── AdapterIndexMappingStore.java │ │ │ │ ├── AdapterPersistenceEncoding.java │ │ │ │ ├── AdapterStore.java │ │ │ │ ├── AdapterStoreWrapper.java │ │ │ │ ├── AsyncPersistenceEncoding.java │ │ │ │ ├── BaseFieldDescriptor.java │ │ │ │ ├── BasicDataTypeAdapter.java │ │ │ │ ├── BinaryDataAdapter.java │ │ │ │ ├── FieldDescriptor.java │ │ │ │ ├── FieldDescriptorBuilder.java │ │ │ │ ├── FitToIndexPersistenceEncoding.java │ │ │ │ ├── 
FullAsyncPersistenceEncoding.java │ │ │ │ ├── IndexDependentDataAdapter.java │ │ │ │ ├── IndexedAdapterPersistenceEncoding.java │ │ │ │ ├── InternalAdapterStore.java │ │ │ │ ├── InternalAdapterUtils.java │ │ │ │ ├── InternalDataAdapter.java │ │ │ │ ├── InternalDataAdapterImpl.java │ │ │ │ ├── LazyReadPersistenceEncoding.java │ │ │ │ ├── MapRowBuilder.java │ │ │ │ ├── PartialAsyncPersistenceEncoding.java │ │ │ │ ├── PersistentAdapterStore.java │ │ │ │ ├── RowMergingDataAdapter.java │ │ │ │ ├── SimpleAbstractDataAdapter.java │ │ │ │ ├── SimpleRowTransform.java │ │ │ │ ├── SingletonFieldRowBuilder.java │ │ │ │ ├── TransientAdapterStore.java │ │ │ │ ├── annotation/ │ │ │ │ │ ├── AnnotatedFieldDescriptorBuilder.java │ │ │ │ │ ├── BaseAnnotatedFieldDescriptorBuilder.java │ │ │ │ │ ├── GeoWaveDataType.java │ │ │ │ │ ├── GeoWaveField.java │ │ │ │ │ └── GeoWaveFieldAnnotation.java │ │ │ │ ├── exceptions/ │ │ │ │ │ └── AdapterException.java │ │ │ │ └── statistics/ │ │ │ │ └── histogram/ │ │ │ │ ├── ByteUtils.java │ │ │ │ ├── FixedBinNumericHistogram.java │ │ │ │ ├── MinimalBinDistanceHistogram.java │ │ │ │ ├── NumericHistogram.java │ │ │ │ ├── NumericHistogramFactory.java │ │ │ │ └── TDigestNumericHistogram.java │ │ │ ├── api/ │ │ │ │ ├── Aggregation.java │ │ │ │ ├── AggregationQuery.java │ │ │ │ ├── AggregationQueryBuilder.java │ │ │ │ ├── AttributeIndex.java │ │ │ │ ├── BinConstraints.java │ │ │ │ ├── BinningStrategy.java │ │ │ │ ├── DataStore.java │ │ │ │ ├── DataStoreFactory.java │ │ │ │ ├── DataTypeAdapter.java │ │ │ │ ├── DataTypeStatistic.java │ │ │ │ ├── FieldStatistic.java │ │ │ │ ├── Index.java │ │ │ │ ├── IndexFieldMapper.java │ │ │ │ ├── IndexStatistic.java │ │ │ │ ├── IngestOptions.java │ │ │ │ ├── Query.java │ │ │ │ ├── QueryBuilder.java │ │ │ │ ├── QueryConstraintsFactory.java │ │ │ │ ├── RowBuilder.java │ │ │ │ ├── Statistic.java │ │ │ │ ├── StatisticBinningStrategy.java │ │ │ │ ├── StatisticQuery.java │ │ │ │ ├── StatisticQueryBuilder.java │ │ │ │ ├── 
StatisticValue.java │ │ │ │ ├── VisibilityHandler.java │ │ │ │ ├── WriteResults.java │ │ │ │ └── Writer.java │ │ │ ├── base/ │ │ │ │ ├── AbstractBaseRowQuery.java │ │ │ │ ├── BaseConstraintsQuery.java │ │ │ │ ├── BaseDataIndexWriter.java │ │ │ │ ├── BaseDataStore.java │ │ │ │ ├── BaseDataStoreUtils.java │ │ │ │ ├── BaseFilteredIndexQuery.java │ │ │ │ ├── BaseIndexWriter.java │ │ │ │ ├── BaseInsertionIdQuery.java │ │ │ │ ├── BaseQuery.java │ │ │ │ ├── BaseQueryOptions.java │ │ │ │ ├── BaseRowPrefixQuery.java │ │ │ │ ├── CastIterator.java │ │ │ │ ├── DataStoreCallbackManager.java │ │ │ │ ├── GeoWaveValueStore.java │ │ │ │ ├── IntermediaryReadEntryInfo.java │ │ │ │ ├── IntermediaryWriteEntryInfo.java │ │ │ │ └── dataidx/ │ │ │ │ ├── BatchDataIndexRetrieval.java │ │ │ │ ├── BatchDataIndexRetrievalIteratorHelper.java │ │ │ │ ├── BatchIndexRetrievalImpl.java │ │ │ │ ├── DataIndexRetrieval.java │ │ │ │ ├── DataIndexRetrievalImpl.java │ │ │ │ ├── DataIndexUtils.java │ │ │ │ ├── DefaultDataIndexRowDeleterWrapper.java │ │ │ │ └── DefaultDataIndexRowWriterWrapper.java │ │ │ ├── callback/ │ │ │ │ ├── DeleteCallback.java │ │ │ │ ├── DeleteCallbackList.java │ │ │ │ ├── DeleteOtherIndicesCallback.java │ │ │ │ ├── DuplicateDeletionCallback.java │ │ │ │ ├── IngestCallback.java │ │ │ │ ├── IngestCallbackList.java │ │ │ │ ├── ScanCallback.java │ │ │ │ └── ScanCallbackList.java │ │ │ ├── cli/ │ │ │ │ ├── CLIUtils.java │ │ │ │ ├── VisibilityOptions.java │ │ │ │ ├── index/ │ │ │ │ │ ├── AddIndexCommand.java │ │ │ │ │ ├── CompactIndexCommand.java │ │ │ │ │ ├── IndexOperationProvider.java │ │ │ │ │ ├── IndexSection.java │ │ │ │ │ ├── ListIndexPluginsCommand.java │ │ │ │ │ ├── ListIndicesCommand.java │ │ │ │ │ └── RemoveIndexCommand.java │ │ │ │ ├── query/ │ │ │ │ │ ├── CSVQueryOutputFormat.java │ │ │ │ │ ├── ConsoleQueryOutputFormat.java │ │ │ │ │ ├── GWQLQuery.java │ │ │ │ │ ├── QueryOperationProvider.java │ │ │ │ │ └── QueryOutputFormatSpi.java │ │ │ │ ├── stats/ │ │ │ │ │ ├── 
AbstractStatsCommand.java │ │ │ │ │ ├── AddStatCommand.java │ │ │ │ │ ├── CompactStatsCommand.java │ │ │ │ │ ├── ListStatTypesCommand.java │ │ │ │ │ ├── ListStatsCommand.java │ │ │ │ │ ├── RecalculateStatsCommand.java │ │ │ │ │ ├── RemoveStatCommand.java │ │ │ │ │ ├── StatsCommandLineOptions.java │ │ │ │ │ ├── StatsOperationProvider.java │ │ │ │ │ └── StatsSection.java │ │ │ │ ├── store/ │ │ │ │ │ ├── AbstractRemoveCommand.java │ │ │ │ │ ├── AddStoreCommand.java │ │ │ │ │ ├── ClearStoreCommand.java │ │ │ │ │ ├── CopyConfigStoreCommand.java │ │ │ │ │ ├── CopyStoreCommand.java │ │ │ │ │ ├── DataStorePluginOptions.java │ │ │ │ │ ├── DescribeStoreCommand.java │ │ │ │ │ ├── ListStorePluginsCommand.java │ │ │ │ │ ├── ListStoresCommand.java │ │ │ │ │ ├── RemoveStoreCommand.java │ │ │ │ │ ├── StoreLoader.java │ │ │ │ │ ├── StoreOperationProvider.java │ │ │ │ │ ├── StoreSection.java │ │ │ │ │ └── VersionCommand.java │ │ │ │ └── type/ │ │ │ │ ├── DescribeTypeCommand.java │ │ │ │ ├── ListTypesCommand.java │ │ │ │ ├── RemoveTypeCommand.java │ │ │ │ ├── TypeOperationProvider.java │ │ │ │ └── TypeSection.java │ │ │ ├── config/ │ │ │ │ ├── ConfigOption.java │ │ │ │ └── ConfigUtils.java │ │ │ ├── data/ │ │ │ │ ├── CommonIndexedPersistenceEncoding.java │ │ │ │ ├── DataReader.java │ │ │ │ ├── DataWriter.java │ │ │ │ ├── DeferredReadCommonIndexedPersistenceEncoding.java │ │ │ │ ├── IndexedPersistenceEncoding.java │ │ │ │ ├── MultiFieldPersistentDataset.java │ │ │ │ ├── PersistenceEncoding.java │ │ │ │ ├── PersistentDataset.java │ │ │ │ ├── PersistentValue.java │ │ │ │ ├── SingleFieldPersistentDataset.java │ │ │ │ ├── UnreadFieldDataList.java │ │ │ │ ├── field/ │ │ │ │ │ ├── ArrayReader.java │ │ │ │ │ ├── ArrayWriter.java │ │ │ │ │ ├── FieldReader.java │ │ │ │ │ ├── FieldSerializationProviderSpi.java │ │ │ │ │ ├── FieldUtils.java │ │ │ │ │ ├── FieldWriter.java │ │ │ │ │ ├── PersistableReader.java │ │ │ │ │ ├── PersistableWriter.java │ │ │ │ │ └── base/ │ │ │ │ │ ├── 
BigDecimalArraySerializationProvider.java │ │ │ │ │ ├── BigDecimalSerializationProvider.java │ │ │ │ │ ├── BigIntegerArraySerializationProvider.java │ │ │ │ │ ├── BigIntegerSerializationProvider.java │ │ │ │ │ ├── BooleanArraySerializationProvider.java │ │ │ │ │ ├── BooleanSerializationProvider.java │ │ │ │ │ ├── ByteArraySerializationProvider.java │ │ │ │ │ ├── ByteSerializationProvider.java │ │ │ │ │ ├── DoubleArraySerializationProvider.java │ │ │ │ │ ├── DoubleSerializationProvider.java │ │ │ │ │ ├── FloatArraySerializationProvider.java │ │ │ │ │ ├── FloatSerializationProvider.java │ │ │ │ │ ├── IntegerArraySerializationProvider.java │ │ │ │ │ ├── IntegerSerializationProvider.java │ │ │ │ │ ├── LongArraySerializationProvider.java │ │ │ │ │ ├── LongSerializationProvider.java │ │ │ │ │ ├── PrimitiveBooleanArraySerializationProvider.java │ │ │ │ │ ├── PrimitiveByteArraySerializationProvider.java │ │ │ │ │ ├── PrimitiveDoubleArraySerializationProvider.java │ │ │ │ │ ├── PrimitiveFloatArraySerializationProvider.java │ │ │ │ │ ├── PrimitiveIntArraySerializationProvider.java │ │ │ │ │ ├── PrimitiveLongArraySerializationProvider.java │ │ │ │ │ ├── PrimitiveShortArraySerializationProvider.java │ │ │ │ │ ├── ShortArraySerializationProvider.java │ │ │ │ │ ├── ShortSerializationProvider.java │ │ │ │ │ ├── StringArraySerializationProvider.java │ │ │ │ │ └── StringSerializationProvider.java │ │ │ │ └── visibility/ │ │ │ │ ├── FallbackVisibilityHandler.java │ │ │ │ ├── FieldLevelVisibilityHandler.java │ │ │ │ ├── FieldMappedVisibilityHandler.java │ │ │ │ ├── GlobalVisibilityHandler.java │ │ │ │ ├── JsonFieldLevelVisibilityHandler.java │ │ │ │ ├── UnconstrainedVisibilityHandler.java │ │ │ │ ├── VisibilityComposer.java │ │ │ │ └── VisibilityExpression.java │ │ │ ├── dimension/ │ │ │ │ ├── AbstractNumericDimensionField.java │ │ │ │ ├── BasicNumericDimensionField.java │ │ │ │ └── NumericDimensionField.java │ │ │ ├── entities/ │ │ │ │ ├── GeoWaveKey.java │ │ │ │ ├── 
GeoWaveKeyImpl.java │ │ │ │ ├── GeoWaveMetadata.java │ │ │ │ ├── GeoWaveRow.java │ │ │ │ ├── GeoWaveRowImpl.java │ │ │ │ ├── GeoWaveRowIteratorTransformer.java │ │ │ │ ├── GeoWaveRowMergingIterator.java │ │ │ │ ├── GeoWaveRowMergingTransform.java │ │ │ │ ├── GeoWaveValue.java │ │ │ │ ├── GeoWaveValueImpl.java │ │ │ │ └── MergeableGeoWaveRow.java │ │ │ ├── flatten/ │ │ │ │ ├── BitmaskUtils.java │ │ │ │ ├── BitmaskedPairComparator.java │ │ │ │ ├── FlattenedDataSet.java │ │ │ │ ├── FlattenedFieldInfo.java │ │ │ │ ├── FlattenedUnreadData.java │ │ │ │ └── FlattenedUnreadDataSingleRow.java │ │ │ ├── index/ │ │ │ │ ├── AttributeDimensionalityTypeProvider.java │ │ │ │ ├── AttributeIndexImpl.java │ │ │ │ ├── AttributeIndexOptions.java │ │ │ │ ├── AttributeIndexProviderSpi.java │ │ │ │ ├── BaseIndexBuilder.java │ │ │ │ ├── BasicIndexModel.java │ │ │ │ ├── CommonIndexModel.java │ │ │ │ ├── CompositeConstraints.java │ │ │ │ ├── CoreRegisteredIndexFieldMappers.java │ │ │ │ ├── CustomAttributeIndex.java │ │ │ │ ├── CustomIndex.java │ │ │ │ ├── CustomNameIndex.java │ │ │ │ ├── FilterableConstraints.java │ │ │ │ ├── IndexBuilder.java │ │ │ │ ├── IndexFieldMapperPersistableRegistry.java │ │ │ │ ├── IndexFieldMapperRegistry.java │ │ │ │ ├── IndexFieldMapperRegistrySPI.java │ │ │ │ ├── IndexFilter.java │ │ │ │ ├── IndexImpl.java │ │ │ │ ├── IndexPluginOptions.java │ │ │ │ ├── IndexStore.java │ │ │ │ ├── NoOpIndexFieldMapper.java │ │ │ │ ├── NullIndex.java │ │ │ │ ├── NumericAttributeIndexProvider.java │ │ │ │ ├── TextAttributeIndexProvider.java │ │ │ │ └── writer/ │ │ │ │ ├── IndependentAdapterIndexWriter.java │ │ │ │ └── IndexCompositeWriter.java │ │ │ ├── ingest/ │ │ │ │ ├── AbstractLocalFileDriver.java │ │ │ │ ├── AbstractLocalFileIngestDriver.java │ │ │ │ ├── BaseDataStoreIngestDriver.java │ │ │ │ ├── DataAdapterProvider.java │ │ │ │ ├── GeoWaveData.java │ │ │ │ ├── IndexProvider.java │ │ │ │ ├── IngestFormatOptions.java │ │ │ │ ├── IngestOptionsBuilderImpl.java │ │ │ │ ├── 
IngestPluginBase.java │ │ │ │ ├── IngestTask.java │ │ │ │ ├── IngestUrlHandlerSpi.java │ │ │ │ ├── IngestUtils.java │ │ │ │ ├── LocalFileIngestPlugin.java │ │ │ │ ├── LocalFileIngestPluginRegistrySpi.java │ │ │ │ ├── LocalIngestRunData.java │ │ │ │ ├── LocalInputCommandLineOptions.java │ │ │ │ ├── LocalPluginBase.java │ │ │ │ └── LocalPluginFileVisitor.java │ │ │ ├── memory/ │ │ │ │ ├── MemoryAdapterIndexMappingStore.java │ │ │ │ ├── MemoryAdapterStore.java │ │ │ │ ├── MemoryDataStoreOperations.java │ │ │ │ ├── MemoryFactoryHelper.java │ │ │ │ ├── MemoryIndexStore.java │ │ │ │ ├── MemoryMetadataFilteringIterator.java │ │ │ │ ├── MemoryPersistentAdapterStore.java │ │ │ │ ├── MemoryRequiredOptions.java │ │ │ │ ├── MemoryStoreFactoryFamily.java │ │ │ │ └── MemoryStoreUtils.java │ │ │ ├── metadata/ │ │ │ │ ├── AbstractGeoWavePersistence.java │ │ │ │ ├── AdapterIndexMappingStoreFactory.java │ │ │ │ ├── AdapterIndexMappingStoreImpl.java │ │ │ │ ├── AdapterStoreFactory.java │ │ │ │ ├── AdapterStoreImpl.java │ │ │ │ ├── DataStatisticsStoreFactory.java │ │ │ │ ├── DataStatisticsStoreImpl.java │ │ │ │ ├── IndexStoreFactory.java │ │ │ │ ├── IndexStoreImpl.java │ │ │ │ ├── InternalAdapterStoreFactory.java │ │ │ │ ├── InternalAdapterStoreImpl.java │ │ │ │ ├── MetadataIterators.java │ │ │ │ ├── PropertyStoreFactory.java │ │ │ │ └── PropertyStoreImpl.java │ │ │ ├── operations/ │ │ │ │ ├── BaseReaderParams.java │ │ │ │ ├── BaseReaderParamsBuilder.java │ │ │ │ ├── DataIndexReaderParams.java │ │ │ │ ├── DataIndexReaderParamsBuilder.java │ │ │ │ ├── DataStoreOperations.java │ │ │ │ ├── DataStoreOperationsFactory.java │ │ │ │ ├── Deleter.java │ │ │ │ ├── MetadataDeleter.java │ │ │ │ ├── MetadataQuery.java │ │ │ │ ├── MetadataReader.java │ │ │ │ ├── MetadataType.java │ │ │ │ ├── MetadataWriter.java │ │ │ │ ├── ParallelDecoder.java │ │ │ │ ├── QueryAndDeleteByRow.java │ │ │ │ ├── RangeReaderParams.java │ │ │ │ ├── RangeReaderParamsBuilder.java │ │ │ │ ├── ReaderParams.java │ │ │ │ ├── 
ReaderParamsBuilder.java │ │ │ │ ├── RowDeleter.java │ │ │ │ ├── RowReader.java │ │ │ │ ├── RowReaderWrapper.java │ │ │ │ ├── RowWriter.java │ │ │ │ ├── SimpleParallelDecoder.java │ │ │ │ ├── config/ │ │ │ │ │ └── IndexDefaultConfigProvider.java │ │ │ │ └── remote/ │ │ │ │ └── options/ │ │ │ │ └── BasicIndexOptions.java │ │ │ ├── query/ │ │ │ │ ├── BaseQuery.java │ │ │ │ ├── BaseQueryBuilder.java │ │ │ │ ├── BaseQueryBuilderImpl.java │ │ │ │ ├── QueryBuilderImpl.java │ │ │ │ ├── aggregate/ │ │ │ │ │ ├── AdapterAndIndexBasedAggregation.java │ │ │ │ │ ├── AggregationQueryBuilderImpl.java │ │ │ │ │ ├── BinningAggregation.java │ │ │ │ │ ├── BinningAggregationOptions.java │ │ │ │ │ ├── CommonIndexAggregation.java │ │ │ │ │ ├── CompositeAggregation.java │ │ │ │ │ ├── CountAggregation.java │ │ │ │ │ ├── FieldMathAggregation.java │ │ │ │ │ ├── FieldMaxAggregation.java │ │ │ │ │ ├── FieldMinAggregation.java │ │ │ │ │ ├── FieldNameParam.java │ │ │ │ │ ├── FieldSumAggregation.java │ │ │ │ │ ├── MergingAggregation.java │ │ │ │ │ ├── OptimalCountAggregation.java │ │ │ │ │ └── OptimalFieldAggregation.java │ │ │ │ ├── constraints/ │ │ │ │ │ ├── AdapterAndIndexBasedQueryConstraints.java │ │ │ │ │ ├── BasicOrderedConstraintQuery.java │ │ │ │ │ ├── BasicQuery.java │ │ │ │ │ ├── BasicQueryByClass.java │ │ │ │ │ ├── Constraints.java │ │ │ │ │ ├── CoordinateRangeQuery.java │ │ │ │ │ ├── CoordinateRangeUtils.java │ │ │ │ │ ├── CustomQueryConstraints.java │ │ │ │ │ ├── CustomQueryConstraintsWithFilter.java │ │ │ │ │ ├── DataIdQuery.java │ │ │ │ │ ├── DataIdRangeQuery.java │ │ │ │ │ ├── EverythingQuery.java │ │ │ │ │ ├── ExplicitFilteredQuery.java │ │ │ │ │ ├── FilteredEverythingQuery.java │ │ │ │ │ ├── InsertionIdQuery.java │ │ │ │ │ ├── OptimalExpressionQuery.java │ │ │ │ │ ├── PrefixIdQuery.java │ │ │ │ │ ├── QueryConstraints.java │ │ │ │ │ ├── QueryConstraintsFactoryImpl.java │ │ │ │ │ ├── SimpleNumericQuery.java │ │ │ │ │ └── TypeConstraintQuery.java │ │ │ │ ├── filter/ │ │ │ │ │ ├── 
AdapterIdQueryFilter.java │ │ │ │ │ ├── BasicQueryFilter.java │ │ │ │ │ ├── ClientVisibilityFilter.java │ │ │ │ │ ├── CoordinateRangeQueryFilter.java │ │ │ │ │ ├── DataIdQueryFilter.java │ │ │ │ │ ├── DataIdRangeQueryFilter.java │ │ │ │ │ ├── DedupeFilter.java │ │ │ │ │ ├── ExpressionQueryFilter.java │ │ │ │ │ ├── FilterList.java │ │ │ │ │ ├── FixedResolutionSubsampleQueryFilter.java │ │ │ │ │ ├── InsertionIdQueryFilter.java │ │ │ │ │ ├── PrefixIdQueryFilter.java │ │ │ │ │ ├── QueryFilter.java │ │ │ │ │ └── expression/ │ │ │ │ │ ├── And.java │ │ │ │ │ ├── Between.java │ │ │ │ │ ├── BinaryPredicate.java │ │ │ │ │ ├── BooleanExpression.java │ │ │ │ │ ├── BooleanFieldValue.java │ │ │ │ │ ├── BooleanLiteral.java │ │ │ │ │ ├── ComparableExpression.java │ │ │ │ │ ├── ComparisonOperator.java │ │ │ │ │ ├── Exclude.java │ │ │ │ │ ├── Expression.java │ │ │ │ │ ├── FieldValue.java │ │ │ │ │ ├── Filter.java │ │ │ │ │ ├── FilterConstraints.java │ │ │ │ │ ├── FilterRange.java │ │ │ │ │ ├── GenericEqualTo.java │ │ │ │ │ ├── GenericExpression.java │ │ │ │ │ ├── GenericFieldValue.java │ │ │ │ │ ├── GenericLiteral.java │ │ │ │ │ ├── GenericNotEqualTo.java │ │ │ │ │ ├── Include.java │ │ │ │ │ ├── IndexFieldConstraints.java │ │ │ │ │ ├── InvalidFilterException.java │ │ │ │ │ ├── IsNotNull.java │ │ │ │ │ ├── IsNull.java │ │ │ │ │ ├── Literal.java │ │ │ │ │ ├── MultiFilterOperator.java │ │ │ │ │ ├── Not.java │ │ │ │ │ ├── Or.java │ │ │ │ │ ├── Predicate.java │ │ │ │ │ ├── numeric/ │ │ │ │ │ │ ├── Abs.java │ │ │ │ │ │ ├── Add.java │ │ │ │ │ │ ├── Divide.java │ │ │ │ │ │ ├── MathExpression.java │ │ │ │ │ │ ├── Multiply.java │ │ │ │ │ │ ├── NumericBetween.java │ │ │ │ │ │ ├── NumericComparisonOperator.java │ │ │ │ │ │ ├── NumericExpression.java │ │ │ │ │ │ ├── NumericFieldConstraints.java │ │ │ │ │ │ ├── NumericFieldValue.java │ │ │ │ │ │ ├── NumericLiteral.java │ │ │ │ │ │ └── Subtract.java │ │ │ │ │ └── text/ │ │ │ │ │ ├── Concat.java │ │ │ │ │ ├── Contains.java │ │ │ │ │ ├── 
EndsWith.java │ │ │ │ │ ├── StartsWith.java │ │ │ │ │ ├── TextBetween.java │ │ │ │ │ ├── TextBinaryPredicate.java │ │ │ │ │ ├── TextComparisonOperator.java │ │ │ │ │ ├── TextExpression.java │ │ │ │ │ ├── TextFieldConstraints.java │ │ │ │ │ ├── TextFieldValue.java │ │ │ │ │ ├── TextFilterRange.java │ │ │ │ │ └── TextLiteral.java │ │ │ │ ├── gwql/ │ │ │ │ │ ├── AdapterEntryResultSet.java │ │ │ │ │ ├── AggregationSelector.java │ │ │ │ │ ├── CastableType.java │ │ │ │ │ ├── ColumnSelector.java │ │ │ │ │ ├── ErrorListener.java │ │ │ │ │ ├── GWQLCoreExtensions.java │ │ │ │ │ ├── GWQLExtensionRegistry.java │ │ │ │ │ ├── GWQLExtensionRegistrySpi.java │ │ │ │ │ ├── GWQLParseException.java │ │ │ │ │ ├── GWQLParseHelper.java │ │ │ │ │ ├── QLFunction.java │ │ │ │ │ ├── Result.java │ │ │ │ │ ├── ResultSet.java │ │ │ │ │ ├── Selector.java │ │ │ │ │ ├── SingletonResultSet.java │ │ │ │ │ ├── function/ │ │ │ │ │ │ ├── aggregation/ │ │ │ │ │ │ │ ├── AggregationFunction.java │ │ │ │ │ │ │ ├── CountFunction.java │ │ │ │ │ │ │ ├── MathAggregationFunction.java │ │ │ │ │ │ │ ├── MaxFunction.java │ │ │ │ │ │ │ ├── MinFunction.java │ │ │ │ │ │ │ └── SumFunction.java │ │ │ │ │ │ ├── expression/ │ │ │ │ │ │ │ ├── AbsFunction.java │ │ │ │ │ │ │ ├── ConcatFunction.java │ │ │ │ │ │ │ └── ExpressionFunction.java │ │ │ │ │ │ ├── operator/ │ │ │ │ │ │ │ └── OperatorFunction.java │ │ │ │ │ │ └── predicate/ │ │ │ │ │ │ ├── PredicateFunction.java │ │ │ │ │ │ └── TextPredicates.java │ │ │ │ │ ├── statement/ │ │ │ │ │ │ ├── DeleteStatement.java │ │ │ │ │ │ ├── SelectStatement.java │ │ │ │ │ │ └── Statement.java │ │ │ │ │ └── type/ │ │ │ │ │ ├── NumberCastableType.java │ │ │ │ │ └── TextCastableType.java │ │ │ │ └── options/ │ │ │ │ ├── AggregateTypeQueryOptions.java │ │ │ │ ├── CommonQueryOptions.java │ │ │ │ ├── DataTypeQueryOptions.java │ │ │ │ ├── FilterByTypeQueryOptions.java │ │ │ │ ├── IndexQueryOptions.java │ │ │ │ ├── QueryAllIndices.java │ │ │ │ ├── QueryAllTypes.java │ │ │ │ └── 
QuerySingleIndex.java │ │ │ ├── server/ │ │ │ │ ├── BasicOptionProvider.java │ │ │ │ ├── RowMergingAdapterOptionProvider.java │ │ │ │ ├── ServerOpConfig.java │ │ │ │ ├── ServerOpHelper.java │ │ │ │ └── ServerSideOperations.java │ │ │ ├── spi/ │ │ │ │ ├── ClassLoaderTransformerSpi.java │ │ │ │ ├── DimensionalityTypeOptions.java │ │ │ │ ├── DimensionalityTypeProviderSpi.java │ │ │ │ └── DimensionalityTypeRegistry.java │ │ │ ├── statistics/ │ │ │ │ ├── CoreRegisteredStatistics.java │ │ │ │ ├── DataStatisticsStore.java │ │ │ │ ├── DefaultStatisticsProvider.java │ │ │ │ ├── InternalStatisticsHelper.java │ │ │ │ ├── StatisticId.java │ │ │ │ ├── StatisticType.java │ │ │ │ ├── StatisticUpdateCallback.java │ │ │ │ ├── StatisticUpdateHandler.java │ │ │ │ ├── StatisticValueReader.java │ │ │ │ ├── StatisticValueWriter.java │ │ │ │ ├── StatisticsDeleteCallback.java │ │ │ │ ├── StatisticsIngestCallback.java │ │ │ │ ├── StatisticsPersistableRegistry.java │ │ │ │ ├── StatisticsRegistry.java │ │ │ │ ├── StatisticsRegistrySPI.java │ │ │ │ ├── StatisticsValueIterator.java │ │ │ │ ├── adapter/ │ │ │ │ │ ├── CountStatistic.java │ │ │ │ │ └── DataTypeStatisticType.java │ │ │ │ ├── binning/ │ │ │ │ │ ├── BinningStrategyUtils.java │ │ │ │ │ ├── CompositeBinningStrategy.java │ │ │ │ │ ├── DataTypeBinningStrategy.java │ │ │ │ │ ├── FieldValueBinningStrategy.java │ │ │ │ │ ├── NumericRangeFieldValueBinningStrategy.java │ │ │ │ │ └── PartitionBinningStrategy.java │ │ │ │ ├── field/ │ │ │ │ │ ├── BloomFilterStatistic.java │ │ │ │ │ ├── CountMinSketchStatistic.java │ │ │ │ │ ├── FieldStatisticId.java │ │ │ │ │ ├── FieldStatisticType.java │ │ │ │ │ ├── FixedBinNumericHistogramStatistic.java │ │ │ │ │ ├── HyperLogLogStatistic.java │ │ │ │ │ ├── NumericHistogramStatistic.java │ │ │ │ │ ├── NumericMeanStatistic.java │ │ │ │ │ ├── NumericRangeStatistic.java │ │ │ │ │ ├── NumericStatsStatistic.java │ │ │ │ │ ├── Stats.java │ │ │ │ │ └── StatsAccumulator.java │ │ │ │ ├── index/ │ │ │ │ │ ├── 
DifferingVisibilityCountStatistic.java │ │ │ │ │ ├── DuplicateEntryCountStatistic.java │ │ │ │ │ ├── FieldVisibilityCountStatistic.java │ │ │ │ │ ├── IndexMetaDataSetStatistic.java │ │ │ │ │ ├── IndexStatisticType.java │ │ │ │ │ ├── MaxDuplicatesStatistic.java │ │ │ │ │ ├── PartitionsStatistic.java │ │ │ │ │ └── RowRangeHistogramStatistic.java │ │ │ │ ├── query/ │ │ │ │ │ ├── AbstractStatisticQuery.java │ │ │ │ │ ├── AbstractStatisticQueryBuilder.java │ │ │ │ │ ├── BinConstraintsImpl.java │ │ │ │ │ ├── DataTypeStatisticQuery.java │ │ │ │ │ ├── DataTypeStatisticQueryBuilder.java │ │ │ │ │ ├── FieldStatisticQuery.java │ │ │ │ │ ├── FieldStatisticQueryBuilder.java │ │ │ │ │ ├── IndexStatisticQuery.java │ │ │ │ │ └── IndexStatisticQueryBuilder.java │ │ │ │ └── visibility/ │ │ │ │ ├── DefaultStatisticVisibility.java │ │ │ │ └── FieldDependentStatisticVisibility.java │ │ │ └── util/ │ │ │ ├── AsyncNativeEntryIteratorWrapper.java │ │ │ ├── ClasspathUtils.java │ │ │ ├── CompoundHierarchicalIndexStrategyWrapper.java │ │ │ ├── DataAdapterAndIndexCache.java │ │ │ ├── DataStoreUtils.java │ │ │ ├── GenericTypeResolver.java │ │ │ ├── GeoWaveRowIteratorFactory.java │ │ │ ├── IteratorWrapper.java │ │ │ ├── MergingEntryIterator.java │ │ │ ├── NativeEntryIteratorWrapper.java │ │ │ ├── NativeEntryTransformer.java │ │ │ ├── RewritingMergingEntryIterator.java │ │ │ ├── RowConsumer.java │ │ │ └── SecondaryIndexEntryIteratorWrapper.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ ├── org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi │ │ ├── org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi │ │ ├── org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi │ │ ├── 
org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI │ │ ├── org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi │ │ ├── org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi │ │ └── org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── core/ │ │ └── store/ │ │ ├── DataStorePropertyTest.java │ │ ├── TestStorePersistableRegistry.java │ │ ├── adapter/ │ │ │ ├── AbstractDataTypeAdapterTest.java │ │ │ ├── BasicDataTypeAdapterTest.java │ │ │ ├── FieldDescriptorTest.java │ │ │ ├── IndexFieldMapperTest.java │ │ │ ├── MockComponents.java │ │ │ ├── MockRegisteredIndexFieldMappers.java │ │ │ └── statistics/ │ │ │ └── histogram/ │ │ │ ├── ByteUtilsTest.java │ │ │ └── NumericHistogramTest.java │ │ ├── api/ │ │ │ ├── DataStoreAddTest.java │ │ │ └── DataStoreRemoveTest.java │ │ ├── data/ │ │ │ ├── field/ │ │ │ │ └── BasicReaderWriterTest.java │ │ │ └── visibility/ │ │ │ ├── JsonFieldLevelVisibilityHandlerTest.java │ │ │ └── VisibilityExpressionTest.java │ │ ├── flatten/ │ │ │ └── BitmaskUtilsTest.java │ │ ├── memory/ │ │ │ ├── MemoryDataStoreTest.java │ │ │ └── MemoryStoreUtilsTest.java │ │ ├── query/ │ │ │ ├── BasicQueryByClassTest.java │ │ │ ├── aggregate/ │ │ │ │ ├── AbstractAggregationTest.java │ │ │ │ ├── AbstractCommonIndexAggregationTest.java │ │ │ │ ├── BinningAggregationOptionsTest.java │ │ │ │ ├── BinningAggregationTest.java │ │ │ │ └── CountAggregationTest.java │ │ │ ├── filter/ │ │ │ │ ├── DistributedQueryFilterTest.java │ │ │ │ └── expression/ │ │ │ │ ├── FilterExpressionTest.java │ │ │ │ └── FilterRangeTest.java │ │ │ └── gwql/ │ │ │ ├── AbstractGWQLTest.java │ │ │ ├── DeleteStatementTest.java │ │ │ ├── GWQLParserTest.java │ │ │ └── SelectStatementTest.java │ │ └── statistics/ │ │ └── index/ │ │ ├── PartitionsStatisticTest.java │ │ └── RowRangeHistogramStatisticTest.java │ └── resources/ │ └── META-INF/ │ └── 
services/ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ └── org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI ├── deploy/ │ ├── Jenkinsfile │ ├── packaging/ │ │ ├── docker/ │ │ │ ├── .gitignore │ │ │ ├── README.md │ │ │ ├── build-args-matrix.sh.example │ │ │ ├── build-rpm/ │ │ │ │ ├── build-rpm.sh │ │ │ │ ├── build-services-rpm.sh │ │ │ │ ├── deploy-geowave-to-hdfs.sh.template │ │ │ │ └── fpm_scripts/ │ │ │ │ ├── gwgeoserver_logrotate │ │ │ │ ├── gwgrpc.environment │ │ │ │ ├── gwgrpc.rsyslog │ │ │ │ ├── gwgrpc.service │ │ │ │ ├── gwgrpc_logrotate │ │ │ │ ├── gwgrpc_post_install.sh │ │ │ │ ├── gwgrpc_post_uninstall.sh │ │ │ │ ├── gwtomcat │ │ │ │ ├── gwtomcat.service │ │ │ │ ├── gwtomcat_logrotate │ │ │ │ ├── gwtomcat_post_install.sh │ │ │ │ ├── gwtomcat_post_uninstall.sh │ │ │ │ ├── gwtomcat_pre_uninstall.sh │ │ │ │ └── gwtomcat_tools.sh.template │ │ │ ├── build-src/ │ │ │ │ ├── build-geowave-common.sh │ │ │ │ └── build-geowave-vendor.sh │ │ │ ├── docker-build-rpms.sh │ │ │ ├── geowave-centos7-java7-build.dockerfile │ │ │ ├── geowave-centos7-java8-build.dockerfile │ │ │ ├── geowave-centos7-publish.dockerfile │ │ │ ├── geowave-centos7-rpm-build.dockerfile │ │ │ ├── init.sh │ │ │ ├── publish/ │ │ │ │ ├── publish-common-rpm.sh │ │ │ │ └── publish-vendor-rpm.sh │ │ │ └── pull-s3-caches.sh │ │ ├── emr/ │ │ │ ├── README.md │ │ │ ├── generate-emr-scripts.sh │ │ │ └── template/ │ │ │ ├── accumulo/ │ │ │ │ ├── DATASTORE_BOOTSTRAP_TOKEN │ │ │ │ ├── DATASTORE_CONFIGURE_GEOWAVE_TOKEN │ │ │ │ ├── DATASTORE_LIB_TOKEN │ │ │ │ ├── DATASTORE_PARAMS_TOKEN │ │ │ │ └── DATASTORE_PUPPET_TOKEN │ │ │ ├── bootstrap-geowave.sh.template │ │ │ ├── bootstrap-zeppelin.sh.template │ │ │ ├── cassandra/ │ │ │ │ ├── DATASTORE_BOOTSTRAP_TOKEN │ │ │ │ └── DATASTORE_PARAMS_TOKEN │ │ │ ├── configure-zeppelin.sh.template │ │ │ ├── geowave-install-lib.sh.template │ │ │ ├── hbase/ │ │ │ │ ├── DATASTORE_PARAMS_TOKEN │ │ │ │ └── DATASTORE_PUPPET_TOKEN │ │ │ 
├── jupyter/ │ │ │ │ ├── bootstrap-jupyter.sh.template │ │ │ │ ├── bootstrap-jupyterhub.sh.template │ │ │ │ ├── create-configure-kernel.sh.template │ │ │ │ ├── gw-base.yml │ │ │ │ ├── install-conda.sh │ │ │ │ ├── jupyterhub_config.py │ │ │ │ └── pre-spawn.sh │ │ │ └── quickstart/ │ │ │ ├── QUICKSTART_BOOTSTRAP_TOKEN │ │ │ ├── geowave-env.sh.template │ │ │ ├── ingest-and-kde-gdelt.sh.template │ │ │ └── setup-geoserver-geowave-workspace.sh │ │ ├── puppet/ │ │ │ └── geowave/ │ │ │ └── manifests/ │ │ │ ├── accumulo.pp │ │ │ ├── app.pp │ │ │ ├── gwgeoserver.pp │ │ │ ├── gwgrpc.pp │ │ │ ├── gwtomcat_server.pp │ │ │ ├── gwtomcat_service.pp │ │ │ ├── hbase.pp │ │ │ ├── init.pp │ │ │ ├── params.pp │ │ │ ├── repo.pp │ │ │ └── restservices.pp │ │ ├── rpm/ │ │ │ ├── .gitignore │ │ │ ├── centos/ │ │ │ │ └── 7/ │ │ │ │ ├── .gitignore │ │ │ │ ├── SOURCES/ │ │ │ │ │ ├── bash_profile.sh │ │ │ │ │ ├── default.xml │ │ │ │ │ ├── geowave-tools.sh │ │ │ │ │ ├── namespace.xml │ │ │ │ │ └── workspace.xml │ │ │ │ ├── SPECS/ │ │ │ │ │ ├── geowave-common.spec │ │ │ │ │ └── geowave-vendor.spec │ │ │ │ └── rpm.sh │ │ │ ├── repo-dev/ │ │ │ │ ├── SOURCES/ │ │ │ │ │ └── geowave-dev.repo │ │ │ │ ├── SPECS/ │ │ │ │ │ └── geowave-dev.spec │ │ │ │ └── rpm.sh │ │ │ ├── repo-release/ │ │ │ │ ├── SOURCES/ │ │ │ │ │ └── geowave.repo │ │ │ │ ├── SPECS/ │ │ │ │ │ └── geowave-release.spec │ │ │ │ └── rpm.sh │ │ │ └── rpm-functions.sh │ │ ├── sandbox/ │ │ │ ├── generate-sandbox-scripts.sh │ │ │ └── template/ │ │ │ └── quickstart/ │ │ │ └── geowave-env.sh.template │ │ └── standalone/ │ │ └── standalone-installer.install4j │ ├── pom.xml │ ├── scripts/ │ │ └── clean-up.py │ └── src/ │ └── main/ │ └── resources/ │ ├── GeoWaveLabels.properties │ ├── build.properties │ └── log4j2.properties ├── dev-resources/ │ ├── pom.xml │ └── src/ │ └── main/ │ └── resources/ │ ├── assemblies/ │ │ ├── default-installer-main.xml │ │ └── default-installer-plugin.xml │ ├── eclipse/ │ │ ├── eclipse-cleanup.xml │ │ └── 
eclipse-formatter.xml │ └── findbugs/ │ └── findbugs-exclude.xml ├── docs/ │ ├── .gitignore │ ├── content/ │ │ ├── commands/ │ │ │ ├── 000-header.adoc │ │ │ ├── 005-commands-and-flags.adoc │ │ │ ├── 010-config-commands.adoc │ │ │ ├── 011-store-commands.adoc │ │ │ ├── 012-index-commands.adoc │ │ │ ├── 013-type-commands.adoc │ │ │ ├── 014-stat-commands.adoc │ │ │ ├── 015-ingest-commands.adoc │ │ │ ├── 017-query-command.adoc │ │ │ ├── 020-analytic-commands.adoc │ │ │ ├── 025-vector-commands.adoc │ │ │ ├── 030-raster-commands.adoc │ │ │ ├── 035-geoserver-commands.adoc │ │ │ ├── 040-util-commands.adoc │ │ │ ├── 041-util-migrate.adoc │ │ │ ├── 045-util-standalone-commands.adoc │ │ │ ├── 050-util-accumulo-commands.adoc │ │ │ ├── 050-util-osm-commands.adoc │ │ │ ├── 055-util-python-commands.adoc │ │ │ ├── 060-util-landsat-commands.adoc │ │ │ ├── 065-util-grpc-commands.adoc │ │ │ ├── 070-util-filesystem-commands.adoc │ │ │ └── manpages/ │ │ │ ├── analytic/ │ │ │ │ ├── geowave-dbscan.txt │ │ │ │ ├── geowave-kde.txt │ │ │ │ ├── geowave-kdespark.txt │ │ │ │ ├── geowave-kmeansjump.txt │ │ │ │ ├── geowave-kmeansparallel.txt │ │ │ │ ├── geowave-kmeansspark.txt │ │ │ │ ├── geowave-nn.txt │ │ │ │ ├── geowave-spatialjoin.txt │ │ │ │ └── geowave-sql.txt │ │ │ ├── config/ │ │ │ │ ├── geowave-aws.txt │ │ │ │ ├── geowave-geoserver.txt │ │ │ │ ├── geowave-hdfs.txt │ │ │ │ ├── geowave-list.txt │ │ │ │ ├── geowave-newcryptokey.txt │ │ │ │ └── geowave-set.txt │ │ │ ├── geoserver/ │ │ │ │ ├── geowave-addcs.txt │ │ │ │ ├── geowave-addcv.txt │ │ │ │ ├── geowave-addds.txt │ │ │ │ ├── geowave-addfl.txt │ │ │ │ ├── geowave-addlayer.txt │ │ │ │ ├── geowave-addstyle.txt │ │ │ │ ├── geowave-addws.txt │ │ │ │ ├── geowave-getcs.txt │ │ │ │ ├── geowave-getcv.txt │ │ │ │ ├── geowave-getds.txt │ │ │ │ ├── geowave-getfl.txt │ │ │ │ ├── geowave-getsa.txt │ │ │ │ ├── geowave-getstyle.txt │ │ │ │ ├── geowave-listcs.txt │ │ │ │ ├── geowave-listcv.txt │ │ │ │ ├── geowave-listds.txt │ │ │ │ ├── 
geowave-listfl.txt │ │ │ │ ├── geowave-liststyles.txt │ │ │ │ ├── geowave-listws.txt │ │ │ │ ├── geowave-rmcs.txt │ │ │ │ ├── geowave-rmcv.txt │ │ │ │ ├── geowave-rmds.txt │ │ │ │ ├── geowave-rmfl.txt │ │ │ │ ├── geowave-rmstyle.txt │ │ │ │ ├── geowave-rmws.txt │ │ │ │ ├── geowave-rungs.txt │ │ │ │ └── geowave-setls.txt │ │ │ ├── index/ │ │ │ │ ├── geowave-addindex.txt │ │ │ │ ├── geowave-compactindex.txt │ │ │ │ ├── geowave-listindex.txt │ │ │ │ ├── geowave-listindexplugins.txt │ │ │ │ └── geowave-rmindex.txt │ │ │ ├── ingest/ │ │ │ │ ├── geowave-kafkaToGW.txt │ │ │ │ ├── geowave-listplugins.txt │ │ │ │ ├── geowave-localToGW.txt │ │ │ │ ├── geowave-localToHdfs.txt │ │ │ │ ├── geowave-localToKafka.txt │ │ │ │ ├── geowave-localToMrGW.txt │ │ │ │ ├── geowave-mrToGW.txt │ │ │ │ └── geowave-sparkToGW.txt │ │ │ ├── query/ │ │ │ │ └── geowave-query.txt │ │ │ ├── raster/ │ │ │ │ ├── geowave-installgdal.txt │ │ │ │ ├── geowave-resizemr.txt │ │ │ │ └── geowave-resizespark.txt │ │ │ ├── stat/ │ │ │ │ ├── geowave-addstat.txt │ │ │ │ ├── geowave-compactstats.txt │ │ │ │ ├── geowave-liststats.txt │ │ │ │ ├── geowave-liststattypes.txt │ │ │ │ ├── geowave-recalcstats.txt │ │ │ │ └── geowave-rmstat.txt │ │ │ ├── store/ │ │ │ │ ├── geowave-addstore.txt │ │ │ │ ├── geowave-clear.txt │ │ │ │ ├── geowave-copy.txt │ │ │ │ ├── geowave-copymr.txt │ │ │ │ ├── geowave-copystorecfg.txt │ │ │ │ ├── geowave-describestore.txt │ │ │ │ ├── geowave-liststoreplugins.txt │ │ │ │ ├── geowave-liststores.txt │ │ │ │ ├── geowave-rmstore.txt │ │ │ │ └── geowave-version.txt │ │ │ ├── type/ │ │ │ │ ├── geowave-addtype.txt │ │ │ │ ├── geowave-describetype.txt │ │ │ │ ├── geowave-listtypes.txt │ │ │ │ └── geowave-rmtype.txt │ │ │ ├── util/ │ │ │ │ ├── accumulo/ │ │ │ │ │ ├── geowave-presplitpartitionid.txt │ │ │ │ │ ├── geowave-runserver.txt │ │ │ │ │ ├── geowave-splitequalinterval.txt │ │ │ │ │ ├── geowave-splitnumrecords.txt │ │ │ │ │ └── geowave-splitquantile.txt │ │ │ │ ├── bigtable/ │ │ │ │ │ └── 
geowave-runbigtable.txt │ │ │ │ ├── cassandra/ │ │ │ │ │ └── geowave-runcassandra.txt │ │ │ │ ├── dynamodb/ │ │ │ │ │ └── geowave-rundynamodb.txt │ │ │ │ ├── filesystem/ │ │ │ │ │ └── geowave-filesystem-listformats.txt │ │ │ │ ├── grpc/ │ │ │ │ │ ├── geowave-grpc-start.txt │ │ │ │ │ └── geowave-grpc-stop.txt │ │ │ │ ├── hbase/ │ │ │ │ │ └── geowave-runhbase.txt │ │ │ │ ├── kudu/ │ │ │ │ │ └── geowave-runkudu.txt │ │ │ │ ├── landsat/ │ │ │ │ │ ├── geowave-analyze.txt │ │ │ │ │ ├── geowave-download.txt │ │ │ │ │ ├── geowave-ingest.txt │ │ │ │ │ ├── geowave-ingestraster.txt │ │ │ │ │ └── geowave-ingestvector.txt │ │ │ │ ├── migrate/ │ │ │ │ │ └── geowave-util-migrate.txt │ │ │ │ ├── osm/ │ │ │ │ │ ├── geowave-ingest.txt │ │ │ │ │ └── geowave-stage.txt │ │ │ │ ├── python/ │ │ │ │ │ └── geowave-python-rungateway.txt │ │ │ │ └── redis/ │ │ │ │ └── geowave-runredis.txt │ │ │ └── vector/ │ │ │ ├── geowave-cqldelete.txt │ │ │ ├── geowave-localexport.txt │ │ │ └── geowave-mrexport.txt │ │ ├── devguide/ │ │ │ ├── 000-header.adoc │ │ │ ├── 005-introduction.adoc │ │ │ ├── 010-development-setup.adoc │ │ │ ├── 015-building.adoc │ │ │ ├── 020-packaging.adoc │ │ │ ├── 025-contributions.adoc │ │ │ ├── 030-architecture.adoc │ │ │ ├── 035-statistics.adoc │ │ │ ├── 040-ingest.adoc │ │ │ ├── 045-query.adoc │ │ │ ├── 050-services.adoc │ │ │ ├── 075-programmatic-api.adoc │ │ │ ├── 100-appendices.adoc │ │ │ ├── 102-extending-geowave.adoc │ │ │ ├── 105-appendix-documentation.adoc │ │ │ ├── 105-appendix-project-descriptions.adoc │ │ │ ├── 110-appendix-maven-artifacts.adoc │ │ │ ├── 115-appendix-python-api.adoc │ │ │ ├── 120-appendix-jace.adoc │ │ │ └── 125-appendix-theory.adoc │ │ ├── docs-common/ │ │ │ ├── 00-attrs.adoc │ │ │ ├── 900-version.adoc │ │ │ └── docinfo.html │ │ ├── downloads/ │ │ │ ├── 001-imports.adoc │ │ │ ├── 002-navbar.adoc │ │ │ ├── 003-container.adoc │ │ │ └── 004-scripts.adoc │ │ ├── geowave-index/ │ │ │ ├── 001-imports.adoc │ │ │ ├── 002-navbar.adoc │ │ │ ├── 
003-container.adoc │ │ │ ├── docinfo.html │ │ │ ├── js/ │ │ │ │ ├── geowave.js │ │ │ │ └── versions.js │ │ │ ├── site.webmanifest │ │ │ └── stylesheets/ │ │ │ ├── blank.css │ │ │ ├── font-awesome.css │ │ │ ├── geowave-boostrap-theme.css │ │ │ ├── geowave-docs.css │ │ │ └── geowave.css │ │ ├── installation-guide/ │ │ │ ├── 000-header.adoc │ │ │ ├── 005-standalone.adoc │ │ │ └── 010-rpm.adoc │ │ ├── overview/ │ │ │ ├── 000-header.adoc │ │ │ ├── 005-introduction.adoc │ │ │ ├── 010-overview.adoc │ │ │ ├── 015-screenshots.adoc │ │ │ └── 020-deeper.adoc │ │ ├── quickstart/ │ │ │ ├── 000-header.adoc │ │ │ ├── 005-preparation.adoc │ │ │ ├── 010-vector-demo.adoc │ │ │ ├── 015-raster-demo.adoc │ │ │ └── 020-further-documentation.adoc │ │ ├── quickstart-emr/ │ │ │ ├── aws-env/ │ │ │ │ ├── 000-quickstart-guide-intro.adoc │ │ │ │ ├── 007-quickstart-guide-scripts.adoc │ │ │ │ ├── 010-quickstart-guide-CLI.adoc │ │ │ │ ├── 015-quickstart-guide-GUI.adoc │ │ │ │ ├── 020-quickstart-guide-GUI-step-1.adoc │ │ │ │ ├── 025-quickstart-guide-GUI-step-2.adoc │ │ │ │ ├── 030-quickstart-guide-GUI-step-3.adoc │ │ │ │ ├── 035-quickstart-guide-GUI-step-4.adoc │ │ │ │ ├── 036-quickstart-guide-enable-jupyter.adoc │ │ │ │ ├── 037-quickstart-steps-overview.adoc │ │ │ │ └── 110-appendices.adoc │ │ │ ├── interact-cluster/ │ │ │ │ └── 001-hw-quickstart-guide-interact.adoc │ │ │ ├── jupyter/ │ │ │ │ └── 000-jupyter-main-page.adoc │ │ │ ├── quickstart-emr/ │ │ │ │ ├── 000-header.adoc │ │ │ │ ├── 005-environment-setup.adoc │ │ │ │ ├── 010-preparation.adoc │ │ │ │ ├── 015-vector-demo.adoc │ │ │ │ ├── 020-raster-demo.adoc │ │ │ │ ├── 025-notebook-examples.adoc │ │ │ │ └── 030-further-documentation.adoc │ │ │ ├── spatial-join/ │ │ │ │ └── 000-join-main-page.adoc │ │ │ └── zeppelin/ │ │ │ └── 000-zeppelin-main-page.adoc │ │ └── userguide/ │ │ ├── 000-header.adoc │ │ ├── 005-introduction.adoc │ │ ├── 010-cli.adoc │ │ ├── 015-datastores.adoc │ │ ├── 020-indices.adoc │ │ ├── 025-ingest.adoc │ │ ├── 
030-queries.adoc │ │ ├── 031-statistics.adoc │ │ ├── 035-analytics.adoc │ │ ├── 045-geoserver.adoc │ │ ├── 100-appendices.adoc │ │ ├── 101-migrating.adoc │ │ ├── 105-accumulo-config.adoc │ │ ├── 110-visibility-management.adoc │ │ ├── 115-appendix-security.adoc │ │ └── 120-puppet.adoc │ └── pom.xml ├── examples/ │ ├── README.md │ ├── data/ │ │ ├── notebooks/ │ │ │ ├── jupyter/ │ │ │ │ ├── geowave-gdelt.ipynb │ │ │ │ ├── geowave-gpx.ipynb │ │ │ │ ├── geowave-spatial-join.ipynb │ │ │ │ └── pygw-showcase.ipynb │ │ │ └── zeppelin/ │ │ │ ├── GDELT-Quick-Start.json │ │ │ ├── GeoWave-GPX-Demo.json │ │ │ └── README.md │ │ └── slds/ │ │ ├── DistributedRender.sld │ │ ├── KDEColorMap.sld │ │ └── SubsamplePoints.sld │ └── java-api/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── examples/ │ │ │ ├── ExamplePersistableRegistry.java │ │ │ ├── adapter/ │ │ │ │ ├── BasicDataTypeAdapterExample.java │ │ │ │ └── CustomAdapterExample.java │ │ │ ├── aggregation/ │ │ │ │ └── binning/ │ │ │ │ └── SpatialBinningAggregationExample.java │ │ │ ├── index/ │ │ │ │ └── CustomIndexExample.java │ │ │ ├── ingest/ │ │ │ │ ├── SimpleIngest.java │ │ │ │ ├── bulk/ │ │ │ │ │ ├── GeonamesDataFileInputFormat.java │ │ │ │ │ ├── GeonamesSimpleFeatureType.java │ │ │ │ │ └── SimpleFeatureToAccumuloKeyValueMapper.java │ │ │ │ └── plugin/ │ │ │ │ ├── CustomIngestFormat.java │ │ │ │ ├── CustomIngestPlugin.java │ │ │ │ └── CustomIngestPluginExample.java │ │ │ ├── query/ │ │ │ │ ├── CQLQueryExample.java │ │ │ │ ├── SpatialQueryExample.java │ │ │ │ └── SpatialTemporalQueryExample.java │ │ │ ├── spark/ │ │ │ │ └── GeoWaveRDDExample.java │ │ │ └── stats/ │ │ │ ├── CustomStatisticExample.java │ │ │ ├── ExampleRegisteredStatistics.java │ │ │ ├── SpatialBinningStatisticExample.java │ │ │ └── WordCountStatistic.java │ │ └── resources/ │ │ ├── META-INF/ │ │ │ └── services/ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ 
│ ├── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI │ │ ├── geonames.txt │ │ └── stateCapitals.csv │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── examples/ │ │ └── ingest/ │ │ ├── BulkIngestInputGenerationTest.java │ │ └── SimpleIngestTest.java │ └── resources/ │ ├── hbase.properties │ └── org/ │ └── locationtech/ │ └── geowave/ │ └── examples/ │ └── ingest/ │ └── geonames/ │ ├── barbados/ │ │ └── BB.txt │ └── readme.txt ├── extensions/ │ ├── adapters/ │ │ ├── auth/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── adapter/ │ │ │ │ │ └── auth/ │ │ │ │ │ ├── AuthorizationEntry.java │ │ │ │ │ ├── AuthorizationFactorySPI.java │ │ │ │ │ ├── AuthorizationSPI.java │ │ │ │ │ ├── AuthorizationSet.java │ │ │ │ │ ├── EmptyAuthorizationFactory.java │ │ │ │ │ ├── EmptyAuthorizationProvider.java │ │ │ │ │ ├── JsonFileAuthorizationFactory.java │ │ │ │ │ └── JsonFileAuthorizationProvider.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── adapter/ │ │ │ │ └── auth/ │ │ │ │ └── JsonFileAuthorizationAdapterTest.java │ │ │ └── resources/ │ │ │ └── jsonAuthfile.json │ │ ├── raster/ │ │ │ ├── .gitignore │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── adapter/ │ │ │ │ │ └── raster/ │ │ │ │ │ ├── FitToIndexGridCoverage.java │ │ │ │ │ ├── ImageWorkerPredefineStats.java │ │ │ │ │ ├── RasterAdapterPersistableRegistry.java │ │ │ │ │ ├── RasterUtils.java │ │ │ │ │ ├── Resolution.java │ │ │ │ │ ├── adapter/ │ │ │ │ │ │ ├── ClientMergeableRasterTile.java │ 
│ │ │ │ │ ├── GridCoverageWritable.java │ │ │ │ │ │ ├── InternalRasterDataAdapter.java │ │ │ │ │ │ ├── MosaicPropertyGenerator.java │ │ │ │ │ │ ├── RasterDataAdapter.java │ │ │ │ │ │ ├── RasterRegisteredIndexFieldMappers.java │ │ │ │ │ │ ├── RasterTile.java │ │ │ │ │ │ ├── RasterTileReader.java │ │ │ │ │ │ ├── RasterTileSpatialFieldMapper.java │ │ │ │ │ │ ├── RasterTileWriter.java │ │ │ │ │ │ ├── ServerMergeableRasterTile.java │ │ │ │ │ │ ├── SourceThresholdFixMosaicDescriptor.java │ │ │ │ │ │ ├── SourceThresholdMosaicDescriptor.java │ │ │ │ │ │ ├── merge/ │ │ │ │ │ │ │ ├── MultiAdapterServerMergeStrategy.java │ │ │ │ │ │ │ ├── RasterTileMergeStrategy.java │ │ │ │ │ │ │ ├── RasterTileRowTransform.java │ │ │ │ │ │ │ ├── ServerMergeStrategy.java │ │ │ │ │ │ │ ├── SimpleAbstractMergeStrategy.java │ │ │ │ │ │ │ ├── SingleAdapterServerMergeStrategy.java │ │ │ │ │ │ │ └── nodata/ │ │ │ │ │ │ │ ├── NoDataByFilter.java │ │ │ │ │ │ │ ├── NoDataBySampleIndex.java │ │ │ │ │ │ │ ├── NoDataMergeStrategy.java │ │ │ │ │ │ │ ├── NoDataMetadata.java │ │ │ │ │ │ │ └── NoDataMetadataFactory.java │ │ │ │ │ │ └── warp/ │ │ │ │ │ │ ├── WarpNearestOpImage.java │ │ │ │ │ │ ├── WarpOpImage.java │ │ │ │ │ │ └── WarpRIF.java │ │ │ │ │ ├── operations/ │ │ │ │ │ │ ├── DeletePyramidLevelCommand.java │ │ │ │ │ │ ├── InstallGdalCommand.java │ │ │ │ │ │ ├── RasterOperationCLIProvider.java │ │ │ │ │ │ ├── RasterSection.java │ │ │ │ │ │ ├── ResizeMRCommand.java │ │ │ │ │ │ └── options/ │ │ │ │ │ │ └── RasterTileResizeCommandLineOptions.java │ │ │ │ │ ├── plugin/ │ │ │ │ │ │ ├── GeoWaveGTRasterFormat.java │ │ │ │ │ │ ├── GeoWaveGTRasterFormatFactory.java │ │ │ │ │ │ ├── GeoWaveRasterConfig.java │ │ │ │ │ │ ├── GeoWaveRasterReader.java │ │ │ │ │ │ ├── GeoWaveRasterReaderState.java │ │ │ │ │ │ └── gdal/ │ │ │ │ │ │ ├── GDALGeoTiffFormat.java │ │ │ │ │ │ ├── GDALGeoTiffFormatFactory.java │ │ │ │ │ │ ├── GDALGeoTiffReader.java │ │ │ │ │ │ └── InstallGdal.java │ │ │ │ │ ├── resize/ │ │ │ │ │ │ ├── 
RasterTileResizeCombiner.java │ │ │ │ │ │ ├── RasterTileResizeHelper.java │ │ │ │ │ │ ├── RasterTileResizeJobRunner.java │ │ │ │ │ │ ├── RasterTileResizeMapper.java │ │ │ │ │ │ └── RasterTileResizeReducer.java │ │ │ │ │ ├── stats/ │ │ │ │ │ │ ├── HistogramConfig.java │ │ │ │ │ │ ├── RasterBoundingBoxStatistic.java │ │ │ │ │ │ ├── RasterFootprintStatistic.java │ │ │ │ │ │ ├── RasterHistogramStatistic.java │ │ │ │ │ │ ├── RasterOverviewStatistic.java │ │ │ │ │ │ ├── RasterRegisteredStatistics.java │ │ │ │ │ │ └── RasterStatisticQueryBuilder.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── DataBufferPersistenceUtils.java │ │ │ │ │ ├── SampleModelPersistenceUtils.java │ │ │ │ │ └── ZipUtils.java │ │ │ │ ├── protobuf/ │ │ │ │ │ ├── DataBuffer.proto │ │ │ │ │ └── SampleModel.proto │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.geotools.coverage.grid.io.GridFormatFactorySpi │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ ├── org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI │ │ │ │ └── org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── adapter/ │ │ │ └── raster/ │ │ │ ├── RasterUtilsTest.java │ │ │ └── WebMercatorRasterTest.java │ │ └── vector/ │ │ ├── .gitignore │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── avro/ │ │ │ │ └── AvroSimpleFeature.avsc │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ ├── geotools/ │ │ │ │ │ ├── feature/ │ │ │ │ │ │ └── simple/ │ │ │ │ │ │ └── OptimizedSimpleFeatureBuilder.java │ │ │ │ │ ├── process/ │ │ │ │ │ │ └── function/ │ │ │ │ │ │ └── DistributedRenderProcessUtils.java │ │ │ │ │ └── renderer/ │ │ │ │ │ └── lite/ │ │ │ │ │ └── DistributedRenderer.java │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── adapter/ │ │ │ │ └── vector/ │ │ │ │ ├── 
FeatureAdapterPersistableRegistry.java │ │ │ │ ├── FeatureDataAdapter.java │ │ │ │ ├── FeatureRowBuilder.java │ │ │ │ ├── FeatureWritable.java │ │ │ │ ├── GeoWaveAvroFeatureUtils.java │ │ │ │ ├── cli/ │ │ │ │ │ ├── VectorCLIProvider.java │ │ │ │ │ └── VectorSection.java │ │ │ │ ├── delete/ │ │ │ │ │ └── CQLDelete.java │ │ │ │ ├── export/ │ │ │ │ │ ├── VectorExportMapper.java │ │ │ │ │ ├── VectorExportOptions.java │ │ │ │ │ ├── VectorLocalExportCommand.java │ │ │ │ │ ├── VectorLocalExportOptions.java │ │ │ │ │ ├── VectorMRExportCommand.java │ │ │ │ │ ├── VectorMRExportJobRunner.java │ │ │ │ │ └── VectorMRExportOptions.java │ │ │ │ ├── field/ │ │ │ │ │ └── SimpleFeatureSerializationProvider.java │ │ │ │ ├── index/ │ │ │ │ │ ├── ChooseBestMatchIndexQueryStrategy.java │ │ │ │ │ ├── ChooseHeuristicMatchIndexQueryStrategy.java │ │ │ │ │ ├── ChooseLocalityPreservingQueryStrategy.java │ │ │ │ │ ├── IndexQueryStrategySPI.java │ │ │ │ │ ├── SimpleFeaturePrimaryIndexConfiguration.java │ │ │ │ │ ├── SimpleFeatureSecondaryIndexConfiguration.java │ │ │ │ │ └── VectorTextIndexEntryConverter.java │ │ │ │ ├── ingest/ │ │ │ │ │ ├── AbstractSimpleFeatureIngestFormat.java │ │ │ │ │ ├── AbstractSimpleFeatureIngestPlugin.java │ │ │ │ │ ├── CQLFilterOptionProvider.java │ │ │ │ │ ├── DataSchemaOptionProvider.java │ │ │ │ │ ├── FeatureSerializationOptionProvider.java │ │ │ │ │ ├── GeometrySimpOptionProvider.java │ │ │ │ │ ├── MinimalSimpleFeatureIngestFormat.java │ │ │ │ │ ├── MinimalSimpleFeatureIngestPlugin.java │ │ │ │ │ ├── SerializableSimpleFeatureIngestOptions.java │ │ │ │ │ ├── SimpleFeatureIngestOptions.java │ │ │ │ │ └── TypeNameOptionProvider.java │ │ │ │ ├── plugin/ │ │ │ │ │ ├── DecimationProcess.java │ │ │ │ │ ├── DistributedRenderProcess.java │ │ │ │ │ ├── GeoWaveDataStoreComponents.java │ │ │ │ │ ├── GeoWaveFeatureCollection.java │ │ │ │ │ ├── GeoWaveFeatureReader.java │ │ │ │ │ ├── GeoWaveFeatureSource.java │ │ │ │ │ ├── GeoWaveFeatureWriter.java │ │ │ │ │ ├── 
GeoWaveGSProcessFactory.java │ │ │ │ │ ├── GeoWaveGTDataStore.java │ │ │ │ │ ├── GeoWaveGTDataStoreFactory.java │ │ │ │ │ ├── GeoWaveGTPluginUtils.java │ │ │ │ │ ├── GeoWavePluginConfig.java │ │ │ │ │ ├── GeoWavePluginException.java │ │ │ │ │ ├── GeoWaveQueryCaps.java │ │ │ │ │ ├── InternalProcessFactory.java │ │ │ │ │ ├── QueryIssuer.java │ │ │ │ │ ├── SubsampleProcess.java │ │ │ │ │ ├── lock/ │ │ │ │ │ │ ├── AbstractLockingManagement.java │ │ │ │ │ │ ├── AuthorizedLock.java │ │ │ │ │ │ ├── LockingManagement.java │ │ │ │ │ │ ├── LockingManagementFactory.java │ │ │ │ │ │ ├── MemoryLockManager.java │ │ │ │ │ │ └── MemoryLockManagerFactory.java │ │ │ │ │ └── transaction/ │ │ │ │ │ ├── AbstractTransactionManagement.java │ │ │ │ │ ├── GeoWaveAutoCommitTransactionState.java │ │ │ │ │ ├── GeoWaveEmptyTransaction.java │ │ │ │ │ ├── GeoWaveTransaction.java │ │ │ │ │ ├── GeoWaveTransactionManagement.java │ │ │ │ │ ├── GeoWaveTransactionManagementState.java │ │ │ │ │ ├── GeoWaveTransactionState.java │ │ │ │ │ ├── MemoryTransactionsAllocator.java │ │ │ │ │ ├── StatisticsCache.java │ │ │ │ │ └── TransactionsAllocator.java │ │ │ │ ├── query/ │ │ │ │ │ ├── GeoJsonQueryOutputFormat.java │ │ │ │ │ ├── ShapefileQueryOutputFormat.java │ │ │ │ │ └── aggregation/ │ │ │ │ │ └── VectorCountAggregation.java │ │ │ │ ├── render/ │ │ │ │ │ ├── AsyncQueueFeatureCollection.java │ │ │ │ │ ├── DistributedRenderAggregation.java │ │ │ │ │ ├── DistributedRenderCallback.java │ │ │ │ │ ├── DistributedRenderMapOutputFormat.java │ │ │ │ │ ├── DistributedRenderOptions.java │ │ │ │ │ ├── DistributedRenderResult.java │ │ │ │ │ ├── DistributedRenderWMSFacade.java │ │ │ │ │ ├── InternalDistributedRenderProcess.java │ │ │ │ │ ├── PersistableComposite.java │ │ │ │ │ └── PersistableRenderedImage.java │ │ │ │ └── util/ │ │ │ │ ├── DateUtilities.java │ │ │ │ ├── FeatureDataUtils.java │ │ │ │ ├── FeatureGeometryUtils.java │ │ │ │ ├── FeatureTranslatingIterator.java │ │ │ │ ├── PolygonAreaCalculator.java │ │ │ │ 
├── QueryIndexHelper.java │ │ │ │ ├── SimpleFeatureUserDataConfigurationSet.java │ │ │ │ └── SimpleFeatureWrapper.java │ │ │ ├── protobuf/ │ │ │ │ └── CqlHBaseQueryFilters.proto │ │ │ └── resources/ │ │ │ ├── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.geotools.data.DataStoreFactorySpi │ │ │ │ ├── org.geotools.process.ProcessFactory │ │ │ │ ├── org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI │ │ │ │ ├── org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagementFactory │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi │ │ │ └── applicationContext.xml │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── adapter/ │ │ │ └── vector/ │ │ │ ├── BaseDataStoreTest.java │ │ │ ├── FeatureDataAdapterTest.java │ │ │ ├── FeatureWritableTest.java │ │ │ ├── index/ │ │ │ │ ├── ChooseBestMatchIndexQueryStrategyTest.java │ │ │ │ ├── ChooseHeuristicMatchQueryStrategyTest.java │ │ │ │ └── ChooseLocalityPreservingQueryStrategyTest.java │ │ │ ├── plugin/ │ │ │ │ ├── ExtractGeometryFilterVisitorTest.java │ │ │ │ ├── ExtractTimeFilterVisitorTest.java │ │ │ │ ├── GeoToolsAttributesSubsetTest.java │ │ │ │ ├── GeoWaveFeatureReaderTest.java │ │ │ │ ├── GeoWaveFeatureSourceTest.java │ │ │ │ ├── GeoWavePluginConfigTest.java │ │ │ │ ├── WFSBoundedQueryTest.java │ │ │ │ ├── WFSBoundedSpatialQueryTest.java │ │ │ │ ├── WFSSpatialTest.java │ │ │ │ ├── WFSTemporalQueryTest.java │ │ │ │ ├── WFSTransactionTest.java │ │ │ │ └── lock/ │ │ │ │ └── MemoryLockManagerTest.java │ │ │ ├── query/ │ │ │ │ ├── CqlQueryFilterIteratorTest.java │ │ │ │ ├── TemporalRangeTest.java │ │ │ │ └── cql/ │ │ │ │ ├── CQLQueryFilterTest.java │ │ │ │ ├── CQLQueryTest.java │ │ │ │ └── FilterToCQLToolTest.java │ │ │ ├── stats/ │ │ │ │ ├── CountMinSketchStatisticsTest.java │ │ 
│ │ ├── FixedBinNumericHistogramStatisticTest.java │ │ │ │ ├── HyperLogLogStaticticsTest.java │ │ │ │ └── NumericHistogramStatisticsTest.java │ │ │ └── util/ │ │ │ ├── FeatureDataUtilsTest.java │ │ │ ├── QueryIndexHelperTest.java │ │ │ └── TimeDescriptorsTest.java │ │ └── resources/ │ │ └── statsFile.json │ ├── cli/ │ │ ├── accumulo-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── accumulo/ │ │ │ │ └── cli/ │ │ │ │ ├── AccumuloMiniCluster.java │ │ │ │ ├── AccumuloMiniClusterShell.java │ │ │ │ ├── AccumuloRunServerCommand.java │ │ │ │ ├── EmbeddedAccumuloOperationProvider.java │ │ │ │ ├── MiniAccumuloClusterFactory.java │ │ │ │ └── MiniAccumuloUtils.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ ├── bigtable-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── bigtable/ │ │ │ │ └── cli/ │ │ │ │ ├── BigtableEmulator.java │ │ │ │ ├── BigtableOperationProvider.java │ │ │ │ ├── BigtableSection.java │ │ │ │ ├── RunBigtableEmulator.java │ │ │ │ └── RunBigtableEmulatorOptions.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ ├── cassandra-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── cassandra/ │ │ │ │ └── cli/ │ │ │ │ ├── CassandraOperationProvider.java │ │ │ │ ├── CassandraSection.java │ │ │ │ ├── CassandraServer.java │ │ │ │ └── RunCassandraServer.java │ │ │ └── resources/ │ │ │ ├── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ └── cassandra-default.yaml │ │ ├── 
debug/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── cli/ │ │ │ │ └── debug/ │ │ │ │ ├── AbstractGeoWaveQuery.java │ │ │ │ ├── BBOXQuery.java │ │ │ │ ├── CQLQuery.java │ │ │ │ ├── ClientSideCQLQuery.java │ │ │ │ ├── DebugOperationsProvider.java │ │ │ │ ├── DebugSection.java │ │ │ │ ├── FullTableScan.java │ │ │ │ ├── MinimalFullTable.java │ │ │ │ └── SparkQuery.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ ├── dynamodb-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── dynamodb/ │ │ │ │ └── cli/ │ │ │ │ ├── DynamoDBLocal.java │ │ │ │ ├── DynamoDBOperationProvider.java │ │ │ │ ├── DynamoDBSection.java │ │ │ │ ├── RunDynamoDBLocal.java │ │ │ │ └── RunDynamoDBLocalOptions.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ ├── geoserver/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── cli/ │ │ │ │ │ └── geoserver/ │ │ │ │ │ ├── ConfigGeoServerCommand.java │ │ │ │ │ ├── GeoServerCommand.java │ │ │ │ │ ├── GeoServerConfig.java │ │ │ │ │ ├── GeoServerOperationProvider.java │ │ │ │ │ ├── GeoServerRemoveCommand.java │ │ │ │ │ ├── GeoServerRestClient.java │ │ │ │ │ ├── GeoServerSSLConfigurationOptions.java │ │ │ │ │ ├── GeoServerSection.java │ │ │ │ │ ├── SSLOptionAnnotation.java │ │ │ │ │ ├── StoreSSLConfigurationOptions.java │ │ │ │ │ ├── constants/ │ │ │ │ │ │ └── GeoServerConstants.java │ │ │ │ │ ├── coverage/ │ │ │ │ │ │ ├── CoverageOperationProvider.java │ │ │ │ │ │ ├── CoverageSection.java │ │ │ │ │ │ ├── GeoServerAddCoverageCommand.java │ │ │ │ │ │ ├── 
GeoServerGetCoverageCommand.java │ │ │ │ │ │ ├── GeoServerListCoveragesCommand.java │ │ │ │ │ │ └── GeoServerRemoveCoverageCommand.java │ │ │ │ │ ├── cvstore/ │ │ │ │ │ │ ├── CoverageStoreOperationProvider.java │ │ │ │ │ │ ├── CoverageStoreSection.java │ │ │ │ │ │ ├── GeoServerAddCoverageStoreCommand.java │ │ │ │ │ │ ├── GeoServerGetCoverageStoreCommand.java │ │ │ │ │ │ ├── GeoServerListCoverageStoresCommand.java │ │ │ │ │ │ └── GeoServerRemoveCoverageStoreCommand.java │ │ │ │ │ ├── datastore/ │ │ │ │ │ │ ├── DatastoreOperationProvider.java │ │ │ │ │ │ ├── DatastoreSection.java │ │ │ │ │ │ ├── GeoServerAddDatastoreCommand.java │ │ │ │ │ │ ├── GeoServerGetDatastoreCommand.java │ │ │ │ │ │ ├── GeoServerGetStoreAdapterCommand.java │ │ │ │ │ │ ├── GeoServerListDatastoresCommand.java │ │ │ │ │ │ └── GeoServerRemoveDatastoreCommand.java │ │ │ │ │ ├── featurelayer/ │ │ │ │ │ │ ├── FeatureLayerOperationProvider.java │ │ │ │ │ │ ├── FeatureLayerSection.java │ │ │ │ │ │ ├── GeoServerAddFeatureLayerCommand.java │ │ │ │ │ │ ├── GeoServerGetFeatureLayerCommand.java │ │ │ │ │ │ ├── GeoServerListFeatureLayersCommand.java │ │ │ │ │ │ └── GeoServerRemoveFeatureLayerCommand.java │ │ │ │ │ ├── layer/ │ │ │ │ │ │ ├── GeoServerAddLayerCommand.java │ │ │ │ │ │ ├── LayerOperationProvider.java │ │ │ │ │ │ └── LayerSection.java │ │ │ │ │ ├── style/ │ │ │ │ │ │ ├── GeoServerAddStyleCommand.java │ │ │ │ │ │ ├── GeoServerGetStyleCommand.java │ │ │ │ │ │ ├── GeoServerListStylesCommand.java │ │ │ │ │ │ ├── GeoServerRemoveStyleCommand.java │ │ │ │ │ │ ├── GeoServerSetLayerStyleCommand.java │ │ │ │ │ │ ├── StyleOperationProvider.java │ │ │ │ │ │ └── StyleSection.java │ │ │ │ │ └── workspace/ │ │ │ │ │ ├── GeoServerAddWorkspaceCommand.java │ │ │ │ │ ├── GeoServerListWorkspacesCommand.java │ │ │ │ │ ├── GeoServerRemoveWorkspaceCommand.java │ │ │ │ │ ├── WorkspaceOperationProvider.java │ │ │ │ │ └── WorkspaceSection.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── 
org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── cli/ │ │ │ └── geoserver/ │ │ │ └── GeoServerRestClientTest.java │ │ ├── geoserver-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── cli/ │ │ │ │ └── geoserver/ │ │ │ │ ├── RunGeoServer.java │ │ │ │ ├── RunGeoServerOperationProvider.java │ │ │ │ └── RunGeoServerOptions.java │ │ │ └── resources/ │ │ │ ├── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ └── log4j-geoserver.properties │ │ ├── hbase-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── hbase/ │ │ │ │ └── cli/ │ │ │ │ ├── GeoWaveHBaseUtility.java │ │ │ │ ├── HBaseMiniCluster.java │ │ │ │ ├── HBaseMiniClusterClassLoader.java │ │ │ │ ├── HBaseSection.java │ │ │ │ ├── HBaseTestVisibilityLabelServiceImpl.java │ │ │ │ ├── RunHBaseServer.java │ │ │ │ ├── RunHBaseServerOperationProvider.java │ │ │ │ ├── RunHBaseServerOptions.java │ │ │ │ └── ZookeeperMiniCluster.java │ │ │ └── resources/ │ │ │ ├── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ └── hbase.properties │ │ ├── kudu-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── kudu/ │ │ │ │ └── cli/ │ │ │ │ ├── KuduLocal.java │ │ │ │ ├── KuduOperationProvider.java │ │ │ │ ├── KuduSection.java │ │ │ │ ├── RunKuduLocal.java │ │ │ │ └── RunKuduLocalOptions.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ ├── landsat8/ │ │ │ ├── README.md │ │ │ ├── pom.xml │ 
│ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── landsat8/ │ │ │ │ │ ├── AnalyzeRunner.java │ │ │ │ │ ├── BandFeatureIterator.java │ │ │ │ │ ├── DownloadRunner.java │ │ │ │ │ ├── IngestRunner.java │ │ │ │ │ ├── Landsat8AnalyzeCommand.java │ │ │ │ │ ├── Landsat8BandConverterSpi.java │ │ │ │ │ ├── Landsat8BasicCommandLineOptions.java │ │ │ │ │ ├── Landsat8DownloadCommand.java │ │ │ │ │ ├── Landsat8DownloadCommandLineOptions.java │ │ │ │ │ ├── Landsat8IngestCommand.java │ │ │ │ │ ├── Landsat8IngestRasterCommand.java │ │ │ │ │ ├── Landsat8IngestVectorCommand.java │ │ │ │ │ ├── Landsat8OperationProvider.java │ │ │ │ │ ├── Landsat8RasterIngestCommandLineOptions.java │ │ │ │ │ ├── Landsat8Section.java │ │ │ │ │ ├── PropertyIgnoringFilterVisitor.java │ │ │ │ │ ├── RasterIngestRunner.java │ │ │ │ │ ├── SceneFeatureIterator.java │ │ │ │ │ ├── VectorIngestRunner.java │ │ │ │ │ ├── VectorOverrideCommandLineOptions.java │ │ │ │ │ ├── WRS2GeometryStore.java │ │ │ │ │ ├── index/ │ │ │ │ │ │ ├── Landsat8PersistableRegistry.java │ │ │ │ │ │ └── Landsat8TemporalBinningStrategy.java │ │ │ │ │ └── qa/ │ │ │ │ │ └── QABandToIceMaskConverter.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── org.locationtech.geowave.format.landsat.Landsat8BandConverterSpi │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── landsat8/ │ │ │ │ ├── AnalyzeRunnerTest.java │ │ │ │ ├── DownloadRunnerTest.java │ │ │ │ ├── IngestRunnerTest.java │ │ │ │ ├── RasterIngestRunnerTest.java │ │ │ │ ├── SceneFeatureIteratorTest.java │ │ │ │ ├── Tests.java │ │ │ │ ├── VectorIngestRunnerTest.java │ │ │ │ └── WRS2GeometryStoreTest.java │ │ │ └── resources/ │ │ │ 
└── geowave-config.properties │ │ ├── osm/ │ │ │ ├── .gitignore │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── avro/ │ │ │ │ │ ├── LongArray.avsc │ │ │ │ │ └── OsmAvro.avsc │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── cli/ │ │ │ │ │ └── osm/ │ │ │ │ │ ├── accumulo/ │ │ │ │ │ │ └── osmschema/ │ │ │ │ │ │ ├── ColumnFamily.java │ │ │ │ │ │ ├── ColumnQualifier.java │ │ │ │ │ │ ├── Constants.java │ │ │ │ │ │ └── Schema.java │ │ │ │ │ ├── mapreduce/ │ │ │ │ │ │ ├── Convert/ │ │ │ │ │ │ │ ├── OSMConversionMapper.java │ │ │ │ │ │ │ ├── OSMConversionRunner.java │ │ │ │ │ │ │ ├── OsmProvider/ │ │ │ │ │ │ │ │ └── OsmProvider.java │ │ │ │ │ │ │ └── SimpleFeatureGenerator.java │ │ │ │ │ │ └── Ingest/ │ │ │ │ │ │ ├── OSMMapperBase.java │ │ │ │ │ │ ├── OSMNodeMapper.java │ │ │ │ │ │ ├── OSMRelationMapper.java │ │ │ │ │ │ ├── OSMRunner.java │ │ │ │ │ │ └── OSMWayMapper.java │ │ │ │ │ ├── operations/ │ │ │ │ │ │ ├── IngestOSMToGeoWaveCommand.java │ │ │ │ │ │ ├── OSMOperationProvider.java │ │ │ │ │ │ ├── OSMSection.java │ │ │ │ │ │ ├── StageOSMToHDFSCommand.java │ │ │ │ │ │ └── options/ │ │ │ │ │ │ └── OSMIngestCommandArgs.java │ │ │ │ │ ├── osmfeature/ │ │ │ │ │ │ ├── FeatureConfigParser.java │ │ │ │ │ │ └── types/ │ │ │ │ │ │ ├── attributes/ │ │ │ │ │ │ │ ├── AttributeDefinition.java │ │ │ │ │ │ │ ├── AttributeType.java │ │ │ │ │ │ │ └── AttributeTypes.java │ │ │ │ │ │ └── features/ │ │ │ │ │ │ ├── FeatureDefinition.java │ │ │ │ │ │ ├── FeatureDefinitionSet.java │ │ │ │ │ │ └── FeatureType.java │ │ │ │ │ ├── parser/ │ │ │ │ │ │ ├── OsmPbfParser.java │ │ │ │ │ │ ├── OsmPbfParserOptions.java │ │ │ │ │ │ └── OsmXmlLoader.java │ │ │ │ │ └── types/ │ │ │ │ │ └── TypeUtils.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ └── test/ │ │ │ ├── data/ │ │ │ │ └── test_mapping.json │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── 
locationtech/ │ │ │ └── geowave/ │ │ │ └── cli/ │ │ │ └── osm/ │ │ │ ├── ColumnQualifierTest.java │ │ │ └── osmfeature/ │ │ │ └── FeatureConfigParserTest.java │ │ ├── redis-embed/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── redis/ │ │ │ │ └── cli/ │ │ │ │ ├── RedisOperationProvider.java │ │ │ │ ├── RedisSection.java │ │ │ │ ├── RunRedisServer.java │ │ │ │ └── RunRedisServerOptions.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── sentinel2/ │ │ ├── README.md │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── sentinel2/ │ │ │ │ ├── AnalyzeRunner.java │ │ │ │ ├── BandFeatureIterator.java │ │ │ │ ├── DownloadRunner.java │ │ │ │ ├── IngestRunner.java │ │ │ │ ├── PropertyIgnoringFilterVisitor.java │ │ │ │ ├── RasterBandData.java │ │ │ │ ├── RasterIngestRunner.java │ │ │ │ ├── SceneFeatureIterator.java │ │ │ │ ├── Sentinel2AnalyzeCommand.java │ │ │ │ ├── Sentinel2BandConverterSpi.java │ │ │ │ ├── Sentinel2BasicCommandLineOptions.java │ │ │ │ ├── Sentinel2DownloadCommand.java │ │ │ │ ├── Sentinel2DownloadCommandLineOptions.java │ │ │ │ ├── Sentinel2ImageryProvider.java │ │ │ │ ├── Sentinel2ImageryProvidersCommand.java │ │ │ │ ├── Sentinel2IngestCommand.java │ │ │ │ ├── Sentinel2IngestRasterCommand.java │ │ │ │ ├── Sentinel2IngestVectorCommand.java │ │ │ │ ├── Sentinel2OperationProvider.java │ │ │ │ ├── Sentinel2RasterIngestCommandLineOptions.java │ │ │ │ ├── Sentinel2Section.java │ │ │ │ ├── VectorIngestRunner.java │ │ │ │ ├── VectorOverrideCommandLineOptions.java │ │ │ │ ├── amazon/ │ │ │ │ │ └── AmazonImageryProvider.java │ │ │ │ └── theia/ │ │ │ │ └── TheiaImageryProvider.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── 
org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── format/ │ │ │ └── sentinel2/ │ │ │ ├── AnalyzeRunnerTest.java │ │ │ ├── DownloadRunnerTest.java │ │ │ ├── IngestRunnerTest.java │ │ │ ├── RasterIngestRunnerTest.java │ │ │ ├── SceneFeatureIteratorTest.java │ │ │ ├── Tests.java │ │ │ └── VectorIngestRunnerTest.java │ │ └── resources/ │ │ ├── auth_theia.txt │ │ └── geowave-config.properties │ ├── datastores/ │ │ ├── accumulo/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── datastore/ │ │ │ │ │ └── accumulo/ │ │ │ │ │ ├── AccumuloDataStore.java │ │ │ │ │ ├── AccumuloDataStoreFactory.java │ │ │ │ │ ├── AccumuloFactoryHelper.java │ │ │ │ │ ├── AccumuloRow.java │ │ │ │ │ ├── AccumuloStoreFactoryFamily.java │ │ │ │ │ ├── IteratorConfig.java │ │ │ │ │ ├── MergingCombiner.java │ │ │ │ │ ├── MergingVisibilityCombiner.java │ │ │ │ │ ├── RowMergingCombiner.java │ │ │ │ │ ├── RowMergingVisibilityCombiner.java │ │ │ │ │ ├── cli/ │ │ │ │ │ │ ├── AbstractSplitsCommand.java │ │ │ │ │ │ ├── AccumuloOperationProvider.java │ │ │ │ │ │ ├── AccumuloSection.java │ │ │ │ │ │ ├── PreSplitPartitionIdCommand.java │ │ │ │ │ │ ├── SplitEqualIntervalCommand.java │ │ │ │ │ │ ├── SplitNumRecordsCommand.java │ │ │ │ │ │ └── SplitQuantileCommand.java │ │ │ │ │ ├── config/ │ │ │ │ │ │ ├── AccumuloOptions.java │ │ │ │ │ │ └── AccumuloRequiredOptions.java │ │ │ │ │ ├── iterators/ │ │ │ │ │ │ ├── AggregationIterator.java │ │ │ │ │ │ ├── AttributeSubsettingIterator.java │ │ │ │ │ │ ├── ExceptionHandlingFilter.java │ │ │ │ │ │ ├── ExceptionHandlingSkippingIterator.java │ │ │ │ │ │ ├── ExceptionHandlingTransformingIterator.java │ │ │ │ │ │ ├── FixedCardinalitySkippingIterator.java │ │ │ │ │ │ ├── NumericIndexStrategyFilterIterator.java │ │ │ │ │ │ ├── QueryFilterIterator.java │ │ │ │ │ │ ├── 
SecondaryIndexQueryFilterIterator.java │ │ │ │ │ │ ├── SingleEntryFilterIterator.java │ │ │ │ │ │ ├── VersionIterator.java │ │ │ │ │ │ ├── WholeRowAggregationIterator.java │ │ │ │ │ │ └── WholeRowQueryFilterIterator.java │ │ │ │ │ ├── mapreduce/ │ │ │ │ │ │ └── AccumuloSplitsProvider.java │ │ │ │ │ ├── operations/ │ │ │ │ │ │ ├── AbstractAccumuloWriter.java │ │ │ │ │ │ ├── AccumuloDataIndexWriter.java │ │ │ │ │ │ ├── AccumuloDeleter.java │ │ │ │ │ │ ├── AccumuloMetadataDeleter.java │ │ │ │ │ │ ├── AccumuloMetadataReader.java │ │ │ │ │ │ ├── AccumuloMetadataWriter.java │ │ │ │ │ │ ├── AccumuloOperations.java │ │ │ │ │ │ ├── AccumuloReader.java │ │ │ │ │ │ ├── AccumuloRowDeleter.java │ │ │ │ │ │ ├── AccumuloWriter.java │ │ │ │ │ │ └── config/ │ │ │ │ │ │ └── AccumuloDatastoreDefaultConfigProvider.java │ │ │ │ │ ├── split/ │ │ │ │ │ │ ├── AbstractAccumuloSplitsOperation.java │ │ │ │ │ │ └── SplitCommandLineOptions.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── AccumuloKeyValuePairGenerator.java │ │ │ │ │ ├── AccumuloUtils.java │ │ │ │ │ ├── ConnectorPool.java │ │ │ │ │ ├── PersistentDataFormatter.java │ │ │ │ │ └── ScannerClosableWrapper.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ └── test/ │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ ├── bigtable/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── bigtable/ │ │ │ │ ├── BigTableConnectionPool.java │ │ │ │ ├── BigTableDataStoreFactory.java │ │ │ │ ├── BigTableFactoryHelper.java │ │ │ │ ├── BigTableStoreFactoryFamily.java │ │ │ │ ├── config/ │ │ │ │ │ └── 
BigTableOptions.java │ │ │ │ └── operations/ │ │ │ │ ├── BigTableOperations.java │ │ │ │ └── BigtableReader.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ ├── cassandra/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── datastore/ │ │ │ │ │ └── cassandra/ │ │ │ │ │ ├── CassandraDataStore.java │ │ │ │ │ ├── CassandraDataStoreFactory.java │ │ │ │ │ ├── CassandraDefaultConfigProvider.java │ │ │ │ │ ├── CassandraFactoryHelper.java │ │ │ │ │ ├── CassandraRow.java │ │ │ │ │ ├── CassandraStoreFactoryFamily.java │ │ │ │ │ ├── config/ │ │ │ │ │ │ ├── CassandraOptions.java │ │ │ │ │ │ └── CassandraRequiredOptions.java │ │ │ │ │ ├── operations/ │ │ │ │ │ │ ├── BatchHandler.java │ │ │ │ │ │ ├── BatchedRangeRead.java │ │ │ │ │ │ ├── BatchedWrite.java │ │ │ │ │ │ ├── CassandraDeleter.java │ │ │ │ │ │ ├── CassandraMetadataDeleter.java │ │ │ │ │ │ ├── CassandraMetadataReader.java │ │ │ │ │ │ ├── CassandraMetadataWriter.java │ │ │ │ │ │ ├── CassandraOperations.java │ │ │ │ │ │ ├── CassandraReader.java │ │ │ │ │ │ ├── CassandraWriter.java │ │ │ │ │ │ └── RowRead.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── CassandraUtils.java │ │ │ │ │ ├── KeyspaceStatePool.java │ │ │ │ │ └── SessionPool.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── datastore/ │ │ │ └── cassandra/ │ │ │ ├── CassandraOptionsTest.java │ │ │ └── CassandraRequiredOptionsTest.java │ │ ├── dynamodb/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── datastore/ │ │ │ │ │ └── 
dynamodb/ │ │ │ │ │ ├── DynamoDBClientPool.java │ │ │ │ │ ├── DynamoDBDataStore.java │ │ │ │ │ ├── DynamoDBDataStoreFactory.java │ │ │ │ │ ├── DynamoDBFactoryHelper.java │ │ │ │ │ ├── DynamoDBRow.java │ │ │ │ │ ├── DynamoDBStoreFactoryFamily.java │ │ │ │ │ ├── config/ │ │ │ │ │ │ └── DynamoDBOptions.java │ │ │ │ │ ├── operations/ │ │ │ │ │ │ ├── DynamoDBDeleter.java │ │ │ │ │ │ ├── DynamoDBMetadataDeleter.java │ │ │ │ │ │ ├── DynamoDBMetadataReader.java │ │ │ │ │ │ ├── DynamoDBMetadataWriter.java │ │ │ │ │ │ ├── DynamoDBOperations.java │ │ │ │ │ │ ├── DynamoDBReader.java │ │ │ │ │ │ └── DynamoDBWriter.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── AsyncPaginatedQuery.java │ │ │ │ │ ├── AsyncPaginatedScan.java │ │ │ │ │ ├── DynamoDBUtils.java │ │ │ │ │ ├── LazyPaginatedQuery.java │ │ │ │ │ └── LazyPaginatedScan.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── datastore/ │ │ │ └── dynamodb/ │ │ │ └── util/ │ │ │ └── DynamoDBUtilsTest.java │ │ ├── filesystem/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── filesystem/ │ │ │ │ ├── FileSystemDataFormatter.java │ │ │ │ ├── FileSystemDataFormatterRegistry.java │ │ │ │ ├── FileSystemDataFormatterSpi.java │ │ │ │ ├── FileSystemDataStore.java │ │ │ │ ├── FileSystemDataStoreFactory.java │ │ │ │ ├── FileSystemDefaultConfigProvider.java │ │ │ │ ├── FileSystemFactoryHelper.java │ │ │ │ ├── FileSystemStoreFactoryFamily.java │ │ │ │ ├── cli/ │ │ │ │ │ ├── FileSystemOperationProvider.java │ │ │ │ │ ├── FileSystemSection.java │ │ │ │ │ └── ListFormatsCommand.java │ │ │ │ ├── config/ │ │ │ │ │ └── FileSystemOptions.java │ │ │ │ ├── operations/ │ │ │ │ │ ├── FileSystemDataIndexWriter.java │ │ │ │ │ ├── FileSystemMetadataDeleter.java │ │ │ 
│ │ ├── FileSystemMetadataReader.java │ │ │ │ │ ├── FileSystemMetadataWriter.java │ │ │ │ │ ├── FileSystemOperations.java │ │ │ │ │ ├── FileSystemQueryExecution.java │ │ │ │ │ ├── FileSystemReader.java │ │ │ │ │ ├── FileSystemRowDeleter.java │ │ │ │ │ └── FileSystemWriter.java │ │ │ │ └── util/ │ │ │ │ ├── AbstractFileSystemIterator.java │ │ │ │ ├── AbstractFileSystemTable.java │ │ │ │ ├── BasicFileSystemKey.java │ │ │ │ ├── DataFormatterCache.java │ │ │ │ ├── DataIndexRowIterator.java │ │ │ │ ├── FileSystemClient.java │ │ │ │ ├── FileSystemClientCache.java │ │ │ │ ├── FileSystemDataIndexTable.java │ │ │ │ ├── FileSystemGeoWaveMetadata.java │ │ │ │ ├── FileSystemIndexKeyWrapper.java │ │ │ │ ├── FileSystemIndexTable.java │ │ │ │ ├── FileSystemKey.java │ │ │ │ ├── FileSystemMetadataIterator.java │ │ │ │ ├── FileSystemMetadataTable.java │ │ │ │ ├── FileSystemRow.java │ │ │ │ ├── FileSystemRowIterator.java │ │ │ │ ├── FileSystemUtils.java │ │ │ │ └── GeoWaveBinaryDataFormatter.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ ├── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ └── org.locationtech.geowave.datastore.filesystem.FileSystemDataFormatterSpi │ │ ├── hbase/ │ │ │ ├── coprocessors/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ └── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── datastore/ │ │ │ │ │ └── hbase/ │ │ │ │ │ └── coprocessors/ │ │ │ │ │ ├── AggregationEndpoint.java │ │ │ │ │ ├── HBaseBulkDeleteEndpoint.java │ │ │ │ │ ├── ServerSideOperationsObserver.java │ │ │ │ │ └── VersionEndpoint.java │ │ │ │ └── resources/ │ │ │ │ ├── META-INF/ │ │ │ │ │ └── services/ │ │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ 
│ │ │ │ ├── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ │ │ └── org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi │ │ │ │ └── hbase.properties │ │ │ └── core/ │ │ │ ├── .gitignore │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── hbase/ │ │ │ │ ├── HBaseDataStore.java │ │ │ │ ├── HBaseDataStoreFactory.java │ │ │ │ ├── HBaseFactoryHelper.java │ │ │ │ ├── HBasePersistableRegistry.java │ │ │ │ ├── HBaseRow.java │ │ │ │ ├── HBaseStoreFactoryFamily.java │ │ │ │ ├── config/ │ │ │ │ │ ├── HBaseOptions.java │ │ │ │ │ └── HBaseRequiredOptions.java │ │ │ │ ├── filters/ │ │ │ │ │ ├── FixedCardinalitySkippingFilter.java │ │ │ │ │ ├── HBaseDistributableFilter.java │ │ │ │ │ ├── HBaseMergingFilter.java │ │ │ │ │ ├── HBaseNumericIndexStrategyFilter.java │ │ │ │ │ └── SingleEntryFilter.java │ │ │ │ ├── mapreduce/ │ │ │ │ │ └── HBaseSplitsProvider.java │ │ │ │ ├── operations/ │ │ │ │ │ ├── GeoWaveColumnFamily.java │ │ │ │ │ ├── HBaseDataIndexWriter.java │ │ │ │ │ ├── HBaseDeleter.java │ │ │ │ │ ├── HBaseMetadataDeleter.java │ │ │ │ │ ├── HBaseMetadataReader.java │ │ │ │ │ ├── HBaseMetadataWriter.java │ │ │ │ │ ├── HBaseOperations.java │ │ │ │ │ ├── HBaseParallelDecoder.java │ │ │ │ │ ├── HBaseReader.java │ │ │ │ │ ├── HBaseRowDeleter.java │ │ │ │ │ ├── HBaseWriter.java │ │ │ │ │ └── config/ │ │ │ │ │ └── HBaseDatastoreDefaultConfigProvider.java │ │ │ │ ├── server/ │ │ │ │ │ ├── BasicRowScanner.java │ │ │ │ │ ├── GeoWaveColumnId.java │ │ │ │ │ ├── HBaseServerOp.java │ │ │ │ │ ├── MergingServerOp.java │ │ │ │ │ ├── MergingVisibilityServerOp.java │ │ │ │ │ ├── PartialCellEquality.java │ │ │ │ │ ├── RowMergingServerOp.java │ │ │ │ │ ├── RowMergingVisibilityServerOp.java │ │ │ │ │ ├── RowScanner.java │ │ │ │ │ ├── ServerOpInternalScannerWrapper.java │ │ │ │ │ ├── ServerOpRegionScannerWrapper.java │ │ │ │ │ ├── ServerSideOperationKey.java │ 
│ │ │ │ ├── ServerSideOperationStore.java │ │ │ │ │ └── ServerSideOperationUtils.java │ │ │ │ └── util/ │ │ │ │ ├── ConnectionPool.java │ │ │ │ ├── CoprocessorClassLoaderTransformer.java │ │ │ │ ├── GeoWaveBlockingRpcCallback.java │ │ │ │ ├── HBaseCellGenerator.java │ │ │ │ └── HBaseUtils.java │ │ │ ├── protobuf/ │ │ │ │ ├── AggregationService.proto │ │ │ │ ├── HBaseBulkDelete.proto │ │ │ │ ├── SingleEntryFilters.proto │ │ │ │ └── Version.proto │ │ │ └── resources/ │ │ │ ├── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ ├── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ │ └── org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi │ │ │ └── hbase.properties │ │ ├── kudu/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── kudu/ │ │ │ │ ├── KuduColumnType.java │ │ │ │ ├── KuduDataIndexRow.java │ │ │ │ ├── KuduDataStore.java │ │ │ │ ├── KuduDataStoreFactory.java │ │ │ │ ├── KuduFactoryHelper.java │ │ │ │ ├── KuduMetadataRow.java │ │ │ │ ├── KuduRow.java │ │ │ │ ├── KuduStoreFactoryFamily.java │ │ │ │ ├── PersistentKuduRow.java │ │ │ │ ├── config/ │ │ │ │ │ ├── KuduOptions.java │ │ │ │ │ └── KuduRequiredOptions.java │ │ │ │ ├── operations/ │ │ │ │ │ ├── KuduDataIndexRead.java │ │ │ │ │ ├── KuduDeleter.java │ │ │ │ │ ├── KuduMetadataDeleter.java │ │ │ │ │ ├── KuduMetadataReader.java │ │ │ │ │ ├── KuduMetadataWriter.java │ │ │ │ │ ├── KuduOperations.java │ │ │ │ │ ├── KuduRangeRead.java │ │ │ │ │ ├── KuduReader.java │ │ │ │ │ └── KuduWriter.java │ │ │ │ └── util/ │ │ │ │ ├── AsyncClientPool.java │ │ │ │ ├── ClientPool.java │ │ │ │ └── KuduUtils.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ ├── 
redis/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── datastore/ │ │ │ │ │ └── redis/ │ │ │ │ │ ├── RedisDataStore.java │ │ │ │ │ ├── RedisDataStoreFactory.java │ │ │ │ │ ├── RedisDefaultConfigProvider.java │ │ │ │ │ ├── RedisFactoryHelper.java │ │ │ │ │ ├── RedisStoreFactoryFamily.java │ │ │ │ │ ├── config/ │ │ │ │ │ │ └── RedisOptions.java │ │ │ │ │ ├── operations/ │ │ │ │ │ │ ├── BatchedRangeRead.java │ │ │ │ │ │ ├── DataIndexRangeRead.java │ │ │ │ │ │ ├── DataIndexRead.java │ │ │ │ │ │ ├── RangeReadInfo.java │ │ │ │ │ │ ├── RedisDataIndexWriter.java │ │ │ │ │ │ ├── RedisMetadataDeleter.java │ │ │ │ │ │ ├── RedisMetadataReader.java │ │ │ │ │ │ ├── RedisMetadataWriter.java │ │ │ │ │ │ ├── RedisOperations.java │ │ │ │ │ │ ├── RedisReader.java │ │ │ │ │ │ ├── RedisRowDeleter.java │ │ │ │ │ │ └── RedisWriter.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── AbstractRedisSetWrapper.java │ │ │ │ │ ├── GeoWaveMetadataCodec.java │ │ │ │ │ ├── GeoWaveMetadataWithTimestampCodec.java │ │ │ │ │ ├── GeoWaveRedisPersistedRow.java │ │ │ │ │ ├── GeoWaveRedisPersistedTimestampRow.java │ │ │ │ │ ├── GeoWaveRedisRow.java │ │ │ │ │ ├── GeoWaveRedisRowCodec.java │ │ │ │ │ ├── GeoWaveRedisRowWithTimestampCodec.java │ │ │ │ │ ├── GeoWaveTimestampMetadata.java │ │ │ │ │ ├── LazyPaginatedEntryRange.java │ │ │ │ │ ├── RedisMapWrapper.java │ │ │ │ │ ├── RedisScoredSetWrapper.java │ │ │ │ │ ├── RedisUtils.java │ │ │ │ │ └── RedissonClientCache.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── datastore/ │ │ │ └── redis/ │ │ │ └── util/ │ │ │ ├── RedisScoredSetWrapperTest.java │ │ │ └── RedisUtilsTest.java │ │ └── rocksdb/ │ │ ├── 
pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── datastore/ │ │ │ │ └── rocksdb/ │ │ │ │ ├── RocksDBDataStore.java │ │ │ │ ├── RocksDBDataStoreFactory.java │ │ │ │ ├── RocksDBDefaultConfigProvider.java │ │ │ │ ├── RocksDBFactoryHelper.java │ │ │ │ ├── RocksDBStoreFactoryFamily.java │ │ │ │ ├── config/ │ │ │ │ │ └── RocksDBOptions.java │ │ │ │ ├── operations/ │ │ │ │ │ ├── RockDBDataIndexWriter.java │ │ │ │ │ ├── RocksDBMetadataDeleter.java │ │ │ │ │ ├── RocksDBMetadataReader.java │ │ │ │ │ ├── RocksDBMetadataWriter.java │ │ │ │ │ ├── RocksDBOperations.java │ │ │ │ │ ├── RocksDBQueryExecution.java │ │ │ │ │ ├── RocksDBReader.java │ │ │ │ │ ├── RocksDBRowDeleter.java │ │ │ │ │ └── RocksDBWriter.java │ │ │ │ └── util/ │ │ │ │ ├── AbstractRocksDBIterator.java │ │ │ │ ├── AbstractRocksDBTable.java │ │ │ │ ├── DataIndexBoundedReverseRowIterator.java │ │ │ │ ├── DataIndexForwardRowIterator.java │ │ │ │ ├── DataIndexReverseRowIterator.java │ │ │ │ ├── RocksDBClient.java │ │ │ │ ├── RocksDBClientCache.java │ │ │ │ ├── RocksDBDataIndexTable.java │ │ │ │ ├── RocksDBGeoWaveMetadata.java │ │ │ │ ├── RocksDBIndexTable.java │ │ │ │ ├── RocksDBMetadataIterator.java │ │ │ │ ├── RocksDBMetadataTable.java │ │ │ │ ├── RocksDBRow.java │ │ │ │ ├── RocksDBRowIterator.java │ │ │ │ └── RocksDBUtils.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ ├── org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi │ │ │ └── org.locationtech.geowave.core.store.StoreFactoryFamilySpi │ │ └── test/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── datastore/ │ │ │ └── rocksdb/ │ │ │ ├── RocksDBLockfileTest.java │ │ │ ├── RocksDBMetadataTableTest.java │ │ │ ├── RocksDBTestPersistableRegistry.java │ │ │ └── util/ │ │ │ └── RocksDBUtilsTest.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ └── 
org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ ├── formats/ │ │ ├── avro/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── avro/ │ │ │ │ │ ├── GeoWaveAvroIngestFormat.java │ │ │ │ │ ├── GeoWaveAvroIngestPlugin.java │ │ │ │ │ └── GeoWaveAvroPersistableRegistry.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── avro/ │ │ │ │ └── GeoWaveAvroIngestTest.java │ │ │ └── resources/ │ │ │ └── tornado_tracksbasicIT-export.avro │ │ ├── gdelt/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── gdelt/ │ │ │ │ │ ├── GDELTIngestFormat.java │ │ │ │ │ ├── GDELTIngestPlugin.java │ │ │ │ │ ├── GDELTPersistableRegistry.java │ │ │ │ │ └── GDELTUtils.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── test/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── format/ │ │ │ └── gdelt/ │ │ │ └── GDELTIngestTest.java │ │ ├── geolife/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── geolife/ │ │ │ │ │ ├── GeoLifeIngestFormat.java │ │ │ │ │ ├── GeoLifeIngestPlugin.java │ │ │ │ │ ├── GeoLifePersistableRegistry.java │ │ │ │ │ └── GeoLifeUtils.java │ │ │ │ └── resources/ │ │ │ │ └── 
META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── geolife/ │ │ │ │ └── GEOLIFEIngestTest.java │ │ │ └── resources/ │ │ │ └── 20081023025304.plt │ │ ├── geotools-raster/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── geotools/ │ │ │ │ └── raster/ │ │ │ │ ├── GeoToolsRasterDataStoreIngestFormat.java │ │ │ │ ├── GeoToolsRasterDataStoreIngestPlugin.java │ │ │ │ ├── NoDataMergeStrategyProvider.java │ │ │ │ ├── NoMergeStrategyProvider.java │ │ │ │ ├── RasterMergeStrategyProviderSpi.java │ │ │ │ └── RasterOptionProvider.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ ├── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── org.locationtech.geowave.format.geotools.raster.RasterMergeStrategyProviderSpi │ │ ├── geotools-vector/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── geotools/ │ │ │ │ └── vector/ │ │ │ │ ├── AbstractFieldRetypingSource.java │ │ │ │ ├── GeoToolsVectorDataOptions.java │ │ │ │ ├── GeoToolsVectorDataStoreIngestFormat.java │ │ │ │ ├── GeoToolsVectorDataStoreIngestPlugin.java │ │ │ │ ├── RetypingVectorDataPlugin.java │ │ │ │ ├── SimpleFeatureGeoWaveWrapper.java │ │ │ │ └── retyping/ │ │ │ │ └── date/ │ │ │ │ ├── DateFieldOptionProvider.java │ │ │ │ ├── DateFieldRetypingPlugin.java │ │ │ │ └── DateFieldRetypingSource.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ ├── gpx/ │ │ │ ├── .gitignore │ │ │ ├── pom.xml │ │ 
│ └── src/ │ │ │ ├── main/ │ │ │ │ ├── avro/ │ │ │ │ │ └── gpxtrack.avsc │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── gpx/ │ │ │ │ │ ├── GPXConsumer.java │ │ │ │ │ ├── GpxIngestFormat.java │ │ │ │ │ ├── GpxIngestPlugin.java │ │ │ │ │ ├── GpxPersistableRegistry.java │ │ │ │ │ ├── GpxUtils.java │ │ │ │ │ └── MaxExtentOptProvider.java │ │ │ │ └── resources/ │ │ │ │ ├── META-INF/ │ │ │ │ │ └── services/ │ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── types/ │ │ │ │ └── gpx/ │ │ │ │ ├── gpx-1_0.xsd │ │ │ │ └── gpx-1_1.xsd │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── types/ │ │ │ │ ├── HelperClass.java │ │ │ │ ├── ValidateObject.java │ │ │ │ └── gpx/ │ │ │ │ ├── GPXConsumerTest.java │ │ │ │ └── GPXIngestPluginTest.java │ │ │ └── resources/ │ │ │ ├── 12345.xml │ │ │ ├── gpx/ │ │ │ │ ├── 000991807.gpx │ │ │ │ └── mystic_basin_trail.gpx │ │ │ ├── metadata.xml │ │ │ └── sample_gpx.xml │ │ ├── stanag4676/ │ │ │ ├── format/ │ │ │ │ ├── pom.xml │ │ │ │ └── src/ │ │ │ │ └── main/ │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── stanag4676/ │ │ │ │ │ ├── ByteBufferBackedInputStream.java │ │ │ │ │ ├── ComparatorStanag4676EventWritable.java │ │ │ │ │ ├── IngestMessageHandler.java │ │ │ │ │ ├── Stanag4676EventWritable.java │ │ │ │ │ ├── Stanag4676IngestFormat.java │ │ │ │ │ ├── Stanag4676IngestPlugin.java │ │ │ │ │ ├── Stanag4676PersistableRegistry.java │ │ │ │ │ ├── Stanag4676Utils.java │ │ │ │ │ ├── image/ │ │ │ │ │ │ ├── ImageChip.java │ │ │ │ │ │ ├── ImageChipDataAdapter.java │ │ │ │ │ │ ├── ImageChipInfo.java │ │ │ │ │ │ └── ImageChipUtils.java │ │ │ │ │ └── parser/ │ │ │ │ 
│ ├── JDOMUtils.java │ │ │ │ │ ├── NATO4676Decoder.java │ │ │ │ │ ├── NATO4676Encoder.java │ │ │ │ │ ├── TrackDecoder.java │ │ │ │ │ ├── TrackEncoder.java │ │ │ │ │ ├── TrackFileReader.java │ │ │ │ │ ├── TrackReader.java │ │ │ │ │ ├── TrackWriter.java │ │ │ │ │ ├── model/ │ │ │ │ │ │ ├── Area.java │ │ │ │ │ │ ├── ClassificationCredibility.java │ │ │ │ │ │ ├── ClassificationLevel.java │ │ │ │ │ │ ├── CovarianceMatrix.java │ │ │ │ │ │ ├── ExerciseIndicator.java │ │ │ │ │ │ ├── FrequencyUnitType.java │ │ │ │ │ │ ├── GeodeticPosition.java │ │ │ │ │ │ ├── IDdata.java │ │ │ │ │ │ ├── Identity.java │ │ │ │ │ │ ├── IdentityAmplification.java │ │ │ │ │ │ ├── IffMode.java │ │ │ │ │ │ ├── LineageRelation.java │ │ │ │ │ │ ├── LineageRelationType.java │ │ │ │ │ │ ├── MissionFrame.java │ │ │ │ │ │ ├── MissionSummary.java │ │ │ │ │ │ ├── MissionSummaryMessage.java │ │ │ │ │ │ ├── ModalityType.java │ │ │ │ │ │ ├── MotionEventPoint.java │ │ │ │ │ │ ├── MotionImagery.java │ │ │ │ │ │ ├── NATO4676Message.java │ │ │ │ │ │ ├── ObjectClassification.java │ │ │ │ │ │ ├── Position.java │ │ │ │ │ │ ├── Security.java │ │ │ │ │ │ ├── SimulationIndicator.java │ │ │ │ │ │ ├── SymbolicSpectralRange.java │ │ │ │ │ │ ├── Track.java │ │ │ │ │ │ ├── TrackClassification.java │ │ │ │ │ │ ├── TrackDotSource.java │ │ │ │ │ │ ├── TrackEnvironment.java │ │ │ │ │ │ ├── TrackEvent.java │ │ │ │ │ │ ├── TrackIdentity.java │ │ │ │ │ │ ├── TrackItem.java │ │ │ │ │ │ ├── TrackManagement.java │ │ │ │ │ │ ├── TrackMessage.java │ │ │ │ │ │ ├── TrackPoint.java │ │ │ │ │ │ ├── TrackPointDetail.java │ │ │ │ │ │ ├── TrackPointType.java │ │ │ │ │ │ ├── TrackRun.java │ │ │ │ │ │ ├── TrackRunParameter.java │ │ │ │ │ │ ├── TrackStatus.java │ │ │ │ │ │ └── TrackerType.java │ │ │ │ │ └── util/ │ │ │ │ │ ├── EarthVector.java │ │ │ │ │ └── Length.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── 
org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── service/ │ │ │ ├── config/ │ │ │ │ └── log4j.properties │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── types/ │ │ │ │ └── stanag4676/ │ │ │ │ └── service/ │ │ │ │ ├── Stanag4676ImageryChipApplication.java │ │ │ │ └── rest/ │ │ │ │ └── Stanag4676ImageryChipService.java │ │ │ ├── resources/ │ │ │ │ ├── geoserver_files/ │ │ │ │ │ ├── 4676InGeowave.txt │ │ │ │ │ ├── MotionPointStyle.xml │ │ │ │ │ ├── TrackPointDecimateHeatMap.xml │ │ │ │ │ ├── TrackSpeedStyle.xml │ │ │ │ │ ├── colormap.sld │ │ │ │ │ ├── config.xml │ │ │ │ │ ├── motion_point-content.ftl │ │ │ │ │ ├── track-content.ftl │ │ │ │ │ └── track_point-content.ftl │ │ │ │ └── log4j.properties │ │ │ └── webapp/ │ │ │ ├── .placeholder │ │ │ └── WEB-INF/ │ │ │ ├── config.properties │ │ │ └── web.xml │ │ ├── tdrive/ │ │ │ ├── .gitignore │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ ├── main/ │ │ │ │ ├── avro/ │ │ │ │ │ └── tdrivepoint.avsc │ │ │ │ ├── java/ │ │ │ │ │ └── org/ │ │ │ │ │ └── locationtech/ │ │ │ │ │ └── geowave/ │ │ │ │ │ └── format/ │ │ │ │ │ └── tdrive/ │ │ │ │ │ ├── TdriveIngestFormat.java │ │ │ │ │ ├── TdriveIngestPlugin.java │ │ │ │ │ ├── TdrivePersistableRegistry.java │ │ │ │ │ └── TdriveUtils.java │ │ │ │ └── resources/ │ │ │ │ └── META-INF/ │ │ │ │ └── services/ │ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ │ └── test/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── tdrive/ │ │ │ │ └── TDRIVEIngestTest.java │ │ │ └── resources/ │ │ │ └── 9879.txt │ │ └── twitter/ │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── org/ │ │ │ │ └── locationtech/ │ │ │ │ └── geowave/ │ │ │ │ └── format/ │ │ │ │ └── twitter/ │ │ │ │ ├── 
TwitterIngestFormat.java │ │ │ │ ├── TwitterIngestPlugin.java │ │ │ │ ├── TwitterPersistableRegistry.java │ │ │ │ └── TwitterUtils.java │ │ │ └── resources/ │ │ │ └── META-INF/ │ │ │ └── services/ │ │ │ ├── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ │ │ └── org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi │ │ └── test/ │ │ └── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── format/ │ │ └── twitter/ │ │ └── TwitterIngestTest.java │ └── pom.xml ├── migration/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── migration/ │ │ │ ├── MigrationPersistableRegistry.java │ │ │ ├── cli/ │ │ │ │ ├── MigrationCommand.java │ │ │ │ └── MigrationOperationProvider.java │ │ │ └── legacy/ │ │ │ ├── adapter/ │ │ │ │ ├── LegacyInternalDataAdapterWrapper.java │ │ │ │ └── vector/ │ │ │ │ ├── LegacyFeatureDataAdapter.java │ │ │ │ ├── LegacyStatsConfigurationCollection.java │ │ │ │ └── LegacyVisibilityConfiguration.java │ │ │ └── core/ │ │ │ ├── geotime/ │ │ │ │ ├── LegacyCustomCRSSpatialField.java │ │ │ │ ├── LegacyLatitudeField.java │ │ │ │ ├── LegacyLongitudeField.java │ │ │ │ └── LegacySpatialField.java │ │ │ └── store/ │ │ │ ├── LegacyAdapterIndexMappingStore.java │ │ │ └── LegacyAdapterToIndexMapping.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi │ └── test/ │ └── java/ │ └── org/ │ └── locationtech/ │ └── geowave/ │ └── migration/ │ └── MigrationTest.java ├── pom.xml ├── python/ │ ├── pom.xml │ └── src/ │ ├── examples/ │ │ └── public_schools.csv │ └── main/ │ ├── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── python/ │ │ ├── Debug.java │ │ ├── GeoWavePy4JGateway.java │ │ └── cli/ │ │ ├── PythonOperationProvider.java │ │ ├── PythonRunGatewayCommand.java │ │ ├── 
PythonRunGatewayOptions.java │ │ └── PythonSection.java │ ├── python/ │ │ ├── .gitignore │ │ ├── LICENSE │ │ ├── README.md │ │ ├── maven_version.py │ │ ├── pygw/ │ │ │ ├── __init__.py │ │ │ ├── base/ │ │ │ │ ├── __init__.py │ │ │ │ ├── closeable_iterator.py │ │ │ │ ├── data_type_adapter.py │ │ │ │ ├── envelope.py │ │ │ │ ├── geowave_object.py │ │ │ │ ├── interval.py │ │ │ │ ├── java_transformer.py │ │ │ │ ├── range.py │ │ │ │ ├── type_conversions.py │ │ │ │ ├── write_results.py │ │ │ │ └── writer.py │ │ │ ├── config.py │ │ │ ├── debug.py │ │ │ ├── gateway.py │ │ │ ├── geotools/ │ │ │ │ ├── __init__.py │ │ │ │ ├── attribute_descriptor.py │ │ │ │ ├── feature_data_adapter.py │ │ │ │ ├── simple_feature.py │ │ │ │ ├── simple_feature_builder.py │ │ │ │ ├── simple_feature_type.py │ │ │ │ └── simple_feature_type_builder.py │ │ │ ├── index/ │ │ │ │ ├── __init__.py │ │ │ │ ├── index.py │ │ │ │ ├── index_builder.py │ │ │ │ ├── spatial_index_builder.py │ │ │ │ └── spatial_temporal_index_builder.py │ │ │ ├── query/ │ │ │ │ ├── __init__.py │ │ │ │ ├── aggregation_query.py │ │ │ │ ├── aggregation_query_builder.py │ │ │ │ ├── base_query_builder.py │ │ │ │ ├── query.py │ │ │ │ ├── query_builder.py │ │ │ │ ├── query_constraints.py │ │ │ │ ├── query_constraints_factory.py │ │ │ │ ├── query_hint_key.py │ │ │ │ ├── statistics/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── statistic_query.py │ │ │ │ │ └── statistic_query_builder.py │ │ │ │ └── vector/ │ │ │ │ ├── __init__.py │ │ │ │ ├── filter_factory.py │ │ │ │ ├── spatial_temporal_constraints_builder.py │ │ │ │ ├── vector_aggregation_query_builder.py │ │ │ │ ├── vector_query_builder.py │ │ │ │ └── vector_query_constraints_factory.py │ │ │ ├── statistics/ │ │ │ │ ├── __init__.py │ │ │ │ ├── bin_constraints.py │ │ │ │ ├── binning_strategy/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── composite_binning_strategy.py │ │ │ │ │ ├── data_type_binning_strategy.py │ │ │ │ │ ├── field_value_binning_strategy.py │ │ │ │ │ ├── 
numeric_range_field_value_binning_strategy.py │ │ │ │ │ ├── partition_binning_strategy.py │ │ │ │ │ ├── spatial_field_value_binning_strategy.py │ │ │ │ │ └── time_range_field_value_binning_strategy.py │ │ │ │ ├── binning_strategy_mappings.py │ │ │ │ ├── data_type/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── count_statistic.py │ │ │ │ ├── field/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── bloom_filter_statistic.py │ │ │ │ │ ├── bounding_box_statistic.py │ │ │ │ │ ├── count_min_sketch_statistic.py │ │ │ │ │ ├── fixed_bin_numeric_histogram_statistic.py │ │ │ │ │ ├── hyper_log_log_statistic.py │ │ │ │ │ ├── numeric_histogram_statistic.py │ │ │ │ │ ├── numeric_mean_statistic.py │ │ │ │ │ ├── numeric_range_statistic.py │ │ │ │ │ ├── numeric_stats_statistic.py │ │ │ │ │ └── time_range_statistic.py │ │ │ │ ├── index/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── differing_visibility_count_statistic.py │ │ │ │ │ ├── duplicate_entry_count_statistic.py │ │ │ │ │ ├── field_visibility_count_statistic.py │ │ │ │ │ ├── index_meta_data_set_statistic.py │ │ │ │ │ ├── max_duplicates_statistic.py │ │ │ │ │ ├── partitions_statistic.py │ │ │ │ │ └── row_range_histogram_statistic.py │ │ │ │ ├── statistic.py │ │ │ │ ├── statistic_binning_strategy.py │ │ │ │ ├── statistic_mappings.py │ │ │ │ ├── statistic_type.py │ │ │ │ ├── statistic_value.py │ │ │ │ └── transformers.py │ │ │ ├── store/ │ │ │ │ ├── __init__.py │ │ │ │ ├── accumulo/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── accumulo_options.py │ │ │ │ ├── bigtable/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── big_table_options.py │ │ │ │ ├── cassandra/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── options.py │ │ │ │ ├── data_store.py │ │ │ │ ├── data_store_factory.py │ │ │ │ ├── data_store_options.py │ │ │ │ ├── dynamodb/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── options.py │ │ │ │ ├── hbase/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── options.py │ │ │ │ ├── kudu/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── options.py │ │ │ │ ├── redis/ │ │ │ │ │ ├── __init__.py │ │ │ │ │ 
└── options.py │ │ │ │ └── rocksdb/ │ │ │ │ ├── __init__.py │ │ │ │ └── options.py │ │ │ └── test/ │ │ │ ├── __init__.py │ │ │ ├── aggregation_test.py │ │ │ ├── conftest.py │ │ │ ├── data_store_test.py │ │ │ ├── geotools_test.py │ │ │ ├── query_test.py │ │ │ ├── statistics_test.py │ │ │ └── type_conversion_test.py │ │ ├── pytest.ini │ │ ├── requirements.txt │ │ └── setup.py │ └── resources/ │ └── META-INF/ │ └── services/ │ └── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ├── services/ │ ├── .gitignore │ ├── api/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── service/ │ │ ├── AnalyticService.java │ │ ├── BaseService.java │ │ ├── ConfigService.java │ │ ├── FileUploadService.java │ │ ├── GeoServerService.java │ │ ├── IndexService.java │ │ ├── IngestService.java │ │ ├── ServiceUtils.java │ │ ├── StatService.java │ │ ├── StoreService.java │ │ └── TypeService.java │ ├── client/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ └── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── service/ │ │ └── client/ │ │ ├── AnalyticServiceClient.java │ │ ├── BaseServiceClient.java │ │ ├── ConfigServiceClient.java │ │ ├── FileUploadServiceClient.java │ │ ├── GeoServerServiceClient.java │ │ ├── IndexServiceClient.java │ │ ├── IngestServiceClient.java │ │ ├── StatServiceClient.java │ │ ├── StoreServiceClient.java │ │ └── TypeServiceClient.java │ ├── grpc/ │ │ ├── protobuf/ │ │ │ ├── .gitignore │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ └── protobuf/ │ │ │ └── GeoWaveVector.proto │ │ ├── protobuf-generator/ │ │ │ ├── pom.xml │ │ │ └── src/ │ │ │ └── main/ │ │ │ └── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── service/ │ │ │ └── grpc/ │ │ │ ├── GeoWaveGrpcOperationParser.java │ │ │ └── GeowaveOperationGrpcGenerator.java │ │ └── server/ │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ 
│ │ │ └── service/ │ │ │ └── grpc/ │ │ │ ├── GeoWaveGrpcServer.java │ │ │ ├── GeoWaveGrpcServiceOptions.java │ │ │ ├── GeoWaveGrpcServiceSpi.java │ │ │ ├── cli/ │ │ │ │ ├── GrpcOperationProvider.java │ │ │ │ ├── GrpcSection.java │ │ │ │ ├── StartGrpcServerCommand.java │ │ │ │ ├── StartGrpcServerCommandOptions.java │ │ │ │ └── StopGrpcServerCommand.java │ │ │ └── services/ │ │ │ ├── GeoWaveGrpcAnalyticMapreduceService.java │ │ │ ├── GeoWaveGrpcAnalyticSparkService.java │ │ │ ├── GeoWaveGrpcCliGeoserverService.java │ │ │ ├── GeoWaveGrpcCoreCliService.java │ │ │ ├── GeoWaveGrpcCoreIngestService.java │ │ │ ├── GeoWaveGrpcCoreMapreduceService.java │ │ │ ├── GeoWaveGrpcCoreStoreService.java │ │ │ ├── GeoWaveGrpcServiceCommandUtil.java │ │ │ └── GeoWaveGrpcVectorService.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ ├── org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi │ │ └── org.locationtech.geowave.service.grpc.GeoWaveGrpcServiceSpi │ ├── pom.xml │ └── rest/ │ ├── README.md │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── locationtech/ │ │ │ └── geowave/ │ │ │ └── service/ │ │ │ └── rest/ │ │ │ ├── ApiRestletApplication.java │ │ │ ├── AsyncOperationStatusResource.java │ │ │ ├── FileUploadResource.java │ │ │ ├── GeoWaveOperationFinder.java │ │ │ ├── GeoWaveOperationServiceWrapper.java │ │ │ ├── MainResource.java │ │ │ ├── RestRoute.java │ │ │ ├── SwaggerApiParser.java │ │ │ ├── SwaggerOperationParser.java │ │ │ ├── SwaggerResource.java │ │ │ ├── exceptions/ │ │ │ │ └── MissingArgumentException.java │ │ │ ├── field/ │ │ │ │ ├── AbstractMainParam.java │ │ │ │ ├── BasicRestField.java │ │ │ │ ├── ListMainParam.java │ │ │ │ ├── ParameterRestField.java │ │ │ │ ├── ParameterRestFieldValue.java │ │ │ │ ├── RequestParameters.java │ │ │ │ ├── RequestParametersForm.java │ │ │ │ ├── RequestParametersJson.java │ │ │ │ ├── RestField.java │ │ │ │ ├── RestFieldFactory.java │ │ │ │ ├── RestFieldValue.java │ │ │ │ └── 
StringMainParam.java │ │ │ ├── operations/ │ │ │ │ ├── AddAccumuloStoreCommand.java │ │ │ │ ├── AddBigTableStoreCommand.java │ │ │ │ ├── AddCassandraStoreCommand.java │ │ │ │ ├── AddDynamoDBStoreCommand.java │ │ │ │ ├── AddFileSystemStoreCommand.java │ │ │ │ ├── AddHBaseStoreCommand.java │ │ │ │ ├── AddKuduStoreCommand.java │ │ │ │ ├── AddRedisStoreCommand.java │ │ │ │ ├── AddRocksDBStoreCommand.java │ │ │ │ ├── AddSpatialIndexCommand.java │ │ │ │ ├── AddSpatialTemporalIndexCommand.java │ │ │ │ └── RestOperationStatusMessage.java │ │ │ └── security/ │ │ │ ├── GeoWaveApiKeyFilter.java │ │ │ ├── GeoWaveApiKeySetterFilter.java │ │ │ ├── GeoWaveBaseApiKeyDB.java │ │ │ ├── GeoWaveSQLiteApiKeyDB.java │ │ │ └── oauth2/ │ │ │ ├── FacebookAccessTokenConverter.java │ │ │ └── FacebookTokenServices.java │ │ ├── resources/ │ │ │ └── facebook-oauth2-example.properties │ │ └── webapp/ │ │ └── WEB-INF/ │ │ ├── facebook-oauth2-example-security-servlet.xml │ │ ├── security-servlet.xml │ │ └── web.xml │ └── test/ │ └── java/ │ └── org/ │ └── locationtech/ │ └── geowave/ │ └── service/ │ └── rest/ │ ├── GeoWaveOperationServiceWrapperTest.java │ └── field/ │ ├── RequestParametersFormTest.java │ └── RequestParametersJsonTest.java └── test/ ├── .gitignore ├── README.md ├── pom.xml └── src/ ├── main/ │ ├── java/ │ │ └── org/ │ │ └── locationtech/ │ │ └── geowave/ │ │ └── test/ │ │ ├── AccumuloStoreTestEnvironment.java │ │ ├── BigtableStoreTestEnvironment.java │ │ ├── CassandraStoreTestEnvironment.java │ │ ├── DynamoDBStoreTestEnvironment.java │ │ ├── FileSystemStoreTestEnvironment.java │ │ ├── GeoWaveITRunner.java │ │ ├── GeoWaveITSuiteRunner.java │ │ ├── HBaseStoreTestEnvironment.java │ │ ├── KerberosTestEnvironment.java │ │ ├── KuduStoreTestEnvironment.java │ │ ├── RedisStoreTestEnvironment.java │ │ ├── RocksDBStoreTestEnvironment.java │ │ ├── StoreTestEnvironment.java │ │ ├── TestDataStoreOptions.java │ │ ├── TestEnvironment.java │ │ ├── TestUtils.java │ │ ├── 
ZookeeperTestEnvironment.java │ │ ├── annotation/ │ │ │ ├── Environments.java │ │ │ ├── GeoWaveTestStore.java │ │ │ ├── GeoWaveTestStoreImpl.java │ │ │ ├── NamespaceOverride.java │ │ │ └── OptionsOverride.java │ │ ├── kafka/ │ │ │ ├── KafkaTestEnvironment.java │ │ │ └── KafkaTestUtils.java │ │ ├── mapreduce/ │ │ │ ├── MapReduceTestEnvironment.java │ │ │ └── MapReduceTestUtils.java │ │ ├── services/ │ │ │ ├── ServicesTestEnvironment.java │ │ │ └── grpc/ │ │ │ ├── GeoWaveGrpcTestClient.java │ │ │ └── GeoWaveGrpcTestUtils.java │ │ └── spark/ │ │ ├── SparkTestEnvironment.java │ │ └── SparkUtils.java │ └── resources/ │ └── META-INF/ │ └── services/ │ └── org.locationtech.geowave.test.kerberos.KerberosTestingUtilSpi └── test/ ├── java/ │ └── org/ │ └── locationtech/ │ └── geowave/ │ ├── mapreduce/ │ │ ├── MapReduceMemoryDataStore.java │ │ ├── MapReduceMemoryOperations.java │ │ └── splits/ │ │ └── SplitsProviderIT.java │ └── test/ │ ├── GeoWaveITSuite.java │ ├── IntegrationTestPersistableRegistry.java │ ├── PersistableRegistryTest.java │ ├── basic/ │ │ ├── AbstractGeoWaveBasicVectorIT.java │ │ ├── AbstractGeoWaveIT.java │ │ ├── GeoWaveAttributeIndexIT.java │ │ ├── GeoWaveBasicCustomCRSRasterIT.java │ │ ├── GeoWaveBasicRasterIT.java │ │ ├── GeoWaveBasicSpatialTemporalVectorIT.java │ │ ├── GeoWaveBasicTemporalVectorIT.java │ │ ├── GeoWaveBasicURLIngestIT.java │ │ ├── GeoWaveCustomCRSSpatialVectorIT.java │ │ ├── GeoWaveCustomIndexIT.java │ │ ├── GeoWaveEnumIndexIT.java │ │ ├── GeoWaveGeometryPrecisionIT.java │ │ ├── GeoWaveMultiProcessIngestIT.java │ │ ├── GeoWaveSpatialBinningAggregationIT.java │ │ ├── GeoWaveSpatialBinningStatisticsIT.java │ │ ├── GeoWaveStatisticsIT.java │ │ ├── GeoWaveTextIndexIT.java │ │ ├── GeoWaveVectorSerializationIT.java │ │ └── GeoWaveVisibilityIT.java │ ├── config/ │ │ └── ConfigCacheIT.java │ ├── docs/ │ │ └── GeoWaveDocumentationExamplesIT.java │ ├── javaspark/ │ │ └── GeoWaveSparkIngestIT.java │ ├── kafka/ │ │ └── BasicKafkaIT.java │ ├── 
landsat/ │ │ ├── CustomCRSLandsatIT.java │ │ └── LandsatIT.java │ ├── mapreduce/ │ │ ├── BasicMapReduceIT.java │ │ ├── CustomCRSKDERasterResizeIT.java │ │ ├── DBScanIT.java │ │ ├── GeoWaveKMeansIT.java │ │ ├── GeoWaveNNIT.java │ │ └── StoreCopyIT.java │ ├── osm/ │ │ └── MapReduceIT.java │ ├── query/ │ │ ├── AttributesSubsetQueryIT.java │ │ ├── BasicDataTypeAdapterQueryIT.java │ │ ├── ExpressionQueryIT.java │ │ ├── GeoWaveQueryLanguageIT.java │ │ ├── PolygonDataIdQueryIT.java │ │ ├── QueryOptionsIT.java │ │ └── SpatialTemporalQueryIT.java │ ├── secondary/ │ │ ├── AbstractSecondaryIndexIT.java │ │ ├── BasicSecondaryIndexIT.java │ │ ├── CustomSecondaryIndexIT.java │ │ ├── DataIndexOnlyIT.java │ │ ├── MapReduceSecondaryIndexIT.java │ │ ├── SimpleQuerySecondaryIndexIT.java │ │ └── VisibilitySecondaryIndexIT.java │ ├── services/ │ │ ├── AnalyticIT.java │ │ ├── BaseServiceIT.java │ │ ├── ConfigServicesIT.java │ │ ├── FileUploadIT.java │ │ ├── GeoServerIT.java │ │ ├── GeoServerIngestIT.java │ │ ├── IndexServicesIT.java │ │ ├── IngestIT.java │ │ ├── StatServicesIT.java │ │ ├── StoreServicesIT.java │ │ ├── TypeServicesIT.java │ │ └── grpc/ │ │ └── GeoWaveGrpcIT.java │ ├── spark/ │ │ ├── GeoWaveBasicSparkIT.java │ │ ├── GeoWaveSparkKMeansIT.java │ │ ├── GeoWaveSparkSQLIT.java │ │ └── GeoWaveSparkSpatialJoinIT.java │ └── stability/ │ └── GeoWaveStabilityIT.java └── resources/ ├── META-INF/ │ └── services/ │ └── org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ├── cassandra.yaml ├── hadoop-metrics2.properties ├── hbase.properties ├── jul-test.properties ├── kerberos-config.xml ├── log4j-test.properties ├── logging.xml ├── org/ │ └── locationtech/ │ └── geowave/ │ └── test/ │ ├── geonames/ │ │ ├── barbados/ │ │ │ └── BB.txt │ │ └── readme.txt │ ├── multi-polygon-test.geojson │ └── query/ │ └── stateCapitals.csv ├── sentinel/ │ └── sentinel2_band_example.jp2 ├── sld/ │ ├── DecimatePoints-100px.sld │ ├── DecimatePoints-10px.sld │ ├── DecimatePoints-2px.sld │ ├── 
DistributedRender.sld │ ├── SubsamplePoints-100px.sld │ ├── SubsamplePoints-10px.sld │ └── SubsamplePoints-2px.sld ├── wfs-requests/ │ ├── geostuff_layer.xml │ ├── insert.xml │ ├── lock.xml │ ├── query.xml │ ├── update.xml │ ├── wfs.xml │ └── wms.xml └── wms/ └── simplePoint.sld ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitattributes ================================================ # Handle line endings automatically for files detected as text # and leave all files detected as binary untouched. * text=auto # Never modify line endings of our bash scripts *.sh -crlf # # The above will handle all files NOT found below # # These files are text and should be normalized (Convert crlf => lf) *.css text *.html text *.java text *.js text *.json text *.properties text *.txt text *.xml text # These files are binary and should be left untouched # (binary is a macro for -text -diff) *.class binary *.gif binary *.jar binary *.jpg binary *.png binary *.war binary ================================================ FILE: .github/workflows/publish.yml ================================================ name: Publish on: push: branches: - master env: DEV_RESOURCES_VERSION: 1.7 MAVEN_OPTS: "-XX:CompressedClassSpaceSize=256m -XX:+UseSerialGC -Xmx2g -XX:MaxMetaspaceSize=512m" jobs: publish: if: github.repository == 'locationtech/geowave' runs-on: ubuntu-18.04 name: Publish Artifacts and Docs env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} CHANGELOG_GITHUB_TOKEN: ${{ secrets.CHANGELOG_TOKEN }} GPG_OWNERTRUST: ${{ secrets.GPG_OWNERTRUST }} GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} GPG_SECRET_KEYS: ${{ secrets.GPG_SECRET_KEYS }} PYPI_CREDENTIALS: ${{ secrets.PYPI_CREDENTIALS }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} steps: - name: Checkout repository uses: actions/checkout@v2 - name: Set up JDK 1.8 uses: 
joschi/setup-jdk@v2 with: java-version: '8' architecture: x64 - name: Cache resources uses: actions/cache@v2 env: cache-name: cache-geowave-resources with: key: ${{ runner.os }}-Publish path: ~/.m2 - name: Install Local Artifacts run: mvn install -B -DskipTests -Dspotbugs.skip - name: Publish Artifacts run: ./.utility/publish-artifacts.sh - name: Generate Changelog uses: heinrichreimer/github-changelog-generator-action@v2.2 with: issues: true issuesWoLabels: true pullRequests: true prWoLabels: true author: true unreleased: true stripGeneratorNotice: true verbose: true compareLink: true httpCache: true filterByMilestone: true - name: Convert Changelog to HTML uses: docker://pandoc/core:2.9 with: args: "-f markdown -t html -s -c stylesheets/changelog.css -o changelog.html CHANGELOG.md" - name: Build HTML Docs run: mvn -P html -pl docs install -DskipTests -Dspotbugs.skip - name: Build Aggregate Javadocs run: mvn javadoc:aggregate -B -DskipTests -Dspotbugs.skip - name: Build Python Docs run: ./.utility/build-python-docs.sh - name: Publish Docs to GH-Pages run: ./.utility/publish-docs.sh ================================================ FILE: .github/workflows/test.yml ================================================ name: Tests on: [push, pull_request] env: DEV_RESOURCES_VERSION: 1.7 MAVEN_PROFILES: '""' IT_ONLY: true MAVEN_OPTS: "-XX:CompressedClassSpaceSize=256m -XX:+UseSerialGC -Xmx2g -XX:MaxMetaspaceSize=512m" jobs: unit-tests: runs-on: ubuntu-20.04 name: Unit Tests on Latest ASF Versions env: IT_ONLY: false steps: - name: Checkout repository uses: actions/checkout@v2 - name: Set up JDK 1.8 uses: joschi/setup-jdk@v2 with: java-version: '8' architecture: x64 - name: Set up Maven uses: stCarolas/setup-maven@v4 with: maven-version: 3.6.3 - name: Cache maven resources uses: actions/cache@v2 env: cache-name: cache-maven-resources with: key: ${{ runner.os }}-mvn-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-mvn- path: | ~/.m2/repository - name: Cache 
other resources uses: actions/cache@v2 env: cache-name: cache-resources with: key: ${{ runner.os }}-other-${{ secrets.CACHE_ID }} restore-keys: | ${{ runner.os }}-other-${{ secrets.CACHE_ID }} path: | ~/.downloads test/landsat8 test/sentinel2 test/target/temp/gdal - name: Run run: ./.utility/run-tests.sh - name: Publish Unit Test Results uses: scacap/action-surefire-report@v1 if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository with: check_name: Unit Test Results report_paths: "**/target/surefire-reports/*.xml" github_token: ${{ secrets.GITHUB_TOKEN }} python-tests: runs-on: ubuntu-20.04 name: Python Tests on Latest ASF Versions env: IT_ONLY: false PYTHON_BUILD: true steps: - name: Checkout repository uses: actions/checkout@v2 - name: Set up AdoptOpenJDK 1.8 uses: joschi/setup-jdk@v2 with: java-version: '8' architecture: x64 - name: Set up Maven uses: stCarolas/setup-maven@v4 with: maven-version: 3.6.3 - name: Cache maven resources uses: actions/cache@v2 env: cache-name: cache-maven-resources with: key: ${{ runner.os }}-mvn-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-mvn- path: | ~/.m2/repository - name: Cache other resources uses: actions/cache@v2 env: cache-name: cache-resources with: key: ${{ runner.os }}-other-${{ secrets.CACHE_ID }} restore-keys: | ${{ runner.os }}-other-${{ secrets.CACHE_ID }} path: | ~/.downloads test/landsat8 test/sentinel2 test/target/temp/gdal - name: Run run: ./.utility/run-tests.sh - name: Python Test Results uses: scacap/action-surefire-report@v1 if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository with: check_name: Python Test Results report_paths: python/src/main/python/test-report.xml github_token: ${{ secrets.GITHUB_TOKEN }} integration-tests: runs-on: ubuntu-20.04 name: ${{ matrix.profile }} Integration Tests strategy: fail-fast: false matrix: profile: [redis-it, rocksdb-it, accumulo-it-client, 
accumulo-it-server, hbase-it-client, hbase-it-server, dynamodb-it, bigtable-it, kudu-it, filesystem-it, 'filesystem-it,secondary-index-it', 'rocksdb-it,secondary-index-it', 'accumulo-it-server,compatibility','hbase-it-server,secondary-index-it'] include: - profile: cassandra-it retry_tests: true ## for now kerberos test environment isn't quite working, skip the kerberos tests until the issue is resolved # include a new variable of TEST_KERBEROS = true # - profile: accumulo-it-kerberos # test_kerberos: true # - profile: 'accumulo-it-kerberos,compatibility' # test_kerberos: true env: IT_ONLY: true MAVEN_PROFILES: ${{ matrix.profile }} TEST_KERBEROS: ${{ matrix.test_kerberos }} RETRY_TESTS: ${{ matrix.retry_tests }} steps: - name: Checkout repository uses: actions/checkout@v2 - name: Set up AdoptOpenJDK 1.8 uses: joschi/setup-jdk@v2 with: java-version: '8' architecture: x64 - name: Set up Maven uses: stCarolas/setup-maven@v4 with: maven-version: 3.6.3 - name: Cache maven resources uses: actions/cache@v2 env: cache-name: cache-maven-resources with: key: ${{ runner.os }}-mvn-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-mvn- path: | ~/.m2/repository - name: Cache other resources uses: actions/cache@v2 env: cache-name: cache-resources with: key: ${{ runner.os }}-other-${{ secrets.CACHE_ID }} restore-keys: | ${{ runner.os }}-other-${{ secrets.CACHE_ID }} path: | ~/.downloads test/landsat8 test/sentinel2 test/target/temp/gdal - name: Run run: ./.utility/retry ./.utility/run-tests.sh - name: Publish Integration Test ${{ matrix.profile }} Results uses: scacap/action-surefire-report@v1 if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository with: check_name: ${{ matrix.profile }} Results report_paths: test/target/failsafe-reports/TEST-org.locationtech.geowave.test.GeoWaveITSuite.xml github_token: ${{ secrets.GITHUB_TOKEN }} ================================================ FILE: .gitignore 
================================================ *.project *.classpath *.prefs *.settings target *.log dependency-reduced-pom.xml *.imls *.iml .idea/ .DS_Store bin docker-root generated .metadata **/.factorypath .vscode ================================================ FILE: .utility/.maven.xml ================================================ ossrh ${env.SONATYPE_USERNAME} ${env.SONATYPE_PASSWORD} osgeo-release 120000 120000 ossrh true ${env.GPG_EXECUTABLE} ${env.GPG_PASSPHRASE} ================================================ FILE: .utility/build-dev-resources.sh ================================================ #!/bin/bash set -v pushd dev-resources # Build the dev-resources jar echo -e "Building dev-resources..." mvn clean install popd ================================================ FILE: .utility/build-python-docs.sh ================================================ #!/bin/bash # Build and Run Java Gateway mvn -q package -P geowave-tools-singlejar -Dspotbugs.skip -DskipTests >/dev/null GEOWAVE_VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec) nohup java -cp deploy/target/geowave-deploy-${GEOWAVE_VERSION}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain util python rungateway & # Install pip and venv sudo apt-get install -yq python3-pip python3-venv # Build Python docs cd python/src/main/python python3 -m venv tests-venv source ./tests-venv/bin/activate pip install --upgrade pip pip install wheel pip install -r requirements.txt pdoc --html pygw EXIT_CODE=$? cd ../../../.. 
mv python/src/main/python/html/pygw target/site/pydocs deactivate exit $EXIT_CODE ================================================ FILE: .utility/publish-artifacts.sh ================================================ #!/bin/bash set -ev echo -e "Building javadocs...\n" mvn javadoc:javadoc -B -DskipTests -Dspotbugs.skip echo $GPG_SECRET_KEYS | base64 --decode | gpg --import --no-tty --batch --yes echo $GPG_OWNERTRUST | base64 --decode | gpg --import-ownertrust --no-tty --batch --yes # Build the dev-resources jar if ! curl --head --silent --fail https://oss.sonatype.org/service/local/repositories/releases/content/org/locationtech/geowave/geowave-dev-resources/${DEV_RESOURCES_VERSION}/geowave-dev-resources-${DEV_RESOURCES_VERSION}.pom 2> /dev/null; then pushd dev-resources echo -e "Deploying dev-resources..." mvn deploy --settings ../.utility/.maven.xml -DskipTests -Dspotbugs.skip -B -U -Prelease popd fi echo -e "Deploying geowave artifacts..." mvn deploy --settings .utility/.maven.xml -DskipTests -Dspotbugs.skip -B -U -Prelease # Get the version from the build.properties file filePath=deploy/target/classes/build.properties GEOWAVE_VERSION=$(grep project.version $filePath| awk -F= '{print $2}') # Don't publish snapshots to PyPi if [[ ! "$GEOWAVE_VERSION" =~ "SNAPSHOT" ]] ; then if [[ -z "${PYPI_CREDENTIALS}" ]]; then echo -e "No PyPi credentials, skipping PyPi distribution..." else echo -e "Deploying pygw to PyPi..." 
pushd python/src/main/python python3 -m venv publish-venv source ./publish-venv/bin/activate pip install --upgrade pip wheel setuptools twine python3 setup.py bdist_wheel --python-tag=py3 sdist twine upload --skip-existing -u __token__ -p $PYPI_CREDENTIALS dist/* deactivate popd fi fi ================================================ FILE: .utility/publish-docs.sh ================================================ #!/bin/bash # Get the version from the build.properties file filePath=deploy/target/classes/build.properties GEOWAVE_VERSION=$(grep project.version $filePath| awk -F= '{print $2}') echo -e "Copying changelog...\n" cp changelog.html target/site/ echo -e "Publishing site ...\n" # Save docs to latest cp -R target/site $HOME/latest cd $HOME git config --global user.email "geowave-dev@eclipse.org" git config --global user.name "geowave-dev" git clone --quiet --depth 1 --branch=gh-pages https://x-access-token:${GITHUB_TOKEN}@github.com/locationtech/geowave gh-pages > /dev/null cd gh-pages # Back up previous versions mv previous-versions $HOME/previous-versions # Remove old latest rm -rf latest if [[ ! "$GEOWAVE_VERSION" =~ "SNAPSHOT" ]] && [[ ! "$GEOWAVE_VERSION" =~ "RC" ]] ; then # If this isn't a snapshot or release candidate, this becomes the main site echo -e "Publishing release documentation ...\n" cp -Rf $HOME/latest $HOME/site/ else echo -e "Publishing snapshot documentation ...\n" # Otherwise keep old release cp -Rf . $HOME/site/ fi # Save previous versions of the documentation cp -r $HOME/previous-versions $HOME/site/ # Save latest cp -r $HOME/latest $HOME/site/ git rm -r -f -q . cp -Rf $HOME/site/* . # Don't check in big binary blobs # TODO: Push to S3 if we want to link to them via the web site rm -f *.epub *.pdf *.pdfmarks git add -f . 
git commit -m "Lastest docs on successful github build $GITHUB_RUN_NUMBER auto-pushed to gh-pages" git push -fq origin gh-pages > /dev/null echo -e "Published docs to gh-pages.\n" ================================================ FILE: .utility/retry ================================================ #!/usr/bin/env bash set -euo pipefail x() { echo "+ $*" >&2 "$@" } if [[ -z "${RETRY_TESTS}" ]]; then x "$@" && exit 0 else max_retry_time_seconds=$(( 120 * 60 )) retry_delay_seconds=10 END=$(( $(date +%s) + ${max_retry_time_seconds} )) while (( $(date +%s) < $END )); do x "$@" && exit 0 sleep "${retry_delay_seconds}" done echo "$0: retrying [$*] timed out" >&2 exit 1 fi ================================================ FILE: .utility/run-python-tests.sh ================================================ #!/bin/bash # Build and Run Java Gateway mvn -q package -P geowave-tools-singlejar -Dfindbugs.skip=true -DskipTests=true -Dspotbugs.skip=true GEOWAVE_VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec) echo -e "GeoWave version: $GEOWAVE_VERSION\n" nohup java -cp deploy/target/geowave-deploy-${GEOWAVE_VERSION}-tools.jar org.locationtech.geowave.core.cli.GeoWaveMain util python rungateway & echo -e "Gateway started...\n" # Install pip and venv sudo apt-get install -yq python3-pip python3-venv # Run Python tests cd python/src/main/python python3 -m venv tests-venv source ./tests-venv/bin/activate pip install --upgrade pip pip install wheel pip install -r requirements.txt pytest --junitxml=test-report.xml --cov-report= --cov=pygw pygw/test/ EXIT_CODE=$? 
deactivate exit $EXIT_CODE ================================================ FILE: .utility/run-tests.sh ================================================ #!/bin/bash set -ev chmod +x .utility/*.sh .utility/build-dev-resources.sh if [ "$PYTHON_BUILD" == "true" ]; then echo -e "Running Python tests...\n" source .utility/run-python-tests.sh else if [ "$IT_ONLY" == "true" ]; then echo -e "Skipping unit tests w/ verify...\n" wget -q https://archive.apache.org/dist/hadoop/common/hadoop-3.1.2/hadoop-3.1.2.tar.gz tar -xzf ./hadoop-3.1.2.tar.gz hadoop-3.1.2/lib/native/ export LD_LIBRARY_PATH=$(pwd)/hadoop-3.1.2/lib/native/ mvn -q -B verify -am -pl test -Dtest=SkipUnitTests -Dfindbugs.skip -Dspotbugs.skip -DfailIfNoTests=false -P $MAVEN_PROFILES else echo -e "Running unit tests only w/ verify...\n" mvn -q -B verify -Dformatter.action=validate -P $MAVEN_PROFILES fi fi ================================================ FILE: CONTRIBUTING.md ================================================ Before your contribution can be accepted by the project, you need to create an Eclipse Foundation account and electronically sign the Eclipse Contributor Agreement (ECA). - http://www.eclipse.org/legal/ECA.php For more information on contributing to GeoWave, please see our developer guide here: - http://locationtech.github.io/geowave/devguide.html#contributions ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: NOTICE ================================================ Copyright 2019-2020 Maxar Technologies Inc. Copyright 2016-2020 Prominent Edge Copyright 2019-2020 BlackLynx Copyright 2017-2019 Radiant Solutions Copyright 2013-2016 RadiantBlue Technologies Copyright 2016-2017 DigitalGlobe, Inc. Copyright 2013-2017 Booz Allen Hamilton Copyright 2010 Lars Francke – RE: OsmAvro.avsc Copyright 2013-2016 Commonwealth Computer Research, Inc. RE: XZOrderSFC.java Copyright 2014 GeoSolutions RE: WarpNearestOpImage.java Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Portions of this software were developed under contract to the U.S. Government (National Geospatial-Intelligence Agency). ================================================ FILE: README.md ================================================ 

GeoWave

## About | Continuous Integration | License | Chat | |:------------------:|:-------:|:----:| | GitHub Action Test Status | [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) | [![Join the chat at https://gitter.im/locationtech/geowave](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/locationtech/geowave?utm_source=badge&utm_medium=badge&utm_content=badge) | GeoWave is an open source set of software that: * Capabilities * Adds multi-dimensional indexing capability to key/value stores (currently [Apache Accumulo](https://accumulo.apache.org), [Apache HBase](https://hbase.apache.org), [Apache Cassandra](http://cassandra.apache.org/), [Amazon DynamoDB](https://aws.amazon.com/dynamodb/), [Cloud Bigtable](https://cloud.google.com/bigtable/), [Redis](https://redis.io/), [RocksDB](https://rocksdb.org/), and [Apache Kudu](https://kudu.apache.org/), as well as direct FileSystem support) * Adds support for geographic objects and geospatial operators to these stores * Provides Map-Reduce input and output formats for distributed processing and analysis of geospatial data * Geospatial software plugins * [GeoServer](http://geoserver.org/) plugin to allow geospatial data in various key/value stores to be shared and visualized via OGC standard services Basically, GeoWave is working to bridge geospatial software with modern key/value stores and distributed compute systems. 
## The Docs * [GeoWave](https://locationtech.github.io/geowave/latest/index.html) - Latest snapshot documentation homepage * [GeoWave Overview](https://locationtech.github.io/geowave/latest/overview.html) - Overview of GeoWave's capabilities * [Installation Guide](https://locationtech.github.io/geowave/latest/installation-guide.html) - Installation instructions for standalone installers and from RPMs * [Quickstart Guide](https://locationtech.github.io/geowave/latest/quickstart.html) - A quick demo of GeoWave features using the command-line interface * [User Guide](https://locationtech.github.io/geowave/latest/userguide.html) - A guide for using GeoWave through the command-line interface and GeoServer plugin * [Developer Guide](https://locationtech.github.io/geowave/latest/devguide.html) - A guide for developing applications that utilize GeoWave * [Command-Line Interface](https://locationtech.github.io/geowave/latest/commands.html) - Full documentation for the GeoWave CLI * [Changelog](https://locationtech.github.io/geowave/latest/changelog.html) - Changes and features for each of our [GitHub releases](https://github.com/locationtech/geowave/releases) * The underlying principles employed in GeoWave are outlined in past academic publications to include largely the background theory in [Advances in Spatial and Temporal Databases 2017](https://link.springer.com/chapter/10.1007/978-3-319-64367-0_6) and a derivative, more applied paper in [FOSS4G Conference Proceedings 2017](http://scholarworks.umass.edu/cgi/viewcontent.cgi?article=1027&context=foss4g). 
## The Software * We have [multi-platform standalone installers](https://locationtech.github.io/geowave/latest/installation-guide.html#standalone-installers) for the GeoWave's command-line tools to help get started * This is often the quickest and easiest way to get started using GeoWave on your own machine * We have a [RPM repository](https://locationtech.github.io/geowave/latest/downloads.html) * This contains various packages including puppet modules, best used for distributed environments. * See the [Installation Guide](https://locationtech.github.io/geowave/latest/installation-guide.html#installation-from-rpm) for more info. * Maven artifacts are available on Maven Central * And you can always [build from source](https://locationtech.github.io/geowave/latest/devguide.html#development-setup) ## Community * Community support is available on [chat](https://gitter.im/locationtech/geowave) and on [our mailing list](mailto:geowave-dev@eclipse.org). ## Getting Started ### Programmatic Access You can use Maven to reference pre-built GeoWave artifacts with the following pom.xml snippet (replacing `${keyvalue-datastore}` with your data store of choice and `${geowave.version}` with the GeoWave version you'd like to use): ``` org.locationtech.geowave geowave-datastore-${keyvalue-datastore} ${geowave.version} org.locationtech.geowave geowave-adapter-vector ${geowave.version} org.locationtech.geowave geowave-adapter-raster ${geowave.version} ``` Use the libraries available in the `api` package to leverage GeoWave's capabilities (where `` might be `AccumuloRequiredOptions` or `HBaseRequiredOptions` and simple examples of creating the data type and index can be found in `SimpleIngest` within the `examples` directory): ```java DataStore store = DataStoreFactory.createDataStore(); store.addType(, ); try(Writer writer = store.createWriter()){ //write data writer.writer(); } //this just queries everything try(CloseableIterator it = store.query(QueryBuilder.newBuilder().build())){ 
while(it.hasNext()){ //retrieve results matching query criteria and do something it.next(); } } ``` See the [Developer Guide](https://locationtech.github.io/geowave/latest/devguide.html#programmatic-api-examples) for more detailed programmatic API examples. ### Command-line Access Alternatively, you can always use the GeoWave command-line to access the same capabilities: ```bash # Add a new RocksDB data store called myStore in the current directory geowave store add -t rocksdb myStore # Add a spatial index called spatialIdx to myStore geowave index add -t spatial myStore spatialIdx # Ingest a shapefile with states into myStore in the spatialIdx index geowave ingest localToGW -f geotools-vector states.shp myStore spatialIdx # Query all the data in the states type from myStore geowave vector query "SELECT * FROM myStore.states" ``` See the [CLI documentation](https://locationtech.github.io/geowave/latest/commands.html) for a full list of commands and their options. ## Some GeoWave rendered eye candy

Geolife data at city scale

Geolife data at block scale

OSM GPX tracks at country scale

See [Example Screenshots](https://locationtech.github.io/geowave/latest/overview.html#example-screenshots) in the GeoWave Overview for more information. ## Supported versions of core libraries We work to maintain a N and N-1 tested and supported version pace for the following core libraries. | GeoServer | GeoTools | Accumulo | HBase | Hadoop | Java | |:---------:|:--------:|:--------:|:-----:|:------:|:----:| | 2.19.x | 25.x | [1.9.x,2.0.x] | 2.4.x | [2.10.x,3.1.x] | Java8 | * [Apache Maven](http://maven.apache.org/) 3.x or greater is required for building ## Origin GeoWave was originally developed at the National Geospatial-Intelligence Agency (NGA) in collaboration with [RadiantBlue Technologies](http://www.radiantblue.com/) (now [Maxar Technologies](https://www.maxar.com/)) and [Booz Allen Hamilton](http://www.boozallen.com/). The software use, modification, and distribution rights are stipulated within the [Apache 2.0](http://www.apache.org/licenses/LICENSE-2.0.html) license. ## Contributing All pull request contributions to this project will be released under the Apache 2.0 or compatible license. Contributions are welcome and guidelines are provided [here](https://locationtech.github.io/geowave/latest/devguide.html#how-to-contribute). 
Did I mention our [documentation!](https://locationtech.github.io/geowave/latest/index.html) ================================================ FILE: analytics/api/pom.xml ================================================ 4.0.0 geowave-analytic-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-analytic-api GeoWave Analytics API org.locationtech.geowave geowave-adapter-vector ${project.version} jdk.tools jdk.tools mockito-all org.mockito javax.servlet * org.geoserver gs-wms org.geoserver gs-main org.locationtech.geowave geowave-adapter-raster ${project.version} jdk.tools jdk.tools mockito-all org.mockito javax.servlet * org.geoserver gs-wms org.geoserver gs-main ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/AdapterWithObjectWritable.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Writable; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.mapreduce.HadoopWritableSerializationTool; public class AdapterWithObjectWritable implements Writable { private ObjectWritable objectWritable; private Short internalAdapterId = null; private ByteArray dataId; public void setObject(final ObjectWritable data) { objectWritable = data; } public ObjectWritable getObjectWritable() { return objectWritable; } protected void setObjectWritable(final ObjectWritable objectWritable) { this.objectWritable = objectWritable; } public Short getInternalAdapterId() { return internalAdapterId; } public void setInternalAdapterId(final short internalAdapterId) { this.internalAdapterId = internalAdapterId; } public ByteArray getDataId() { return dataId; } public void setDataId(final ByteArray dataId) { this.dataId = dataId; } @Override public void readFields(final DataInput input) throws IOException { internalAdapterId = input.readShort(); final int dataIdLength = input.readUnsignedShort(); if (dataIdLength > 0) { final byte[] dataIdBinary = new byte[dataIdLength]; input.readFully(dataIdBinary); dataId = new ByteArray(dataIdBinary); } if (objectWritable == null) { objectWritable = new ObjectWritable(); } objectWritable.readFields(input); } @Override public void write(final DataOutput output) throws IOException { output.writeShort(internalAdapterId); if (dataId != null) { final byte[] dataIdBinary = 
dataId.getBytes(); output.writeShort((short) dataIdBinary.length); output.write(dataIdBinary); } else { output.writeShort(0); } objectWritable.write(output); } public static void fillWritableWithAdapter( final HadoopWritableSerializationTool serializationTool, final AdapterWithObjectWritable writableToFill, final short internalAdapterId, final ByteArray dataId, final Object entry) { writableToFill.setInternalAdapterId(internalAdapterId); writableToFill.setDataId(dataId); writableToFill.setObject(serializationTool.toWritable(internalAdapterId, entry)); } public static Object fromWritableWithAdapter( final HadoopWritableSerializationTool serializationTool, final AdapterWithObjectWritable writableToExtract) { final short internalAdapterId = writableToExtract.getInternalAdapterId(); final Object innerObj = writableToExtract.objectWritable.get(); return (innerObj instanceof Writable) ? serializationTool.getHadoopWritableSerializerForAdapter(internalAdapterId).fromWritable( (Writable) innerObj) : innerObj; } @Override public String toString() { return "AdapterWithObjectWritable [ internalAdapterId=" + internalAdapterId + ", dataId=" + dataId.getString() + "]"; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticFeature.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.util.List; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.feature.type.BasicFeatureTypes; import org.geotools.referencing.CRS; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A set of utilities to describe and create a simple feature for use within the set of analytics. 
*/
public class AnalyticFeature {
  static final Logger LOGGER = LoggerFactory.getLogger(AnalyticFeature.class);

  /**
   * Create a cluster feature populated with the standard analytic attributes.
   *
   * @param featureType the schema to build against (see {@link #createFeatureAdapter})
   * @param batchId identifier of the analytic batch run
   * @param dataId feature id assigned to the new feature
   * @param name human-readable name attribute
   * @param groupID cluster/group identifier
   * @param weight weight attribute
   * @param geometry geometry attribute
   * @param extraDimensionNames names of additional numeric dimensions; must align one-to-one with
   *        {@code extraDimensions}
   * @param extraDimensions values of the additional numeric dimensions
   * @param zoomLevel zoom level attribute
   * @param iteration iteration attribute
   * @param count count attribute
   * @return the populated feature
   * @throws IllegalArgumentException if the extra dimension names and values differ in length
   */
  public static SimpleFeature createGeometryFeature(
      final SimpleFeatureType featureType,
      final String batchId,
      final String dataId,
      final String name,
      final String groupID,
      final double weight,
      final Geometry geometry,
      final String[] extraDimensionNames,
      final double[] extraDimensions,
      final int zoomLevel,
      final int iteration,
      final long count) {
    if (extraDimensionNames.length != extraDimensions.length) {
      LOGGER.error(
          "The number of extraDimension names does not equal the number of extraDimensions");
      throw new IllegalArgumentException(
          "The number of extraDimension names does not equal the number of extraDimensions");
    }
    // Seed each attribute with its declared default before applying the known values.
    final List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      defaults[p++] = descriptor.getDefaultValue();
    }
    final SimpleFeature newFeature = SimpleFeatureBuilder.build(featureType, defaults, dataId);
    newFeature.setAttribute(ClusterFeatureAttribute.NAME.attrName(), name);
    newFeature.setAttribute(ClusterFeatureAttribute.GROUP_ID.attrName(), groupID);
    newFeature.setAttribute(ClusterFeatureAttribute.ITERATION.attrName(), iteration);
    newFeature.setAttribute(ClusterFeatureAttribute.WEIGHT.attrName(), weight);
    newFeature.setAttribute(ClusterFeatureAttribute.BATCH_ID.attrName(), batchId);
    newFeature.setAttribute(ClusterFeatureAttribute.COUNT.attrName(), count);
    newFeature.setAttribute(ClusterFeatureAttribute.GEOMETRY.attrName(), geometry);
    newFeature.setAttribute(ClusterFeatureAttribute.ZOOM_LEVEL.attrName(), zoomLevel);
    int i = 0;
    for (final String dimName : extraDimensionNames) {
      // Double.valueOf replaces the deprecated new Double(...) boxing constructor.
      newFeature.setAttribute(dimName, Double.valueOf(extraDimensions[i++]));
    }
    return newFeature;
  }

  /**
   * Build a {@link FeatureDataAdapter} whose schema contains the given cluster attributes plus any
   * extra numeric dimensions.
   *
   * @param centroidDataTypeId type name for the schema
   * @param extraNumericDimensions additional Double attributes to append
   * @param namespaceURI namespace, or null for {@link BasicFeatureTypes#DEFAULT_NAMESPACE}
   * @param SRID spatial reference code used for both the SRS string and decoded CRS
   * @param attributes the cluster attributes to include
   * @param geometryClass concrete geometry binding for the GEOMETRY attribute
   * @return the adapter, or null if schema creation fails (e.g. an undecodable SRID)
   */
  public static FeatureDataAdapter createFeatureAdapter(
      final String centroidDataTypeId,
      final String[] extraNumericDimensions,
      final String namespaceURI,
      final String SRID,
      final ClusterFeatureAttribute[] attributes,
      final Class<? extends Geometry> geometryClass) {
    try {
      final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
      builder.setName(centroidDataTypeId);
      builder.setNamespaceURI(
          namespaceURI == null ? BasicFeatureTypes.DEFAULT_NAMESPACE : namespaceURI);
      builder.setSRS(SRID);
      builder.setCRS(CRS.decode(SRID, true));
      for (final ClusterFeatureAttribute attrVal : attributes) {
        // The geometry attribute is bound to the caller-supplied geometry class.
        builder.add(
            attrVal.name,
            attrVal.equals(ClusterFeatureAttribute.GEOMETRY) ? geometryClass : attrVal.type);
      }
      for (final String extraDim : extraNumericDimensions) {
        builder.add(extraDim, Double.class);
      }
      return new FeatureDataAdapter(builder.buildFeatureType());
    } catch (final Exception e) {
      // Deliberate best-effort: callers receive null on schema failure.
      LOGGER.warn("Schema Creation Error. Hint: Check the SRID.", e);
    }
    return null;
  }

  /** Convenience overload of {@link #createFeatureAdapter} using all attributes and Geometry. */
  public static FeatureDataAdapter createGeometryFeatureAdapter(
      final String centroidDataTypeId,
      final String[] extraNumericDimensions,
      final String namespaceURI,
      final String SRID) {
    return createFeatureAdapter(
        centroidDataTypeId,
        extraNumericDimensions,
        namespaceURI,
        SRID,
        ClusterFeatureAttribute.values(),
        Geometry.class);
  }

  /** Standard attribute names and bindings shared by analytic cluster features. */
  public static enum ClusterFeatureAttribute {
    NAME("name", String.class),
    GROUP_ID("groupID", String.class),
    ITERATION("iteration", Integer.class),
    GEOMETRY("geometry", Geometry.class),
    WEIGHT("weight", Double.class),
    COUNT("count", Long.class),
    ZOOM_LEVEL("level", Integer.class),
    BATCH_ID("batchID", String.class);

    private final String name;
    private final Class<?> type;

    ClusterFeatureAttribute(final String name, final Class<?> type) {
      this.name = name;
      this.type = type;
    }

    public String attrName() {
      return name;
    }

    public Class<?> getType() {
      return type;
    }
  }
}

================================================
FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticItemWrapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse
Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import org.locationtech.jts.geom.Geometry; /** * Wrap an object used to by analytical processes. This class provides generic wrapper to specific * functions associated with analytic processes such as managing centroids. * * @param */ public interface AnalyticItemWrapper { public String getID(); public T getWrappedItem(); public long getAssociationCount(); public void resetAssociatonCount(); public void incrementAssociationCount(long increment); public int getIterationID(); public String getName(); public String[] getExtraDimensions(); public double[] getDimensionValues(); public Geometry getGeometry(); public double getCost(); public void setCost(double cost); public String getGroupID(); public void setGroupID(String groupID); public void setZoomLevel(int level); public int getZoomLevel(); public void setBatchID(String batchID); public String getBatchID(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticItemWrapperFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.IOException; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.jts.geom.Coordinate; import org.slf4j.Logger; /** * Create an analytic item wrapper for the provided item. * * @param the type of the item to wrap */ public interface AnalyticItemWrapperFactory { /** * Wrap the item. */ public AnalyticItemWrapper create(T item); /** * Creates a new item based on the old item with new coordinates and dimension values */ public AnalyticItemWrapper createNextItem( final T feature, final String groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues); public void initialize(final JobContext context, Class scope, Logger logger) throws IOException; } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/AnalyticPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.BatchIdFilter; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.BatchIdQuery; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; public class AnalyticPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return new PersistableIdAndConstructor[] { new PersistableIdAndConstructor((short) 700, GeoObjectDimensionValues::new), new PersistableIdAndConstructor((short) 701, BatchIdFilter::new), new PersistableIdAndConstructor((short) 702, DistortionDataAdapter::new), new PersistableIdAndConstructor((short) 703, PersistableStore::new), new PersistableIdAndConstructor((short) 704, BatchIdQuery::new)}; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/GeoObjectDimensionValues.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; /** * Extracted numeric dimension values associated with an item or a sum of dimension values from * multiple items. */ public class GeoObjectDimensionValues implements Persistable { public double x = 0.0; public double y = 0.0; public double z = 0.0; public double[] values = new double[0]; public double distance = 0.0; public long count = 0; public GeoObjectDimensionValues(final int extraValuesCount) { values = new double[extraValuesCount]; } public GeoObjectDimensionValues() {} public GeoObjectDimensionValues( final double x, final double y, final double z, final double[] extraDimensions, final double distance) { super(); this.x = x; this.y = y; this.z = z; values = extraDimensions; this.distance = distance; count = 1; } public void add(final GeoObjectDimensionValues association) { x += association.x; y += association.y; z += association.z; for (int i = 0; i < values.length; i++) { values[i] += association.values[i]; } distance += association.distance; count += association.count; } public void set( final double x, final double y, final double z, final double[] extraDimensions, final double distance) { this.x = x; this.y = y; this.z = z; values = extraDimensions; this.distance = distance; count = 1; } public long getCount() { return count; } public void setCount(final long count) { this.count = count; } public double getX() { return x; } public void setX(final double x) { this.x = x; } public double getY() { return y; } public void 
setY(final double y) { this.y = y; } public double getZ() { return z; } public void setZ(final double z) { this.z = z; } public double getDistance() { return distance; } public void setDistance(final double distance) { this.distance = distance; } @Override public byte[] toBinary() { final ByteBuffer b = ByteBuffer.allocate( ((4 + values.length) * 8) + VarintUtils.unsignedIntByteLength(values.length) + VarintUtils.unsignedLongByteLength(count)); VarintUtils.writeUnsignedLong(count, b); b.putDouble(x); b.putDouble(y); b.putDouble(z); b.putDouble(distance); VarintUtils.writeUnsignedInt(values.length, b); for (final double value : values) { b.putDouble(value); } return b.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer b = ByteBuffer.wrap(bytes); count = VarintUtils.readUnsignedLong(b); x = b.getDouble(); y = b.getDouble(); z = b.getDouble(); distance = b.getDouble(); int i = VarintUtils.readUnsignedInt(b); values = new double[i]; for (; i > 0; i--) { values[i - 1] = b.getDouble(); } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/GeometryCalculations.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic;

import java.util.LinkedList;
import java.util.List;
import javax.measure.Unit;
import javax.measure.quantity.Length;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.GeodeticCalculator;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.geometry.DirectPosition;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tech.units.indriya.unit.Units;

/**
 * Geodetic helper that builds JTS envelope geometries centered on a coordinate, splitting the
 * result into two geometries when the requested region crosses the date line.
 *
 * <p>NOTE(review): generic type parameters (likely {@code List<Geometry>} and
 * {@code Unit<Length>}) appear to have been stripped during extraction; the raw types below are
 * preserved untouched.
 */
public class GeometryCalculations {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeometryCalculations.class);
  final GeometryFactory factory;
  final CoordinateReferenceSystem crs;
  // cached axis extents of the supplied CRS: axis 0 = x/longitude, axis 1 = y/latitude
  final double xMin, yMin, xMax, yMax;

  public GeometryCalculations(final CoordinateReferenceSystem crs) {
    // SRID is hard-coded to 4326 even though the CRS is supplied -- presumably the supplied CRS
    // is always geographic (EPSG:4326); TODO confirm
    factory = new GeometryFactory(new PrecisionModel(), 4326);
    this.crs = crs;
    xMin = crs.getCoordinateSystem().getAxis(0).getMinimumValue();
    xMax = crs.getCoordinateSystem().getAxis(0).getMaximumValue();
    yMin = crs.getCoordinateSystem().getAxis(1).getMinimumValue();
    yMax = crs.getCoordinateSystem().getAxis(1).getMaximumValue();
  }

  /**
   * Build geometries with the provided coordinate at the center. The width of the geometry is
   * twice the distance provided. More than one geometry is returned when passing the date line.
   *
   * @param distances [x,y] = [longitude, latitude] half-widths, in {@code unit}
   * @param unit the length unit of {@code distances} (converted to metres internally)
   * @param coordinate the center point (x = longitude, y = latitude)
   * @return the geometries that were built; NOTE(review): returns {@code null} (not an empty
   *         list) if the geodetic transform fails -- callers must null-check
   */
  public List buildSurroundingGeometries(
      final double[] distances,
      final Unit unit,
      final Coordinate coordinate) {
    final List geos = new LinkedList<>();
    final GeodeticCalculator geoCalc = new GeodeticCalculator();
    geoCalc.setStartingGeographicPoint(coordinate.x, coordinate.y);
    try {
      // walk north/east/west/south from the center to find the envelope's edge positions
      geoCalc.setDirection(0, unit.getConverterTo(Units.METRE).convert(distances[1]));
      final DirectPosition north = geoCalc.getDestinationPosition();
      geoCalc.setDirection(90, unit.getConverterTo(Units.METRE).convert(distances[0]));
      final DirectPosition east = geoCalc.getDestinationPosition();
      // re-set the start point before heading west/south; appears redundant since
      // setDirection does not move the start point -- TODO confirm
      geoCalc.setStartingGeographicPoint(coordinate.x, coordinate.y);
      geoCalc.setDirection(-90, unit.getConverterTo(Units.METRE).convert(distances[0]));
      final DirectPosition west = geoCalc.getDestinationPosition();
      geoCalc.setDirection(180, unit.getConverterTo(Units.METRE).convert(distances[1]));
      final DirectPosition south = geoCalc.getDestinationPosition();
      final double x1 = west.getOrdinate(0);
      final double x2 = east.getOrdinate(0);
      final double y1 = north.getOrdinate(1);
      final double y2 = south.getOrdinate(1);
      handleBoundaries(geos, coordinate, x1, x2, y1, y2);
      return geos;
    } catch (final TransformException ex) {
      LOGGER.error("Unable to build geometry", ex);
    }
    return null;
  }

  /**
   * Add the envelope(s) covering [x1,x2] x [y1,y2] to {@code geos}. If the west or east edge
   * wrapped across the date line (detected by a sign flip relative to the center's longitude),
   * the region is split into two envelopes, one on each side of the date line. Latitudes are
   * clamped to the CRS's valid range.
   */
  private void handleBoundaries(
      final List geos,
      final Coordinate coordinate,
      final double x1,
      final double x2,
      final double y1,
      final double y2) {
    if (Math.signum(x1) > Math.signum(coordinate.x)) {
      // west edge wrapped east across the date line: [x1, xMax] plus [xMin, x2]
      ReferencedEnvelope bounds =
          new ReferencedEnvelope(x1, xMax, Math.max(y1, yMin), Math.min(y2, yMax), crs);
      geos.add(factory.toGeometry(bounds));
      bounds = new ReferencedEnvelope(xMin, x2, Math.max(y1, yMin), Math.min(y2, yMax), crs);
      geos.add(factory.toGeometry(bounds));
    } else if (Math.signum(x2) < Math.signum(coordinate.x)) {
      // east edge wrapped west across the date line: [xMin, x2] plus [x1, xMax]
      ReferencedEnvelope bounds =
          new ReferencedEnvelope(xMin, x2, Math.max(y1, yMin), Math.min(y2, yMax), crs);
      geos.add(factory.toGeometry(bounds));
      bounds = new ReferencedEnvelope(x1, xMax, Math.max(y1, yMin), Math.min(y2, yMax), crs);
      geos.add(factory.toGeometry(bounds));
    } else {
      // no wrap: a single envelope
      final ReferencedEnvelope bounds =
          new ReferencedEnvelope(x1, x2, Math.max(y1, yMin), Math.min(y2, yMax), crs);
      geos.add(factory.toGeometry(bounds));
    }
  }
}

================================================
FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/GeometryDataSetGenerator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import java.util.UUID; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.math3.geometry.euclidean.twod.Vector2D; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; /** Generate clusters of geometries. */ import org.geotools.referencing.CRS; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.feature.type.GeometryType; import org.opengis.referencing.FactoryException; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Generate clusters of 
geometries. */
public class GeometryDataSetGenerator {
  static final Logger LOGGER = LoggerFactory.getLogger(GeometryDataSetGenerator.class);
  private final Random rand = new Random();
  private final GeometryFactory geoFactory = new GeometryFactory();
  // distance metric used to keep cluster centers apart
  private final DistanceFn distanceFunction;
  // builder for the feature type all generated features conform to
  private final SimpleFeatureBuilder builder;
  // coordinate system boundaries
  private SimpleFeature minFeature;
  private double[] minAxis;
  private double[] maxAxis;
  private CoordinateSystem coordSystem;
  private boolean includePolygons = true;

  // NOTE(review): generic type parameters (e.g. List<SimpleFeature>, Pair<double[], double[]>)
  // appear to have been stripped during extraction; the raw types below are preserved untouched.

  public GeometryDataSetGenerator(
      final DistanceFn distanceFunction,
      final SimpleFeatureBuilder builder) {
    super();
    this.distanceFunction = distanceFunction;
    this.builder = builder;
    // caches the CRS axis bounds and the minimum-corner feature
    init();
  }

  public boolean isIncludePolygons() {
    return includePolygons;
  }

  public void setIncludePolygons(final boolean includePolygons) {
    this.includePolygons = includePolygons;
  }

  /** @return a feature at the minimum corner of the coordinate system (set by init()) */
  public SimpleFeature getCorner() {
    return minFeature;
  }

  /**
   * Build a closed polygon over the corners of the coordinate system's full extent.
   *
   * <p>NOTE(review): {@code coords = dims^2} yields 4 corners for the 2D case; the corner count
   * for other dimensionalities looks questionable -- presumably only used with 2D CRSs.
   */
  public Geometry getBoundingRegion() {
    final int[] adder = {1, 2, -1, 2};
    int num = 0;
    int addCnt = 0;
    final int dims = coordSystem.getDimension();
    final int coords = (int) Math.pow(dims, 2);
    final Coordinate[] coordinates = new Coordinate[coords + 1];
    for (int i = 0; i < coords; i++) {
      coordinates[i] = new Coordinate();
      for (int j = 0; j < dims; j++) {
        // bit j of num selects min vs. max for axis j; adder sequences the corner order
        final boolean isMin = ((num >> j) % 2) == 0;
        coordinates[i].setOrdinate(j, isMin ? minAxis[j] : maxAxis[j]);
      }
      num += adder[addCnt];
      addCnt = (addCnt + 1) % 4;
    }
    // close the ring
    coordinates[coords] = coordinates[0];
    return geoFactory.createPolygon(coordinates);
  }

  /**
   * Calculate the per-axis range for the given bounds, scaled by a factor.
   *
   * @param factor fraction of the full range to return per axis
   * @param minAxis per-axis minimum values
   * @param maxAxis per-axis maximum values
   * @return per-axis {@code (max - min) * factor}
   */
  private double[] createRange(
      final double factor,
      final double[] minAxis,
      final double[] maxAxis) {
    final double[] range = new double[minAxis.length];
    for (int i = 0; i < minAxis.length; i++) {
      range[i] = (maxAxis[i] - minAxis[i]) * factor;
    }
    return range;
  }

  /**
   * Pick a random grid cell and supply its boundary. The grid is determined by the parameter,
   * which provides a percentage of distance over the total range for each cell.
   *
   * @param minCenterDistanceFactor fraction of the total range used as the cell size
   * @return a pair of (per-axis minimums, per-axis maximums) for the chosen cell
   */
  private Pair gridCellBounds(
      final double minCenterDistanceFactor,
      final double[] minAxis,
      final double[] maxAxis) {
    final double[] range = createRange(1.0, minAxis, maxAxis);
    final double[] min = new double[range.length];
    final double[] max = new double[range.length];
    for (int i = 0; i < range.length; i++) {
      // HP Fortify "Insecure Randomness" false positive
      // This random number is not used for any purpose
      // related to security or cryptography
      min[i] =
          Math.max(
              minAxis[i]
                  + (minCenterDistanceFactor
                      * (rand.nextInt(Integer.MAX_VALUE) % (range[i] / minCenterDistanceFactor))),
              minAxis[i]);
      max[i] = Math.min(min[i] + (minCenterDistanceFactor * range[i]), maxAxis[i]);
    }
    return Pair.of(min, max);
  }

  /**
   * Write the given features to GeoWave under a spatial index, using the feature type of the
   * first feature for the adapter.
   *
   * <p>NOTE(review): assumes {@code featureData} is non-empty ({@code get(0)} would throw
   * otherwise); the {@code featureBuilder.reset()} call inside the loop appears to have no
   * effect on what is written -- TODO confirm.
   */
  public void writeToGeoWave(final DataStore dataStore, final List featureData)
      throws IOException {
    final Index index =
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureData.get(0).getFeatureType());
    final SimpleFeatureBuilder featureBuilder =
        new SimpleFeatureBuilder(featureData.get(0).getFeatureType());
    LOGGER.info(
        "Writing " + featureData.size() + " records to " + adapter.getFeatureType().getTypeName());
    dataStore.addType(adapter, index);
    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {
      for (final SimpleFeature feature : featureData) {
        writer.write(feature);
        featureBuilder.reset();
      }
    }
  }

  /** Generate a clustered point set over the full extent of the coordinate system. */
  public List generatePointSet(
      final double minCenterDistanceFactor,
      final double outlierFactor,
      final int numberOfCenters,
      final int minSetSize) {
    return this.generatePointSet(
        minCenterDistanceFactor, outlierFactor, numberOfCenters, minSetSize, minAxis, maxAxis);
  }

  /** Generate features distributed with varying density along the given line. */
  public List generatePointSet(
      final LineString line, final double distanceFactor, final int points) {
    final List pointSet = new ArrayList<>();
    for (final Point point : CurvedDensityDataGeneratorTool.generatePoints(
        line, distanceFactor, points)) {
      pointSet.add(createFeatureWithGeometry(point));
    }
    return pointSet;
  }

  /**
   * Generate a point set of at least {@code minSetSize} features: cluster centers spaced at
   * least a factor of the maximum extent apart, clustered members around each center, and a
   * fraction of random points as potential outliers.
   *
   * @param minCenterDistanceFactor fraction of the space's maximum distance used both to space
   *        centers and to size each cluster's bounding range
   * @param outlierFactor fraction of {@code minSetSize} left to uniformly random placement
   * @param numberOfCenters number of cluster centers (must be less than {@code minSetSize})
   * @param minSetSize total number of features to generate
   * @throws IllegalArgumentException if {@code numberOfCenters >= minSetSize}
   */
  public List generatePointSet(
      final double minCenterDistanceFactor,
      final double outlierFactor,
      final int numberOfCenters,
      final int minSetSize,
      final double[] minAxis,
      final double[] maxAxis) {
    final List pointSet = new ArrayList<>();
    final List minForCenter = new ArrayList<>();
    final List maxForCenter = new ArrayList<>();
    final double[] range = createRange(minCenterDistanceFactor, minAxis, maxAxis);
    if (numberOfCenters >= minSetSize) {
      // NOTE(review): "much" should read "must" in these messages
      LOGGER.error("The number of centers passed much be less than the minimum set size");
      throw new IllegalArgumentException(
          "The number of centers passed much be less than the minimum set size");
    }
    final double minDistance = computeMinDistance(minCenterDistanceFactor, minAxis, maxAxis);
    /** Pick the initial centers which have minimum distance from each other. */
    while (pointSet.size() < numberOfCenters) {
      final Pair axis = gridCellBounds(minCenterDistanceFactor, minAxis, maxAxis);
      final SimpleFeature nextFeature = createNewFeature(axis.getLeft(), axis.getRight());
      if (isFarEnough(nextFeature, pointSet, minDistance)) {
        pointSet.add(nextFeature);
      }
    }
    /**
     * Calculate the boundaries around each center point to place additional points, thus creating
     * clusters
     */
    for (final SimpleFeature center : pointSet) {
      final double[] centerMinAxis = new double[coordSystem.getDimension()];
      final double[] centerMaxAxis = new double[coordSystem.getDimension()];
      final Geometry geo = (Geometry) center.getDefaultGeometry();
      final Coordinate centerCoord = geo.getCentroid().getCoordinate();
      for (int i = 0; i < centerMinAxis.length; i++) {
        centerMinAxis[i] = centerCoord.getOrdinate(i) - (range[i] / 2.0);
        centerMaxAxis[i] = centerCoord.getOrdinate(i) + (range[i] / 2.0);
      }
      minForCenter.add(centerMinAxis);
      maxForCenter.add(centerMaxAxis);
    }
    /*
     * Pick a random center point and add a new geometry with the bounding range around that point.
     */
    final int clusterdItemsCount = (int) Math.ceil((minSetSize) * (1.0 - outlierFactor));
    while (pointSet.size() < clusterdItemsCount) {
      // HP Fortify "Insecure Randomness" false positive
      // This random number is not used for any purpose
      // related to security or cryptography
      final int centerPos = rand.nextInt(Integer.MAX_VALUE) % minForCenter.size();
      pointSet.add(createNewFeature(minForCenter.get(centerPos), maxForCenter.get(centerPos)));
    }
    /** Add random points as potential outliers (no guarantees) */
    while (pointSet.size() < minSetSize) {
      pointSet.add(createNewFeature(minAxis, maxAxis));
    }
    return pointSet;
  }

  /** Pad the given set with uniformly random features until it reaches {@code minSetSize}. */
  public List addRandomNoisePoints(
      final List pointSet,
      final int minSetSize,
      final double[] minAxis,
      final double[] maxAxis) {
    while (pointSet.size() < minSetSize) {
      pointSet.add(createNewFeature(minAxis, maxAxis));
    }
    return pointSet;
  }

  /** Cache the CRS axis bounds and build the minimum-corner feature. */
  private void init() {
    coordSystem = builder.getFeatureType().getCoordinateReferenceSystem().getCoordinateSystem();
    minAxis = new double[coordSystem.getDimension()];
    maxAxis = new double[coordSystem.getDimension()];
    for (int i = 0; i < coordSystem.getDimension(); i++) {
      final CoordinateSystemAxis axis = coordSystem.getAxis(i);
      minAxis[i] = axis.getMinimumValue();
      maxAxis[i] = axis.getMaximumValue();
    }
    final int dims = coordSystem.getDimension();
    final Coordinate coordinate = new Coordinate();
    for (int i = 0; i < dims; i++) {
      coordinate.setOrdinate(i, minAxis[i]);
    }
    minFeature = createFeatureWithGeometry(geoFactory.createPoint(coordinate));
  }

  /** @return true if {@code feature} is at least {@code minDistance} from every item in the set */
  private boolean isFarEnough(
      final SimpleFeature feature,
      final List set,
      final double minDistance) {
    for (final SimpleFeature setItem : set) {
      if (distanceFunction.measure(feature, setItem) < minDistance) {
        return false;
      }
    }
    return true;
  }

  /**
   * Find the maximum distance of the entire space (min corner to max corner) and multiply that by
   * the distance factor to determine a minimum distance each initial center point occurs from
   * each other.
   *
   * @param minCenterDistanceFactor fraction of the space's diagonal distance (asserted < 0.75)
   * @return the minimum allowed inter-center distance
   */
  private double computeMinDistance(
      final double minCenterDistanceFactor,
      final double[] minAxis,
      final double[] maxAxis) {
    assert minCenterDistanceFactor < 0.75;
    final int dims = coordSystem.getDimension();
    Coordinate coordinate = new Coordinate();
    for (int i = 0; i < dims; i++) {
      coordinate.setOrdinate(i, minAxis[i]);
    }
    final SimpleFeature minFeature = createFeatureWithGeometry(geoFactory.createPoint(coordinate));
    coordinate = new Coordinate();
    for (int i = 0; i < dims; i++) {
      coordinate.setOrdinate(i, maxAxis[i]);
    }
    final SimpleFeature maxFeature = createFeatureWithGeometry(geoFactory.createPoint(coordinate));
    return minCenterDistanceFactor * distanceFunction.measure(minFeature, maxFeature);
  }

  /**
   * Create a random feature within the given bounds: a point (1 coordinate), a line string (2),
   * or the convex hull of 3-5 coordinates when polygons are enabled. After the first coordinate,
   * the bounds are tightened around it (see {@link #constrain}) to keep shapes small.
   */
  private SimpleFeature createNewFeature(final double[] minAxis, final double[] maxAxis) {
    final int dims = coordSystem.getDimension();
    // HP Fortify "Insecure Randomness" false positive
    // This random number is not used for any purpose
    // related to security or cryptography
    final int shapeSize = includePolygons ? (rand.nextInt(Integer.MAX_VALUE) % 5) + 1 : 1;
    // one extra slot to close the ring when building a polygon
    final Coordinate[] shape = new Coordinate[shapeSize > 2 ? shapeSize + 1 : shapeSize];
    final double[] constrainedMaxAxis = Arrays.copyOf(maxAxis, maxAxis.length);
    final double[] constrainedMinAxis = Arrays.copyOf(minAxis, minAxis.length);
    for (int s = 0; s < shapeSize; s++) {
      final Coordinate coordinate = new Coordinate();
      for (int i = 0; i < dims; i++) {
        // HP Fortify "Insecure Randomness" false positive
        // This random number is not used for any purpose
        // related to security or cryptography
        coordinate.setOrdinate(
            i,
            constrainedMinAxis[i]
                + (rand.nextDouble() * (constrainedMaxAxis[i] - constrainedMinAxis[i])));
      }
      shape[s] = coordinate;
      if (s == 0) {
        constrain(coordinate, constrainedMaxAxis, constrainedMinAxis);
      }
    }
    if (shapeSize > 2) {
      shape[shapeSize] = shape[0];
      return createFeatureWithGeometry(geoFactory.createLinearRing(shape).convexHull());
    } else if (shapeSize == 2) {
      return createFeatureWithGeometry(geoFactory.createLineString(shape));
    } else {
      return createFeatureWithGeometry(geoFactory.createPoint(shape[0]));
    }
  }

  public GeometryFactory getFactory() {
    return geoFactory;
  }

  /**
   * Change the constrain min and max to center around the coordinate to keep the polygons tight
   * (0.1% of the current range on each side).
   *
   * @param coordinate the anchor coordinate
   * @param constrainedMaxAxis per-axis maximums, tightened in place
   * @param constrainedMinAxis per-axis minimums, tightened in place
   */
  private void constrain(
      final Coordinate coordinate,
      final double[] constrainedMaxAxis,
      final double[] constrainedMinAxis) {
    for (int i = 0; i < constrainedMaxAxis.length; i++) {
      final double range = (constrainedMaxAxis[i] - constrainedMinAxis[i]) * 0.001;
      constrainedMaxAxis[i] = Math.min(coordinate.getOrdinate(i) + range, constrainedMaxAxis[i]);
      constrainedMinAxis[i] = Math.max(coordinate.getOrdinate(i) - range, constrainedMinAxis[i]);
    }
  }

  /**
   * Build a feature with a random UUID id; the geometry attribute receives {@code geometry},
   * String attributes receive random UUIDs, all other attributes are left null.
   */
  private SimpleFeature createFeatureWithGeometry(final Geometry geometry) {
    final Object[] values = new Object[builder.getFeatureType().getAttributeCount()];
    for (int i = 0; i < values.length; i++) {
      final AttributeDescriptor desc = builder.getFeatureType().getDescriptor(i);
      if (desc.getType() instanceof GeometryType) {
        values[i] = geometry;
      } else {
        final Class binding = desc.getType().getBinding();
        if (String.class.isAssignableFrom(binding)) {
          values[i] = UUID.randomUUID().toString();
        }
      }
    }
    return builder.buildFeature(UUID.randomUUID().toString(), values);
  }

  // NOTE(review): a large block of commented-out demo main() code referencing obsolete
  // command-line option APIs was removed here; it was the only caller of getBuilder() below.

  /** Build a (geom, name, count) EPSG:4326 feature builder. Currently unused; kept for demos. */
  private static SimpleFeatureBuilder getBuilder(final String name) throws FactoryException {
    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
    typeBuilder.setName(name);
    typeBuilder.setCRS(CRS.decode("EPSG:4326", true)); // <- Coordinate
    // reference
    // add attributes in order
    typeBuilder.add("geom", Geometry.class);
    typeBuilder.add("name", String.class);
    typeBuilder.add("count", Long.class);
    // build the type
    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());
  }

  /**
   * Generates points along a line with density proportional to each segment's length, scattered
   * orthogonally from the segment by a Gaussian amount.
   */
  public static class CurvedDensityDataGeneratorTool {
    private static final CoordinateCircleDistanceFn DISTANCE_FN = new CoordinateCircleDistanceFn();

    private CurvedDensityDataGeneratorTool() {}

    /**
     * @param line the path to follow
     * @param distanceFactor scales the Gaussian orthogonal scatter
     * @param points total number of points, apportioned per segment by relative length
     */
    public static final List generatePoints(
        final LineString line, final double distanceFactor, final int points) {
      final List results = new ArrayList<>();
      Coordinate lastCoor = null;
      double distanceTotal = 0.0;
      // first pass: measure each segment so points can be apportioned by length
      final double[] distancesBetweenCoords = new double[line.getCoordinates().length - 1];
      int i = 0;
      for (final Coordinate coor : line.getCoordinates()) {
        if (lastCoor != null) {
          distancesBetweenCoords[i] = Math.abs(DISTANCE_FN.measure(lastCoor, coor));
          distanceTotal += distancesBetweenCoords[i++];
        }
        lastCoor = coor;
      }
      lastCoor = null;
      i = 0;
      // second pass: generate each segment's share of the points
      for (final Coordinate coor : line.getCoordinates()) {
        if (lastCoor != null) {
          results.addAll(
              generatePoints(
                  line.getFactory(),
                  toVec(coor),
                  toVec(lastCoor),
                  distanceFactor,
                  (int) ((points) * (distancesBetweenCoords[i++] / distanceTotal))));
        }
        lastCoor = coor;
      }
      return results;
    }

    /** Scatter {@code points} points around the segment from coordinateOne to coordinateTwo. */
    private static final List generatePoints(
        final GeometryFactory factory,
        final Vector2D coordinateOne,
        final Vector2D coordinateTwo,
        final double distanceFactor,
        final int points) {
      final List results = new ArrayList<>();
      final Random rand = new Random();
      final Vector2D originVec = coordinateTwo.subtract(coordinateOne);
      for (int i = 0; i < points; i++) {
        // HP Fortify "Insecure Randomness" false positive
        // This random number is not used for any purpose
        // related to security or cryptography
        final double factor = rand.nextDouble();
        // random position along the segment...
        final Vector2D projectionPoint = originVec.scalarMultiply(factor);
        final double direction = rand.nextGaussian() * distanceFactor;
        // ...offset along the segment's perpendicular by a Gaussian amount
        final Vector2D orthogonal = new Vector2D(originVec.getY(), -originVec.getX());
        results.add(
            factory.createPoint(
                toCoordinate(
                    orthogonal.scalarMultiply(direction).add(projectionPoint).add(coordinateOne))));
      }
      return results;
    }

    public static Coordinate toCoordinate(final Vector2D vec) {
      return new Coordinate(vec.getX(), vec.getY());
    }

    public static Vector2D toVec(final Coordinate coor) {
      return new Vector2D(coor.x, coor.y);
    }
  }
}

================================================
FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/GeometryHullTool.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.math.util.MathUtils;
import org.apache.commons.math3.geometry.Vector;
import org.apache.commons.math3.geometry.euclidean.twod.Euclidean2D;
import org.apache.commons.math3.geometry.euclidean.twod.Vector2D;
import org.locationtech.geowave.analytic.clustering.NeighborData;
import org.locationtech.geowave.analytic.distance.DistanceFn;
import org.locationtech.geowave.core.index.FloatCompareUtils;
import org.locationtech.jts.algorithm.CGAlgorithms;
import org.locationtech.jts.algorithm.ConvexHull;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.operation.union.UnaryUnionOp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Set of algorithms to merge hulls and increase the gradient of convexity over hulls.
*/
public class GeometryHullTool {
  protected static final Logger LOGGER = LoggerFactory.getLogger(GeometryHullTool.class);
  // metric used for edge lengths and candidate distances; must be set before hulling
  DistanceFn distanceFnForCoordinate;
  // Park-Oh "N" parameter: an edge is only dug if edgeLength / minCandidateDistance exceeds this
  double concaveThreshold = 1.8;

  // NOTE(review): generic type parameters (e.g. Comparable<Edge>, TreeSet<Edge>,
  // TreeSet<NeighborData<Coordinate>>) appear to have been stripped during extraction; the raw
  // (and in places syntactically garbled, e.g. "TreeSet>") tokens are preserved untouched.

  public void connect(final List geometries) {}

  public DistanceFn getDistanceFnForCoordinate() {
    return distanceFnForCoordinate;
  }

  public void setDistanceFnForCoordinate(final DistanceFn distanceFnForCoordinate) {
    this.distanceFnForCoordinate = distanceFnForCoordinate;
  }

  protected double getConcaveThreshold() {
    return concaveThreshold;
  }

  /*
   * Set the threshold for the concave algorithm
   */
  protected void setConcaveThreshold(final double concaveThreshold) {
    this.concaveThreshold = concaveThreshold;
  }

  /**
   * A directed hull edge from {@code start} to {@code end}, doubly linked to its hull neighbors
   * ({@code next}/{@code last}) and carrying its length plus candidate interior points.
   */
  protected static class Edge implements Comparable {
    Coordinate start;
    Coordinate end;
    double distance;
    // neighboring edges on the hull ring
    Edge next, last;
    // interior candidate points assigned to this edge, ordered by distance
    private TreeSet> points = null;

    public Edge(final Coordinate start, final Coordinate end, final double distance) {
      super();
      this.start = start;
      this.end = end;
      this.distance = distance;
    }

    /** Lazily created candidate-point set. */
    public TreeSet> getPoints() {
      if (points == null) {
        points = new TreeSet<>();
      }
      return points;
    }

    // NOTE(review): never returns 0, so ordering is by length only and is inconsistent with
    // equals(); equal-length distinct edges can coexist in a TreeSet ordered this way only
    // because 0 is never returned -- confirm intent before changing.
    @Override
    public int compareTo(final Edge edge) {
      return (distance - edge.distance) > 0 ? 1 : -1;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((end == null) ? 0 : end.hashCode());
      result = (prime * result) + ((start == null) ? 0 : start.hashCode());
      return result;
    }

    /** Link {@code last} before this edge in the ring (sets both directions). */
    public void connectLast(final Edge last) {
      this.last = last;
      last.next = this;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final Edge other = (Edge) obj;
      if (end == null) {
        if (other.end != null) {
          return false;
        }
      } else if (!end.equals(other.end)) {
        return false;
      }
      if (start == null) {
        if (other.start != null) {
          return false;
        }
      } else if (!start.equals(other.start)) {
        return false;
      }
      return true;
    }

    @Override
    public String toString() {
      return "Edge [start=" + start + ", end=" + end + ", distance=" + distance + "]";
    }
  }

  /**
   * Create an edge between two hull vertices, removing those vertices from the interior-point
   * set and registering the edge (mutates both collections, hence the name).
   */
  private Edge createEdgeWithSideEffects(
      final Coordinate start,
      final Coordinate end,
      final Set innerPoints,
      final TreeSet edges) {
    final Edge newEdge = new Edge(start, end, distanceFnForCoordinate.measure(start, end));
    innerPoints.remove(newEdge.start);
    innerPoints.remove(newEdge.end);
    edges.add(newEdge);
    return newEdge;
  }

  /*
   * Generate a concave hull, if possible, given a geometry and a set of additional points.
   *
   * @param fast expedite processing allowing for some outliers.
   */
  public Geometry createHullFromGeometry(
      final Geometry clusterGeometry,
      final Collection additionalPoints,
      final boolean fast) {
    if (additionalPoints.isEmpty()) {
      return clusterGeometry;
    }
    // merge the geometry's coordinates with the extra points, de-duplicating via the set
    final Set batchCoords = new HashSet<>();
    if (clusterGeometry != null) {
      for (final Coordinate coordinate : clusterGeometry.getCoordinates()) {
        batchCoords.add(coordinate);
      }
    }
    for (final Coordinate coordinate : additionalPoints) {
      batchCoords.add(coordinate);
    }
    final GeometryFactory factory =
        clusterGeometry == null ? new GeometryFactory() : clusterGeometry.getFactory();
    final Coordinate[] actualCoords = batchCoords.toArray(new Coordinate[batchCoords.size()]);
    if (batchCoords.size() == 2) {
      return factory.createLineString(actualCoords);
    }
    final ConvexHull convexHull = new ConvexHull(actualCoords, factory);
    final Geometry convexHullGeo = convexHull.getConvexHull();
    try {
      // does this shape benefit from concave hulling?
      // it cannot be a line string
      if ((batchCoords.size() > 5) && (convexHullGeo.getArea() > 0.0)) {
        final Geometry concaveHull =
            fast
                ? concaveHull(convexHullGeo, batchCoords)
                : concaveHullParkOhMethod(convexHullGeo, batchCoords);
        if (fast && !concaveHull.isSimple()) {
          // NOTE(review): the second argument has no '{}' placeholder in the message, so SLF4J
          // discards concaveHull.toText() here -- confirm intended log format
          LOGGER.warn("Produced non simple hull", concaveHull.toText());
          // fall back to the slower but more reliable method
          return concaveHullParkOhMethod(convexHullGeo, batchCoords);
        }
        return concaveHull;
      } else {
        return convexHullGeo;
      }
    } catch (final Exception ex) {
      /*
       * Geometry[] points = new Geometry[actualCoords.length + 1]; for (int i = 0; i <
       * actualCoords.length; i++) points[i] = hull.getFactory().createPoint( actualCoords[i]);
       * points[points.length - 1] = hull; try { ShapefileTool.writeShape( "test_perf_xh", new File(
       * "./targettest_perf_xh"), points); } catch (IOException e) { e.printStackTrace(); } */
      // best-effort: fall back to the convex hull rather than failing the caller
      LOGGER.error("Failed to compute hull", ex);
      return convexHullGeo;
    }
  }

  /**
   * Gift unwrapping (e.g. dig) concept, taking a convex hull and a set of inner points, add inner
   * points to the hull without violating hull invariants--all points must reside on the hull or
   * inside the hull. Based on: Jin-Seo Park and Se-Jong Oh. "A New Concave Algorithm and
   * Concaveness Measure for n-dimensional Datasets" . Department of Nanobiomedical Science. Dankook
   * University". 2010.
   *
   *

Per the paper, N = concaveThreshold */
  public Geometry concaveHullParkOhMethod(
      final Geometry geometry,
      final Collection providedInnerPoints) {
    // working copies: interior points not yet on the hull, and hull edges ordered by length
    final Set innerPoints = new HashSet<>(providedInnerPoints);
    final TreeSet edges = new TreeSet<>();
    final Coordinate[] geoCoordinateList = geometry.getCoordinates();
    // hull rings repeat the first coordinate at the end, so skip the last entry
    final int s = geoCoordinateList.length - 1;
    // build the doubly linked ring of hull edges
    final Edge firstEdge =
        createEdgeWithSideEffects(geoCoordinateList[0], geoCoordinateList[1], innerPoints, edges);
    Edge lastEdge = firstEdge;
    for (int i = 1; i < s; i++) {
      final Edge newEdge =
          createEdgeWithSideEffects(
              geoCoordinateList[i], geoCoordinateList[i + 1], innerPoints, edges);
      newEdge.connectLast(lastEdge);
      lastEdge = newEdge;
    }
    firstEdge.connectLast(lastEdge);
    // repeatedly dig the longest remaining edge toward its nearest interior point
    while (!edges.isEmpty() && !innerPoints.isEmpty()) {
      final Edge edge = edges.pollLast();
      lastEdge = edge;
      double score = Double.MAX_VALUE;
      Coordinate selectedCandidate = null;
      // nearest interior point to this edge (calcDistance defined elsewhere in this class)
      for (final Coordinate candidate : innerPoints) {
        final double dist = calcDistance(edge.start, edge.end, candidate);
        // on the hull
        if (MathUtils.equals(dist, 0.0, 0.000000001)) {
          score = 0.0;
          selectedCandidate = candidate;
          break;
        }
        if ((dist > 0) && (dist < score)) {
          score = dist;
          selectedCandidate = candidate;
        }
      }
      if (selectedCandidate == null) {
        continue;
      }
      // if one a line segment of the hull, then remove candidate
      if (FloatCompareUtils.checkDoublesEqual(score, 0.0)) {
        innerPoints.remove(selectedCandidate);
        edges.add(edge);
        continue;
      }
      // Park and Oh look only at the neighbor edges
      // but this fails in some cases.
      if (isCandidateCloserToAnotherEdge(score, edge, edges, selectedCandidate)) {
        continue;
      }
      innerPoints.remove(selectedCandidate);
      final double eh = edge.distance;
      final double startToCandidate =
          distanceFnForCoordinate.measure(edge.start, selectedCandidate);
      final double endToCandidate = distanceFnForCoordinate.measure(edge.end, selectedCandidate);
      final double min = Math.min(startToCandidate, endToCandidate);
      // protected against duplicates
      // Park-Oh dig criterion: only split when the edge is sufficiently long relative to the
      // candidate's distance from the edge's endpoints (eh/min > N)
      if ((eh / min) > concaveThreshold) {
        final Edge newEdge1 = new Edge(edge.start, selectedCandidate, startToCandidate);
        final Edge newEdge2 = new Edge(selectedCandidate, edge.end, endToCandidate);
        // need to replace this with something more intelligent. This
        // occurs in cases of sharp angles. An angular approach may also
        // work
        // look for an angle to flip in the reverse direction.
        // only split if neither new edge crosses the replaced edge or its ring neighbors
        if (!intersectAnotherEdge(newEdge1, edge)
            && !intersectAnotherEdge(newEdge2, edge)
            && !intersectAnotherEdge(newEdge1, edge.last)
            && !intersectAnotherEdge(newEdge2, edge.next)) {
          edges.add(newEdge2);
          edges.add(newEdge1);
          // splice the two new edges into the ring in place of the old edge
          newEdge1.connectLast(edge.last);
          newEdge2.connectLast(newEdge1);
          edge.next.connectLast(newEdge2);
          lastEdge = newEdge1;
        }
      }
    }
    // walk the ring from any remaining edge to rebuild the polygon (reassemble defined elsewhere)
    return geometry.getFactory().createPolygon(reassemble(lastEdge));
  }

  /**
   * Gift unwrapping (e.g. dig) concept, taking a convex hull and a set of inner points, add inner
   * points to the hull without violating hull invariants--all points must reside on the hull or
   * inside the hull. Based on: Jin-Seo Park and Se-Jong Oh. "A New Concave Algorithm and
   * Concaveness Measure for n-dimensional Datasets" . Department of Nanobiomedical Science. Dankook
   * University". 2010.
   *
   *

Per the paper, N = concaveThreshold. * *

   * This algorithm evaluates remarkably faster than Park and Oh, but the quality of the result
   * is marginally less. If it is acceptable to have some small number of points fall outside of the
   * hull and speed is critical, use this method. The measure of error is difficult to calculate
   * since it is not directly calculated based on the number of inner points. Rather, the measure is
   * based on some number of points in proximity the optimal concave hull.
   */
  public Geometry concaveHull(
      final Geometry geometry,
      final Collection providedInnerPoints) {
    // Reuse the caller's set when possible to avoid a copy. NOTE(review): in
    // that case the caller's set is mutated by this method -- confirm callers
    // tolerate this.
    final Set innerPoints =
        (providedInnerPoints instanceof Set) ? (Set) providedInnerPoints
            : new HashSet<>(providedInnerPoints);
    final TreeSet edges = new TreeSet<>();
    final Coordinate[] geoCoordinateList = geometry.getCoordinates();
    // A hull ring repeats its first coordinate at the end; skip the duplicate.
    final int s = geoCoordinateList.length - 1;
    final Edge firstEdge =
        createEdgeWithSideEffects(geoCoordinateList[0], geoCoordinateList[1], innerPoints, edges);
    Edge lastEdge = firstEdge;
    for (int i = 1; i < s; i++) {
      final Edge newEdge =
          createEdgeWithSideEffects(
              geoCoordinateList[i],
              geoCoordinateList[i + 1],
              innerPoints,
              edges);
      newEdge.connectLast(lastEdge);
      lastEdge = newEdge;
    }
    firstEdge.connectLast(lastEdge);
    // Assign each inner point to its closest hull edge in one up-front pass,
    // rather than re-scanning every point for every edge as Park and Oh do.
    for (final Coordinate candidate : innerPoints) {
      double min = Double.MAX_VALUE;
      Edge bestEdge = null;
      for (final Edge edge : edges) {
        final double dist = calcDistance(edge.start, edge.end, candidate);
        if ((dist > 0) && (dist < min)) {
          min = dist;
          bestEdge = edge;
        }
      }
      if (bestEdge != null) {
        bestEdge.getPoints().add(new NeighborData<>(candidate, null, min));
      }
    }
    while (!edges.isEmpty()) {
      final Edge edge = edges.pollLast();
      lastEdge = edge;
      NeighborData candidate = edge.getPoints().pollFirst();
      while (candidate != null) {
        if (!MathUtils.equals(candidate.getDistance(), 0.0, 0.000000001)) {
          final Coordinate selectedCandidate = candidate.getElement();
          final double eh = edge.distance;
          final double startToCandidate =
              distanceFnForCoordinate.measure(edge.start, selectedCandidate);
          final double endToCandidate =
              distanceFnForCoordinate.measure(edge.end, selectedCandidate);
          final double min = Math.min(startToCandidate, endToCandidate);
          // protected against duplicates
          if ((eh / min) > concaveThreshold) {
            final Edge newEdge1 = new Edge(edge.start, selectedCandidate, startToCandidate);
            final Edge newEdge2 = new Edge(selectedCandidate, edge.end, endToCandidate);
            edges.add(newEdge2);
            edges.add(newEdge1);
            newEdge1.connectLast(edge.last);
            newEdge2.connectLast(newEdge1);
            edge.next.connectLast(newEdge2);
            lastEdge = newEdge1;
            // Re-home the replaced edge's remaining points onto whichever of
            // the two new edges they project onto.
            for (final NeighborData otherPoint : edge.getPoints()) {
              final double[] distProfile1 =
                  calcDistanceSegment(newEdge1.start, newEdge1.end, otherPoint.getElement());
              final double[] distProfile2 =
                  calcDistanceSegment(newEdge2.start, newEdge2.end, otherPoint.getElement());
              if ((distProfile1[0] >= 0.0) && (distProfile1[0] <= 1.0)) {
                // NOTE(review): the first two disjuncts below are always false
                // given the enclosing range check, so this reduces to comparing
                // the two segment distances -- confirm intent.
                if ((distProfile1[0] < 0.0)
                    || (distProfile1[0] > 1.0)
                    || (distProfile2[1] > distProfile1[1])) {
                  otherPoint.setDistance(distProfile1[1]);
                  newEdge1.getPoints().add(otherPoint);
                } else {
                  otherPoint.setDistance(distProfile2[1]);
                  newEdge2.getPoints().add(otherPoint);
                }
              } else if ((distProfile2[0] >= 0.0) && (distProfile2[0] <= 1.0)) {
                otherPoint.setDistance(distProfile2[1]);
                newEdge2.getPoints().add(otherPoint);
              }
            }
            edge.getPoints().clear(); // forces this loop to end
          }
        }
        candidate = edge.getPoints().pollFirst();
      }
    }
    return geometry.getFactory().createPolygon(reassemble(lastEdge));
  }

  /**
   * @return true if the proposed replacement edge crosses any edge of the ring other than the
   *         immediate neighbors of the edge being replaced.
   */
  public static boolean intersectAnotherEdge(final Edge newEdge, final Edge edgeToReplace) {
    Edge nextEdge = edgeToReplace.next.next;
    final Edge stopEdge = edgeToReplace.last;
    while (nextEdge != stopEdge) {
      if (edgesIntersect(newEdge, nextEdge)) {
        return true;
      }
      nextEdge = nextEdge.next;
    }
    return false;
  }

  public static boolean edgesIntersect(final Edge e1, final Edge e2) {
    return CGAlgorithms.distanceLineLine(e1.start, e1.end, e2.start, e2.end) <= 0.0;
  }

  /**
   * @return true if any edge other than the selected one is closer to the candidate point than
   *         {@code distanceToBeat}.
   */
  private static boolean isCandidateCloserToAnotherEdge(
      final double distanceToBeat,
      final Edge selectedEdgeToBeat,
      final Collection edges,
      final Coordinate selectedCandidate) {
    for (final Edge edge : edges) {
      if (selectedEdgeToBeat.equals(edge)) {
        continue;
      }
      final double dist = calcDistance(edge.start, edge.end, selectedCandidate);
      if ((dist >= 0.0) && (dist < distanceToBeat)) {
        return true;
      }
    }
    return false;
  }

  /** Walk the linked edge ring once, collecting a closed coordinate ring. */
  private static Coordinate[] reassemble(final Edge lastEdge) {
    final List coordinates = new ArrayList<>();
    coordinates.add(lastEdge.start);
    Edge nextEdge = lastEdge.next;
    while (nextEdge != lastEdge) {
      coordinates.add(nextEdge.start);
      nextEdge = nextEdge.next;
    }
    // close the ring by repeating the first coordinate
    coordinates.add(lastEdge.start);
    return coordinates.toArray(new Coordinate[coordinates.size()]);
  }

  protected boolean isInside(final Coordinate coor, final Coordinate[] hullCoordinates) {
    double maxAngle = 0;
    for (int i = 1; i < hullCoordinates.length; i++) {
      final Coordinate hullCoordinate = hullCoordinates[i];
      maxAngle = Math.max(calcAngle(hullCoordinates[0], coor, hullCoordinate), maxAngle);
    }
    // return 360 == Math.abs(maxAngle);
    // tolerance band around 360 degrees to absorb floating-point error
    return ((Math.abs(maxAngle) >= 359.999) && (Math.abs(maxAngle) <= 360.0001));
  }

  /**
   * Forms create edges between two shapes maintaining convexity.
   *
   *

   * Does not currently work if the shapes intersect
   */
  public Geometry connect(final Geometry shape1, final Geometry shape2) {
    try {
      // Only the non-intersecting polygon/polygon case uses the custom walk;
      // everything else falls back to a plain union.
      if ((shape1 instanceof Polygon)
          && (shape2 instanceof Polygon)
          && !shape1.intersects(shape2)) {
        return connect(shape1, shape2, getClosestPoints(shape1, shape2, distanceFnForCoordinate));
      }
      return UnaryUnionOp.union(Arrays.asList(shape1, shape2));
    } catch (final Exception ex) {
      LOGGER.warn("Exception caught in connect method", ex);
    }
    // last-resort fallback when union/connect fails
    return createHullFromGeometry(shape1, Arrays.asList(shape2.getCoordinates()), false);
  }

  protected Geometry connect(
      final Geometry shape1,
      final Geometry shape2,
      final Pair closestCoordinates) {
    Coordinate[] leftCoords = shape1.getCoordinates(), rightCoords = shape2.getCoordinates();
    int startLeft, startRight;
    // Orient so "left" is the shape whose closest coordinate has the smaller x.
    if ((leftCoords[closestCoordinates.getLeft()].x < rightCoords[closestCoordinates.getRight()].x)) {
      startLeft = closestCoordinates.getLeft();
      startRight = closestCoordinates.getRight();
    } else {
      leftCoords = shape2.getCoordinates();
      rightCoords = shape1.getCoordinates();
      startLeft = closestCoordinates.getRight();
      startRight = closestCoordinates.getLeft();
    }
    final HashSet visitedSet = new HashSet<>();
    visitedSet.add(leftCoords[startLeft]);
    visitedSet.add(rightCoords[startRight]);
    final boolean leftClockwise = clockwise(leftCoords);
    final boolean rightClockwise = clockwise(rightCoords);
    // Walk "up" from the closest pair to find the upper bridge end points.
    final Pair upperCoords =
        walk(visitedSet, leftCoords, rightCoords, startLeft, startRight, new DirectionFactory() {
          @Override
          public Direction createLeftFootDirection(final int start, final int max) {
            return leftClockwise ? new IncreaseDirection(start, max, true)
                : new DecreaseDirection(start, max, true);
          }

          @Override
          public Direction createRightFootDirection(final int start, final int max) {
            return rightClockwise ? new DecreaseDirection(start, max, false)
                : new IncreaseDirection(start, max, false);
          }
        });
    // Walk "down" (opposite orientation) to find the lower bridge end points.
    final Pair lowerCoords =
        walk(visitedSet, leftCoords, rightCoords, startLeft, startRight, new DirectionFactory() {
          @Override
          public Direction createLeftFootDirection(final int start, final int max) {
            return leftClockwise ? new DecreaseDirection(start, max, false)
                : new IncreaseDirection(start, max, false);
          }

          @Override
          public Direction createRightFootDirection(final int start, final int max) {
            return rightClockwise ? new IncreaseDirection(start, max, true)
                : new DecreaseDirection(start, max, true);
          }
        });
    // Stitch the two rings together between the bridge points.
    final List newCoordinateSet = new ArrayList<>();
    final Direction leftSet =
        leftClockwise
            ? new IncreaseDirection(
                upperCoords.getLeft(),
                lowerCoords.getLeft() + 1,
                leftCoords.length)
            : new DecreaseDirection(
                upperCoords.getLeft(),
                lowerCoords.getLeft() - 1,
                leftCoords.length);
    newCoordinateSet.add(leftCoords[upperCoords.getLeft()]);
    while (leftSet.hasNext()) {
      newCoordinateSet.add(leftCoords[leftSet.next()]);
    }
    final Direction rightSet =
        rightClockwise
            ? new IncreaseDirection(
                lowerCoords.getRight(),
                upperCoords.getRight() + 1,
                rightCoords.length)
            : new DecreaseDirection(
                lowerCoords.getRight(),
                upperCoords.getRight() - 1,
                rightCoords.length);
    newCoordinateSet.add(rightCoords[lowerCoords.getRight()]);
    while (rightSet.hasNext()) {
      newCoordinateSet.add(rightCoords[rightSet.next()]);
    }
    // close the ring
    newCoordinateSet.add(leftCoords[upperCoords.getLeft()]);
    return shape1.getFactory().createPolygon(
        newCoordinateSet.toArray(new Coordinate[newCoordinateSet.size()]));
  }

  /**
   * Alternate left/right "steps" along the two rings until neither foot can advance, returning the
   * final pair of positions.
   */
  private Pair walk(
      final Set visited,
      final Coordinate[] shape1Coords,
      final Coordinate[] shape2Coords,
      final int start1,
      final int start2,
      final DirectionFactory factory) {
    final int upPos =
        takeBiggestStep(
            visited,
            shape2Coords[start2],
            shape1Coords,
            factory.createLeftFootDirection(start1, shape1Coords.length));
    // even if the left foot was stationary, try to move the right foot
    final int downPos =
        takeBiggestStep(
            visited,
            shape1Coords[upPos],
            shape2Coords,
            factory.createRightFootDirection(start2, shape2Coords.length));
    // if the right step moved, then see if another l/r step can be taken
    if (downPos != start2) {
      return walk(visited, shape1Coords, shape2Coords, upPos, downPos, factory);
    }
    return Pair.of(upPos, start2);
  }

  /**
   * Determine if the polygon is defined clockwise
   */
  public static boolean clockwise(final Coordinate[] set) {
    double sum = 0.0;
    for (int i = 1; i < set.length; i++) {
      // NOTE(review): the conventional shoelace orientation test multiplies
      // (x2 - x1) * (y2 + y1); this divides instead, which can also divide by
      // zero when y2 + y1 == 0 -- confirm intent.
      sum += (set[i].x - set[i - 1].x) / (set[i].y + set[i - 1].y);
    }
    return sum > 0.0;
  }

  /** Absolute angle at the vertex, folded into the range [0, 180]. */
  public static double calcSmallestAngle(
      final Coordinate one,
      final Coordinate vertex,
      final Coordinate two) {
    final double angle = Math.abs(calcAngle(one, vertex, two));
    return (angle > 180.0) ? angle - 180.0 : angle;
  }

  /**
   * Calculate the angle between two points and a given vertex
   */
  public static double calcAngle(
      final Coordinate one,
      final Coordinate vertex,
      final Coordinate two) {
    final double p1x = one.x - vertex.x;
    final double p1y = one.y - vertex.y;
    final double p2x = two.x - vertex.x;
    final double p2y = two.y - vertex.y;
    final double angle1 = Math.toDegrees(Math.atan2(p1y, p1x));
    final double angle2 = Math.toDegrees(Math.atan2(p2y, p2x));
    return angle2 - angle1;
  }

  /**
   * Calculate the distance between two points and a given vertex
   *
   * @return array if doubles double[0] = length of the projection from start on the line containing
   *         the segment(start to end) double[1] = distance to the segment double[2] = distance to
   *         the line containing the segment(start to end)
   */
  public static double[] calcDistanceSegment(
      final Coordinate start,
      final Coordinate end,
      final Coordinate point) {
    final Vector vOne = new Vector2D(start.x, start.y);
    final Vector vTwo = new Vector2D(end.x, end.y);
    final Vector vVertex = new Vector2D(point.x, point.y);
    final Vector E1 = vTwo.subtract(vOne);
    final Vector E2 = vVertex.subtract(vOne);
    final double distOneTwo = E2.dotProduct(E1);
    final double lengthVOneSq = E1.getNormSq();
    // fraction along the segment at which 'point' projects onto the line
    final double projectionLength = distOneTwo / lengthVOneSq;
    final Vector projection = E1.scalarMultiply(projectionLength).add(vOne);
    // distance to the segment: clamp to the nearest endpoint when the
    // projection falls outside [0, 1]
    final double o =
        ((projectionLength < 0.0) ? vOne.distance(vVertex)
            : ((projectionLength > 1.0) ? vTwo.distance(vVertex) : vVertex.distance(projection)));
    return new double[] {projectionLength, o, vVertex.distance(projection)};
  }

  /**
   * @return distance from the point to the segment, or -1 when the point does not project onto the
   *         segment interior.
   */
  public static double calcDistance(
      final Coordinate start,
      final Coordinate end,
      final Coordinate point) {
    final double[] p = calcDistanceSegment(start, end, point);
    return ((p[0] < 0.0) || (p[0] > 1.0)) ? -1 : p[1];
  }

  /**
   * Brute-force scan over both coordinate arrays for the closest pair, returned as a pair of
   * indices (shape1 position, shape2 position).
   */
  public static Pair getClosestPoints(
      final Geometry shape1,
      final Geometry shape2,
      final DistanceFn distanceFnForCoordinate) {
    int bestShape1Position = 0;
    int bestShape2Position = 0;
    double minDist = Double.MAX_VALUE;
    int pos1 = 0, pos2 = 0;
    for (final Coordinate coord1 : shape1.getCoordinates()) {
      pos2 = 0;
      for (final Coordinate coord2 : shape2.getCoordinates()) {
        final double dist = (distanceFnForCoordinate.measure(coord1, coord2));
        if (dist < minDist) {
          bestShape1Position = pos1;
          bestShape2Position = pos2;
          minDist = dist;
        }
        pos2++;
      }
      pos1++;
    }
    return Pair.of(bestShape1Position, bestShape2Position);
  }

  /**
   * Advance along the ring as long as the angle at the station keeps growing (while staying under
   * 180 degrees), returning the last position reached.
   */
  private int takeBiggestStep(
      final Set visited,
      final Coordinate station,
      final Coordinate[] shapeCoords,
      final Direction legIncrement) {
    double angle = 0.0;
    final Coordinate startPoint = shapeCoords[legIncrement.getStart()];
    int last = legIncrement.getStart();
    Coordinate lastCoordinate = shapeCoords[last];
    while (legIncrement.hasNext()) {
      final int pos = legIncrement.next();
      // skip over duplicate (a ring or polygon has one duplicate)
      if (shapeCoords[pos].equals(lastCoordinate)) {
        continue;
      }
      lastCoordinate = shapeCoords[pos];
      if (visited.contains(lastCoordinate)) {
        break;
      }
      double currentAngle =
          legIncrement.angleChange(calcAngle(startPoint, station, lastCoordinate));
      // normalize angles below -180 back into range
      currentAngle = currentAngle < -180 ? currentAngle + 360 : currentAngle;
      if ((currentAngle >= angle) && (currentAngle < 180.0)) {
        angle = currentAngle;
        last = pos;
        visited.add(shapeCoords[pos]);
      } else {
        return last;
      }
    }
    return last;
  }

  /** Builds the per-ring traversal directions for a walk. */
  private interface DirectionFactory {
    Direction createLeftFootDirection(int start, int max);

    Direction createRightFootDirection(int start, int max);
  }

  /** An index iterator over a ring with a sign convention for angle changes. */
  private interface Direction extends Iterator {
    public int getStart();

    public double angleChange(double angle);
  }

  /** Iterates ring indices forward (modulo the ring size). */
  private class IncreaseDirection implements Direction {
    final int max;
    final int start;
    final int stop;
    int current = 0;
    final boolean angleIsNegative;

    @Override
    public int getStart() {
      return start;
    }

    // full-loop form: stops when it wraps back to 'start'
    public IncreaseDirection(final int start, final int max, final boolean angleIsNegative) {
      super();
      this.max = max;
      current = getNext(start);
      stop = start;
      this.start = start;
      this.angleIsNegative = angleIsNegative;
    }

    // bounded form: stops at an explicit 'stop' index
    public IncreaseDirection(final int start, final int stop, final int max) {
      super();
      this.max = max;
      current = getNext(start);
      this.stop = stop;
      this.start = start;
      angleIsNegative = true;
    }

    @Override
    public Integer next() {
      final int n = current;
      current = getNext(current);
      return n;
    }

    @Override
    public boolean hasNext() {
      return current != stop;
    }

    protected int getNext(final int n) {
      return (n + 1) % max;
    }

    @Override
    public void remove() {}

    @Override
    public double angleChange(final double angle) {
      return angleIsNegative ? -angle : angle;
    }
  }

  /** Iterates ring indices backward by overriding the successor function. */
  private class DecreaseDirection extends IncreaseDirection implements Direction {
    public DecreaseDirection(final int start, final int max, final boolean angleIsNegative) {
      super(start, max, angleIsNegative);
    }

    public DecreaseDirection(final int start, final int stop, final int max) {
      super(start, stop, max);
    }

    @Override
    protected int getNext(final int n) {
      return (n == 0) ?
max - 1 : n - 1; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/IndependentJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.util.Collection; import org.locationtech.geowave.analytic.param.ParameterEnum; public interface IndependentJobRunner { public int run(PropertyManagement properties) throws Exception; public Collection> getParameters(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/Projection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.jts.geom.Geometry; /** * Project a n-dimensional item into a two-dimensional polygon for convex hull construction. * * @param */ public interface Projection { public Geometry getProjection(T anItem); public void initialize(JobContext context, Class scope) throws IOException; public void setup( PropertyManagement runTimeProperties, Class scope, Configuration configuration); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/PropertyManagement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.Serializable; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Manage properties used by the Map Reduce environment that are provided through the API (e.g. * command). Allow these arguments to be placed an 'args' list for 'main' executables (e.g. * ToolRunner). * *

The class supports some basic conversions. * *

Non-serializable objects: {@link Persistable} instances are converted to and from byte * formats. {@link QueryConstraints} is a special case, supporting WKT String. {@link Path} are * converted to a from string representation of the their URI. * *

Serializable objects: {@link NumericRange} supports min,max in string representation (e.g. * "1.0,2.0") * *

NOTE: ConfigurationWrapper implementation is scopeless. * *

EXPECTED FUTURE WORK: I am bit unsatisfied with the duality of the parameters base class. In * one case, in is treated a description for a class value and, in the other case, it is treated as * a description for the type of a property value. The former is really a descriptor of a Class of * type class. Generics do not help due to erasure. The impact of this inconsistency is the * inability to validate on 'store'. Instead, validation occurs on 'gets'. The ultimate goal is to * uniformly provide feedback to parameters from command line arguments and property files on * submission to the manager rather than on extraction from the manager. */ public class PropertyManagement implements Serializable { /** */ private static final long serialVersionUID = -4186468044516636362L; static final Logger LOGGER = LoggerFactory.getLogger(PropertyManagement.class); private final Map, Serializable> localProperties = new HashMap<>(); private final List> converters = new ArrayList<>(); private PropertyManagement nestProperties = null; public PropertyManagement() { converters.add(new QueryConverter()); converters.add(new PathConverter()); converters.add(new PersistableConverter()); converters.add(new DoubleConverter()); converters.add(new IntegerConverter()); converters.add(new ByteConverter()); } public PropertyManagement( final PropertyConverter[] converters, final ParameterEnum[] names, final Object[] values) { this.converters.add(new QueryConverter()); this.converters.add(new PathConverter()); this.converters.add(new PersistableConverter()); this.converters.add(new DoubleConverter()); this.converters.add(new IntegerConverter()); this.converters.add(new ByteConverter()); for (final PropertyConverter converter : converters) { addConverter(converter); } storeAll(names, values); } public PropertyManagement(final ParameterEnum[] names, final Object[] values) { converters.add(new QueryConverter()); converters.add(new PathConverter()); converters.add(new PersistableConverter()); 
converters.add(new DoubleConverter()); converters.add(new IntegerConverter()); converters.add(new ByteConverter()); storeAll(names, values); } public PropertyManagement(final PropertyManagement pm) { nestProperties = pm; converters.addAll(pm.converters); } public Serializable get(final ParameterEnum propertyName) { return getPropertyValue(propertyName); } public synchronized void store( final ParameterEnum property, final T value, final PropertyConverter converter) { Serializable convertedValue; try { convertedValue = converter.convert(value); } catch (final Exception e) { throw new IllegalArgumentException( String.format( "Cannot store %s with value %s. Expected type = %s; Error message = %s", property.self().toString(), value.toString(), property.getHelper().getBaseClass().toString(), e.getLocalizedMessage()), e); } localProperties.put(property, convertedValue); addConverter(converter); } public synchronized void store(final ParameterEnum property, final Object value) { if (value != null) { Serializable convertedValue; try { convertedValue = convertIfNecessary(property, value); } catch (final Exception e) { throw new IllegalArgumentException( String.format( "Cannot store %s with value %s:%s", property.self().toString(), value.toString(), e.getLocalizedMessage())); } localProperties.put(property, convertedValue); } } /** Does not work for non-serializable data (e.g. 
Path or Persistable) */ public synchronized Serializable storeIfEmpty( final ParameterEnum propertyEnum, final Serializable value) { if (!containsPropertyValue(propertyEnum) && (value != null)) { LOGGER.info("Setting parameter : {} to {}", propertyEnum.toString(), value.toString()); store(propertyEnum, value); return value; } return getPropertyValue(propertyEnum); } public synchronized void copy( final ParameterEnum propertyNameFrom, final ParameterEnum propertyNameTo) { if (containsPropertyValue(propertyNameFrom)) { localProperties.put(propertyNameTo, getPropertyValue(propertyNameFrom)); } } public synchronized void storeAll(final ParameterEnum[] names, final Object[] values) { if (values.length != names.length) { LOGGER.error( "The number of values must equal the number of names passed to the store method"); throw new IllegalArgumentException( "The number of values must equal the number of names passed to the store method"); } int i = 0; for (final Object value : values) { store(names[i++], value); } } public void setConfig( final ParameterEnum[] parameters, final Configuration config, final Class scope) { for (final ParameterEnum param : parameters) { Object value; try { value = getProperty(param); param.getHelper().setValue(config, scope, value); } catch (final Exception e) { LOGGER.error("Property " + param.self().toString() + " is not available", e); throw new IllegalArgumentException( "Property " + param.self().toString() + " is not available", e); } } } @SuppressWarnings("unchecked") public T getClassInstance( final ParameterEnum property, final Class iface, final Class defaultClass) throws InstantiationException { final Object o = getPropertyValue(property); try { final Class clazz = o == null ? defaultClass : (o instanceof Class) ? 
(Class) o : Class.forName(o.toString()); if (!property.getHelper().getBaseClass().isAssignableFrom(clazz)) { LOGGER.error( "Class for property " + property.self().toString() + " does not implement " + property.getHelper().getBaseClass().toString()); } return (T) clazz.newInstance(); } catch (final ClassNotFoundException e) { LOGGER.error("Class for property " + property.self().toString() + " is not found", e); throw new InstantiationException(property.self().toString()); } catch (final InstantiationException e) { LOGGER.error("Class for property " + property.self().toString() + " is not instiatable", e); throw new InstantiationException(property.self().toString()); } catch (final IllegalAccessException e) { LOGGER.error("Class for property " + property.self().toString() + " is not accessible", e); throw new InstantiationException(property.self().toString()); } } public synchronized boolean hasProperty(final ParameterEnum property) { return containsPropertyValue(property); } public String getPropertyAsString(final ParameterEnum property) { return getPropertyAsString(property, null); } /** * Returns the value as, without conversion from the properties. Throws an exception if a * conversion is required to a specific type */ public Object getProperty(final ParameterEnum property) throws Exception { final Serializable value = getPropertyValue(property); if (!Serializable.class.isAssignableFrom(property.getHelper().getBaseClass())) { for (final PropertyConverter converter : converters) { if (converter.baseClass().isAssignableFrom(property.getHelper().getBaseClass())) { return this.validate(property, converter.convert(value)); } } } return this.validate(property, value); } /** * Returns the value after conversion. Throws an exception if a conversion fails. 
*/ public T getProperty(final ParameterEnum property, final PropertyConverter converter) throws Exception { final Serializable value = getPropertyValue(property); return converter.convert(value); } public byte[] getPropertyAsBytes(final ParameterEnum property) { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof byte[]) { return (byte[]) val; } return ByteArrayUtils.byteArrayFromString(val.toString()); } return null; } public String getPropertyAsString(final ParameterEnum property, final String defaultValue) { // not using containsKey to avoid synchronization final Object value = getPropertyValue(property); return (String) validate(property, value == null ? defaultValue : value.toString()); } public Boolean getPropertyAsBoolean(final ParameterEnum property, final Boolean defaultValue) { final Object val = getPropertyValue(property); if (val != null) { return Boolean.valueOf(val.toString()); } LOGGER.warn("Using default value for parameter : " + property.self().toString()); return defaultValue; } public Integer getPropertyAsInt(final ParameterEnum property, final int defaultValue) { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof Integer) { return (Integer) val; } return (Integer) validate(property, Integer.parseInt(val.toString())); } LOGGER.warn("Using default value for parameter : " + property.self().toString()); return defaultValue; } public Double getPropertyAsDouble(final ParameterEnum property, final double defaultValue) { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof Double) { return (Double) val; } return Double.parseDouble(val.toString()); } LOGGER.warn("Using default value for parameter : " + property.self().toString()); return defaultValue; } public NumericRange getPropertyAsRange( final ParameterEnum property, final NumericRange defaultValue) { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof NumericRange) { 
return (NumericRange) val; } final String p = val.toString(); final String[] parts = p.split(","); try { if (parts.length == 2) { return new NumericRange( Double.parseDouble(parts[0].trim()), Double.parseDouble(parts[1].trim())); } else { return new NumericRange(0, Double.parseDouble(p)); } } catch (final Exception ex) { LOGGER.error("Invalid range parameter " + property.self().toString(), ex); return defaultValue; } } LOGGER.warn("Using default value for parameter : " + property.self().toString()); return defaultValue; } public Class getPropertyAsClass(final ParameterEnum property) { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof Class) { return validate((Class) val, property.getHelper().getBaseClass()); } try { return validate( (Class) Class.forName(val.toString()), property.getHelper().getBaseClass()); } catch (final ClassNotFoundException e) { LOGGER.error("Class not found for property " + property, e); } catch (final java.lang.IllegalArgumentException ex) { LOGGER.error("Invalid class for property" + property, ex); throw new IllegalArgumentException("Invalid class for property" + property); } } return null; } public Class getPropertyAsClass(final ParameterEnum property, final Class iface) throws ClassNotFoundException { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof Class) { return validate((Class) val, property.getHelper().getBaseClass()); } try { return validate( (Class) Class.forName(val.toString()), property.getHelper().getBaseClass()); } catch (final ClassNotFoundException e) { LOGGER.error("Class not found for property " + property.self().toString()); throw e; } catch (final java.lang.IllegalArgumentException ex) { LOGGER.error("Invalid class for property" + property.self().toString(), ex); throw new IllegalArgumentException("Invalid class for property" + property); } } else { LOGGER.error("Value not found for property " + property.self().toString()); } throw new 
ClassNotFoundException("Value not found for property " + property.self().toString()); } public Class getPropertyAsClass( final ParameterEnum property, final Class iface, final Class defaultClass) { final Object val = getPropertyValue(property); if (val != null) { if (val instanceof Class) { return validate((Class) val, property.getHelper().getBaseClass()); } try { return validate( (Class) Class.forName(val.toString()), property.getHelper().getBaseClass()); } catch (final ClassNotFoundException e) { LOGGER.error("Class not found for property " + property, e); } catch (final java.lang.IllegalArgumentException ex) { LOGGER.error("Invalid class for property" + property, ex); throw new IllegalArgumentException("Invalid class for property" + property); } } LOGGER.warn("Using default class for parameter : " + property.self().toString()); return defaultClass; } private Class validate(final Class classToValidate, final Class iface) throws IllegalArgumentException { if (!iface.isAssignableFrom(classToValidate)) { throw new IllegalArgumentException(classToValidate + "is an invalid subclass of " + iface); } return classToValidate; } public Query getPropertyAsQuery(final ParameterEnum property) throws Exception { final Serializable val = getPropertyValue(property); if (val != null) { return (Query) validate(property, new QueryConverter().convert(val)); } return null; } public Path getPropertyAsPath(final ParameterEnum property) throws Exception { final Serializable val = getPropertyValue(property); if (val != null) { return (Path) validate(property, new PathConverter().convert(val)); } return null; } public Persistable getPropertyAsPersistable(final ParameterEnum property) throws Exception { final Serializable val = getPropertyValue(property); if (val != null) { return (Persistable) validate(property, new PersistableConverter().convert(val)); } return null; } public void setJobConfiguration(final Configuration configuration, final Class scope) { for (final ParameterEnum param : 
localProperties.keySet()) { param.getHelper().setValue(configuration, scope, param.getHelper().getValue(this)); } if ((nestProperties != null) && !nestProperties.localProperties.isEmpty()) { nestProperties.setJobConfiguration(configuration, scope); } } public void dump() { LOGGER.info("Properties : "); for (final Map.Entry, Serializable> prop : localProperties.entrySet()) { LOGGER.info("{} = {}", prop.getKey(), prop.getValue()); } nestProperties.dump(); } /** * Add to the set of converters used to take a String representation of a value and convert it * into another serializable form. * *

This is done if the preferred internal representation does not match that of a string. For
 * example, a query is maintained as bytes even though it can be provided as a query
 *
 * @param converter the converter to register for subsequent property conversions
 */
public synchronized void addConverter(final PropertyConverter<?> converter) {
  converters.add(converter);
}

/** Serializes a {@link Persistable} to its binary form for storage as a property value. */
private static byte[] toBytes(final Persistable persistableObject)
    throws UnsupportedEncodingException {
  return PersistenceUtils.toBinary(persistableObject);
}

/** Reconstitutes a {@link Persistable} previously serialized by {@link #toBytes}. */
private static Persistable fromBytes(final byte[] data) throws InstantiationException,
    IllegalAccessException, ClassNotFoundException, UnsupportedEncodingException {
  return PersistenceUtils.fromBinary(data);
}

/**
 * Validates that {@code value} is acceptable for the given parameter, throwing
 * {@link IllegalArgumentException} otherwise; returns the value unchanged on success.
 */
private Object validate(final ParameterEnum<?> propertyName, final Object value) {
  if (value != null) {
    if (value instanceof Class) {
      // NOTE(review): this condition looks inverted -- it rejects a Class value that is
      // assignable FROM the base class (which includes the base class itself) instead of
      // rejecting one that is not a subtype of it. Preserved as-is to avoid a behavior
      // change; confirm the intended contract before fixing.
      if (((Class<?>) value).isAssignableFrom(propertyName.getHelper().getBaseClass())) {
        throw new IllegalArgumentException(
            String.format(
                "%s does not accept class %s",
                propertyName.self().toString(),
                ((Class<?>) value).getName()));
      }
    } else if (!propertyName.getHelper().getBaseClass().isInstance(value)) {
      throw new IllegalArgumentException(
          String.format(
              "%s does not accept type %s",
              propertyName.self().toString(),
              value.getClass().getName()));
    }
  }
  return value;
}

/**
 * Coerces a value into a {@link Serializable} storage form using a registered converter when
 * the raw value is not serializable, or when a String was supplied for a non-String parameter
 * (String -> typed value -> serializable form). Falls through with a cast when no converter
 * applies.
 */
@SuppressWarnings("unchecked")
private Serializable convertIfNecessary(final ParameterEnum<?> property, final Object value)
    throws Exception {
  if (!(value instanceof Serializable)) {
    for (@SuppressWarnings("rawtypes") final PropertyConverter converter : converters) {
      if (converter.baseClass().isAssignableFrom(property.getHelper().getBaseClass())) {
        return converter.convert(value);
      }
    }
  }
  if (!property.getHelper().getBaseClass().isInstance(value) && (value instanceof String)) {
    for (@SuppressWarnings("rawtypes") final PropertyConverter converter : converters) {
      if (converter.baseClass().isAssignableFrom(property.getHelper().getBaseClass())) {
        // String -> T, then T -> Serializable storage form
        return converter.convert(converter.convert(value.toString()));
      }
    }
  }
  return (Serializable) value;
}

/**
 * Bidirectional conversion between a property's natural type {@code T} and its serializable
 * storage form.
 */
public interface PropertyConverter<T> extends Serializable {
  public Serializable convert(T ob) throws Exception;

  public T convert(Serializable ob) throws Exception;

  /** The natural type this converter handles. */
  public Class<T> baseClass();
}

/** A group of related parameters convertible from a parsed command line. */
public interface PropertyGroup<T> extends Serializable {
  public T convert(CommandLine commandLine) throws ParseException;

  public ParameterEnum<?> getParameter();
}

/** Stores queries as bytes; accepts bytes or a Query, otherwise yields an empty query. */
public static class QueryConverter implements PropertyConverter<Query> {
  private static final long serialVersionUID = 1L;

  @Override
  public Serializable convert(final Query ob) {
    try {
      return toBytes(ob);
    } catch (final UnsupportedEncodingException e) {
      throw new IllegalArgumentException(
          String.format(
              "Cannot convert %s to a Query: %s",
              ob.toString(),
              e.getLocalizedMessage()));
    }
  }

  @Override
  public Query convert(final Serializable ob) throws Exception {
    if (ob instanceof byte[]) {
      return (Query) PropertyManagement.fromBytes((byte[]) ob);
    } else if (ob instanceof Query) {
      return (Query) ob;
    }
    // fall back to an empty (match-everything) query
    return QueryBuilder.newBuilder().build();
  }

  @Override
  public Class<Query> baseClass() {
    return Query.class;
  }
}

/** Stores Hadoop paths as their URI string form. */
public static class PathConverter implements PropertyConverter<Path> {
  private static final long serialVersionUID = 1L;

  @Override
  public Serializable convert(final Path ob) {
    return ob.toUri().toString();
  }

  @Override
  public Path convert(final Serializable ob) throws Exception {
    return new Path(ob.toString());
  }

  @Override
  public Class<Path> baseClass() {
    return Path.class;
  }
}

/** Stores byte arrays as base-64 style strings via {@link ByteArrayUtils}. */
public static class ByteConverter implements PropertyConverter<byte[]> {
  private static final long serialVersionUID = 1L;

  @Override
  public Serializable convert(final byte[] ob) {
    return ByteArrayUtils.byteArrayToString(ob);
  }

  @Override
  public byte[] convert(final Serializable ob) throws Exception {
    return ByteArrayUtils.byteArrayFromString(ob.toString());
  }

  @Override
  public Class<byte[]> baseClass() {
    return byte[].class;
  }
}

/** Integers are already serializable; parsing handles String-stored values. */
public static class IntegerConverter implements PropertyConverter<Integer> {
  private static final long serialVersionUID = 1L;

  @Override
  public Serializable convert(final Integer ob) {
    return ob;
  }

  @Override
  public Integer convert(final Serializable ob) throws Exception {
    return Integer.parseInt(ob.toString());
  }

  @Override
  public Class<Integer> baseClass() {
    return Integer.class;
  }
}

/** Doubles are already serializable; parsing handles String-stored values. */
public static class DoubleConverter implements PropertyConverter<Double> {
  private static final long serialVersionUID = 1L;

  @Override
  public Serializable convert(final Double ob) {
    return ob;
  }

  @Override
  public Double convert(final Serializable ob) throws Exception {
    return Double.parseDouble(ob.toString());
  }

  @Override
  public Class<Double> baseClass() {
    return Double.class;
  }
}

/** Stores any {@link Persistable} as its binary form; only byte[] can be converted back. */
public static class PersistableConverter implements PropertyConverter<Persistable> {
  private static final long serialVersionUID = 1L;

  @Override
  public Serializable convert(final Persistable ob) {
    try {
      return toBytes(ob);
    } catch (final UnsupportedEncodingException e) {
      throw new IllegalArgumentException(
          String.format(
              "Cannot convert %s to a Persistable: %s",
              ob.toString(),
              e.getLocalizedMessage()));
    }
  }

  @Override
  public Persistable convert(final Serializable ob) throws Exception {
    if (ob instanceof byte[]) {
      return fromBytes((byte[]) ob);
    }
    throw new IllegalArgumentException(
        String.format("Cannot convert %s to Persistable", ob.toString()));
  }

  @Override
  public Class<Persistable> baseClass() {
    return Persistable.class;
  }
}

/** True if the property is present locally or anywhere up the nesting chain. */
private boolean containsPropertyValue(final ParameterEnum<?> property) {
  return ((nestProperties != null) && nestProperties.containsPropertyValue(property))
      || localProperties.containsKey(property);
}

/** Local value wins; otherwise delegate up the nesting chain. */
private Serializable getPropertyValue(final ParameterEnum<?> property) {
  final Serializable val = localProperties != null ? localProperties.get(property) : null;
  if (val == null) {
    return nestProperties != null ? nestProperties.getPropertyValue(property) : null;
  }
  return val;
}
}

================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/ScopedJobConfiguration.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ScopedJobConfiguration { protected static final Logger LOGGER = LoggerFactory.getLogger(ScopedJobConfiguration.class); private final Configuration jobConfiguration; private final Class scope; private Logger logger = LOGGER; public ScopedJobConfiguration(final Configuration jobConfiguration, final Class scope) { super(); this.jobConfiguration = jobConfiguration; this.scope = scope; } public ScopedJobConfiguration( final Configuration jobConfiguration, final Class scope, final Logger logger) { super(); this.jobConfiguration = jobConfiguration; this.scope = scope; this.logger = logger; } public int getInt(final Enum property, final int defaultValue) { final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property); if (jobConfiguration.getRaw(propName) == null) { logger.warn("Using default for property " + propName); } final int v = jobConfiguration.getInt(propName, defaultValue); return v; } public String getString(final Enum property, final String defaultValue) { final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property); if (jobConfiguration.getRaw(propName) == null) { logger.warn("Using default for property " + propName); } return jobConfiguration.get(propName, defaultValue); } public T getInstance( final Enum property, final Class iface, final Class defaultValue) throws InstantiationException, 
IllegalAccessException { try { final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property); if (jobConfiguration.getRaw(propName) == null) { if (defaultValue == null) { return null; } logger.warn("Using default for property " + propName); } return jobConfiguration.getClass( GeoWaveConfiguratorBase.enumToConfKey(scope, property), defaultValue, iface).newInstance(); } catch (final Exception ex) { logger.error("Cannot instantiate " + GeoWaveConfiguratorBase.enumToConfKey(scope, property)); throw ex; } } public double getDouble(final Enum property, final double defaultValue) { final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property); if (jobConfiguration.getRaw(propName) == null) { logger.warn("Using default for property " + propName); } return jobConfiguration.getDouble(propName, defaultValue); } public byte[] getBytes(final Enum property) { final String propName = GeoWaveConfiguratorBase.enumToConfKey(scope, property); final String data = jobConfiguration.getRaw(propName); if (data == null) { logger.error(propName + " not found "); } return ByteArrayUtils.byteArrayFromString(data); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/SerializableAdapterStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.IOException; import java.io.Serializable; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Support for adapter stores that are Serializable. Rather than for an adapter store to serialize * its state, wrap an adapter store. If the adapter store is not serializable, then log a warning * message upon serialization. */ public class SerializableAdapterStore implements TransientAdapterStore, Serializable { /** */ private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(SerializableAdapterStore.class); transient TransientAdapterStore adapterStore; public SerializableAdapterStore() {} public SerializableAdapterStore(final TransientAdapterStore adapterStore) { super(); this.adapterStore = adapterStore; } private TransientAdapterStore getAdapterStore() { if (adapterStore == null) { throw new IllegalStateException("AdapterStore has not been initialized"); } return adapterStore; } @Override public void addAdapter(final DataTypeAdapter adapter) { getAdapterStore().addAdapter(adapter); } @Override public DataTypeAdapter getAdapter(final String typeName) { return getAdapterStore().getAdapter(typeName); } @Override public boolean adapterExists(final String typeName) { return getAdapterStore().adapterExists(typeName); } @Override public DataTypeAdapter[] getAdapters() { return getAdapterStore().getAdapters(); } @Override public void removeAll() { getAdapterStore().removeAll(); } private 
void writeObject(final java.io.ObjectOutputStream out) throws IOException { if (adapterStore instanceof Serializable) { out.writeBoolean(true); out.writeObject(adapterStore); } else { out.writeBoolean(false); } } private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { if (in.readBoolean()) { adapterStore = (TransientAdapterStore) in.readObject(); } else { LOGGER.warn("Unable to initialized AdapterStore; the store is not serializable"); } } @Override public void removeAdapter(final String typeName) { getAdapterStore().removeAdapter(typeName); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/ShapefileTool.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.io.FileUtils; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureWriter; import org.geotools.data.Transaction; import org.geotools.data.shapefile.ShapefileDataStore; import org.geotools.data.shapefile.ShapefileDataStoreFactory; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ShapefileTool { private static final Logger LOGGER = LoggerFactory.getLogger(ShapefileTool.class); private static SimpleFeatureType createFeatureType(final String typeName, final boolean isPoint) { final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); builder.setName(typeName); builder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate reference // system // add attributes in order builder.add("the_geom", isPoint ? 
Point.class : Polygon.class); builder.length(15).add("Name", String.class); // <- 15 chars width for name field // build the type return builder.buildFeatureType(); } @edu.umd.cs.findbugs.annotations.SuppressFBWarnings( value = "RV_RETURN_VALUE_IGNORED_BAD_PRACTICE", justification = "Directories may alreadybe there") public static void writeShape(final String typeName, final File dir, final Geometry[] shapes) throws IOException { FileUtils.deleteDirectory(dir); dir.mkdirs(); final SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(createFeatureType(typeName, shapes[0] instanceof Point)); final ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory(); final Map params = new HashMap<>(); params.put("url", new File(dir.getAbsolutePath() + "/" + typeName + ".shp").toURI().toURL()); params.put("create spatial index", Boolean.TRUE); final ShapefileDataStore newDataStore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params); newDataStore.createSchema(createFeatureType(typeName, shapes[0] instanceof Point)); final Transaction transaction = new DefaultTransaction("create"); try (final FeatureWriter writer = newDataStore.getFeatureWriterAppend(typeName, transaction)) { final int i = 1; for (final Geometry shape : shapes) { featureBuilder.add(shape); featureBuilder.add(Integer.valueOf(i)); final SimpleFeature feature = featureBuilder.buildFeature(null); final SimpleFeature copy = writer.next(); for (final AttributeDescriptor attrD : feature.getFeatureType().getAttributeDescriptors()) { // the null case should only happen for geometry if (copy.getFeatureType().getDescriptor(attrD.getName()) != null) { copy.setAttribute(attrD.getName(), feature.getAttribute(attrD.getName())); } } // shape files force geometry name to be 'the_geom'. 
So isolate // this change copy.setDefaultGeometry(feature.getDefaultGeometry()); writer.write(); } } catch (final IOException e) { LOGGER.warn("Problem with the FeatureWritter", e); transaction.rollback(); } finally { transaction.commit(); transaction.close(); } } @edu.umd.cs.findbugs.annotations.SuppressFBWarnings( value = "RV_RETURN_VALUE_IGNORED_BAD_PRACTICE", justification = "Directories may alreadybe there") public static void writeShape(final File dir, final List shapes) throws IOException { FileUtils.deleteDirectory(dir); dir.mkdirs(); final ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory(); final String typeName = shapes.get(0).getType().getTypeName(); final Map params = new HashMap<>(); params.put("url", new File(dir.getAbsolutePath() + "/" + typeName + ".shp").toURI().toURL()); params.put("create spatial index", Boolean.TRUE); final ShapefileDataStore newDataStore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params); newDataStore.createSchema(shapes.get(0).getFeatureType()); final Transaction transaction = new DefaultTransaction("create"); try (final FeatureWriter writer = newDataStore.getFeatureWriterAppend(typeName, transaction)) { for (final SimpleFeature shape : shapes) { final SimpleFeature copy = writer.next(); for (final AttributeDescriptor attrD : copy.getFeatureType().getAttributeDescriptors()) { // the null case should only happen for geometry if (copy.getFeatureType().getDescriptor(attrD.getName()) != null) { copy.setAttribute(attrD.getName(), shape.getAttribute(attrD.getName())); } } // shape files force geometry name to be 'the_geom'. 
So isolate // this change copy.setDefaultGeometry(shape.getDefaultGeometry()); writer.write(); } } catch (final IOException e) { LOGGER.warn("Problem with the FeatureWritter", e); transaction.rollback(); } finally { transaction.commit(); transaction.close(); } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/SimpleFeatureItemWrapperFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.IOException; import java.util.UUID; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; public class SimpleFeatureItemWrapperFactory implements AnalyticItemWrapperFactory { @Override public AnalyticItemWrapper create(final SimpleFeature item) { return new SimpleFeatureAnalyticItemWrapper(item); } @Override public void initialize(final JobContext context, final Class scope, final Logger logger) throws IOException {} public static class SimpleFeatureAnalyticItemWrapper implements AnalyticItemWrapper { final SimpleFeature item; public SimpleFeatureAnalyticItemWrapper(final SimpleFeature item) { this.item = item; } @Override public String getID() { return item.getID(); } @Override public SimpleFeature getWrappedItem() { return item; } @Override public long getAssociationCount() { final Long countO = (Long) item.getAttribute(ClusterFeatureAttribute.COUNT.attrName()); return (countO != null) ? 
countO.longValue() : 0; } @Override public int getIterationID() { return ((Integer) item.getAttribute(ClusterFeatureAttribute.ITERATION.attrName())).intValue(); } @Override public String getGroupID() { return getAttribute(item, ClusterFeatureAttribute.GROUP_ID.attrName()); } @Override public void setGroupID(final String groupID) { item.setAttribute(ClusterFeatureAttribute.GROUP_ID.attrName(), groupID); } @Override public void resetAssociatonCount() { item.setAttribute(ClusterFeatureAttribute.COUNT.attrName(), 0); } @Override public void incrementAssociationCount(final long increment) { item.setAttribute( ClusterFeatureAttribute.COUNT.attrName(), getAssociationCount() + increment); } @Override public String toString() { return "SimpleFeatureCentroid [item=" + item.getID() + ", + group=" + getGroupID() + ", + count=" + getAssociationCount() + ", cost=" + getCost() + "]"; } @Override public double getCost() { final Double costO = (Double) item.getAttribute(ClusterFeatureAttribute.WEIGHT.attrName()); return (costO != null) ? 
costO.doubleValue() : 0.0; } @Override public void setCost(final double cost) { // GENERIC GEOMETRY HAS A DISTANCE, NOT A COST item.setAttribute(ClusterFeatureAttribute.WEIGHT.attrName(), cost); } @Override public String getName() { return item.getAttribute(ClusterFeatureAttribute.NAME.attrName()).toString(); } @Override public String[] getExtraDimensions() { return new String[0]; } @Override public double[] getDimensionValues() { return new double[0]; } @Override public Geometry getGeometry() { return (Geometry) item.getAttribute(ClusterFeatureAttribute.GEOMETRY.attrName()); } @Override public void setZoomLevel(final int level) { item.setAttribute(ClusterFeatureAttribute.ZOOM_LEVEL.attrName(), Integer.valueOf(level)); } @Override public int getZoomLevel() { return getIntAttribute(item, ClusterFeatureAttribute.ZOOM_LEVEL.attrName(), 1); } @Override public void setBatchID(final String batchID) { item.setAttribute(ClusterFeatureAttribute.BATCH_ID.attrName(), batchID); } @Override public String getBatchID() { return item.getAttribute(ClusterFeatureAttribute.BATCH_ID.attrName()).toString(); } } private static String getAttribute(final SimpleFeature feature, final String name) { final Object att = feature.getAttribute(name); return att == null ? null : att.toString(); } private static int getIntAttribute( final SimpleFeature feature, final String name, final int defaultValue) { final Object att = feature.getAttribute(name); return att == null ? defaultValue : (att instanceof Number ? 
((Number) att).intValue() : Integer.parseInt(att.toString())); } /* * @see org.locationtech.geowave.analytics.tools.CentroidFactory#createNextCentroid * (java.lang.Object, org.locationtech.jts.geom.Coordinate, java.lang.String[], double[]) */ @Override public AnalyticItemWrapper createNextItem( final SimpleFeature feature, final String groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues) { final Geometry geometry = (Geometry) feature.getAttribute(ClusterFeatureAttribute.GEOMETRY.attrName()); return new SimpleFeatureAnalyticItemWrapper( AnalyticFeature.createGeometryFeature( feature.getFeatureType(), feature.getAttribute(ClusterFeatureAttribute.BATCH_ID.attrName()).toString(), UUID.randomUUID().toString(), getAttribute(feature, ClusterFeatureAttribute.NAME.attrName()), groupID, ((Double) feature.getAttribute( ClusterFeatureAttribute.WEIGHT.attrName())).doubleValue(), geometry.getFactory().createPoint(coordinate), extraNames, extraValues, ((Integer) feature.getAttribute( ClusterFeatureAttribute.ZOOM_LEVEL.attrName())).intValue(), ((Integer) feature.getAttribute( ClusterFeatureAttribute.ITERATION.attrName())).intValue() + 1, ((Long) feature.getAttribute(ClusterFeatureAttribute.COUNT.attrName())).longValue())); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/SimpleFeatureProjection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; /** Assumes two-dimensional simple feature without time dimensions. */ public class SimpleFeatureProjection implements Projection { @Override public Geometry getProjection(final SimpleFeature anItem) { return (Geometry) anItem.getDefaultGeometry(); } @Override public void initialize(final JobContext context, final Class scope) throws IOException {} @Override public void setup( final PropertyManagement runTimeProperties, final Class scope, final Configuration configuration) {} } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidItemWrapperFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import java.io.IOException; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Determine the group ID for an item dynamically. * * @param */ public class CentroidItemWrapperFactory implements AnalyticItemWrapperFactory { static final Logger LOGGER = LoggerFactory.getLogger(CentroidItemWrapperFactory.class); private AnalyticItemWrapperFactory itemFactory; private NestedGroupCentroidAssignment nestedGroupCentroidAssignment; @Override public AnalyticItemWrapper create(final T item) { return new CentroidItemWrapper(item); } @Override public void initialize(final JobContext context, final Class scope, final Logger logger) throws IOException { try { nestedGroupCentroidAssignment = new NestedGroupCentroidAssignment<>(context, scope, logger); } catch (InstantiationException | IllegalAccessException e) { throw new IOException("Failed to instantiate", e); } itemFactory.initialize(context, scope, logger); } public AnalyticItemWrapperFactory getItemFactory() { return itemFactory; } public void setItemFactory(final AnalyticItemWrapperFactory itemFactory) { this.itemFactory = itemFactory; } public class CentroidItemWrapper implements AnalyticItemWrapper { final AnalyticItemWrapper wrappedItem; AnalyticItemWrapper centroidItem; public 
CentroidItemWrapper(final T item) { wrappedItem = itemFactory.create(item); try { nestedGroupCentroidAssignment.findCentroidForLevel( wrappedItem, new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { centroidItem = pairing.getCentroid(); } }); } catch (final IOException e) { LOGGER.error("Cannot resolve paired centroid for " + wrappedItem.getID(), e); centroidItem = wrappedItem; } } @Override public String getID() { return centroidItem.getID(); } @Override public T getWrappedItem() { return centroidItem.getWrappedItem(); } @Override public long getAssociationCount() { return centroidItem.getAssociationCount(); } @Override public int getIterationID() { return centroidItem.getIterationID(); } // this is not a mistake...the group id is the centroid itself @Override public String getGroupID() { return centroidItem.getID(); } @Override public void setGroupID(final String groupID) {} @Override public void resetAssociatonCount() {} @Override public void incrementAssociationCount(final long increment) {} @Override public double getCost() { return centroidItem.getCost(); } @Override public void setCost(final double cost) {} @Override public String getName() { return centroidItem.getName(); } @Override public String[] getExtraDimensions() { return new String[0]; } @Override public double[] getDimensionValues() { return new double[0]; } @Override public Geometry getGeometry() { return centroidItem.getGeometry(); } @Override public void setZoomLevel(final int level) {} @Override public int getZoomLevel() { return centroidItem.getZoomLevel(); } @Override public void setBatchID(final String batchID) {} @Override public String getBatchID() { return centroidItem.getBatchID(); } } /* * @see org.locationtech.geowave.analytics.tools.CentroidFactory#createNextCentroid * (java.lang.Object, org.locationtech.jts.geom.Coordinate, java.lang.String[], double[]) */ @Override public AnalyticItemWrapper createNextItem( final T feature, final String 
groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues) { return this.itemFactory.createNextItem(feature, groupID, coordinate, extraNames, extraValues); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidManager.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.clustering;

import java.io.IOException;
import java.util.List;
import org.locationtech.geowave.analytic.AnalyticItemWrapper;
import org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException;
import org.locationtech.jts.geom.Coordinate;

/**
 * Manage centroids created per batch and per group of analytic processes. There can be multiple
 * groups per batch. A group is loosely interpreted as a set of item geometries under analysis. The
 * sets can be defined by shared characteristics.
 *
 * @param <T> The type of item that is used to represent a centroid.
 */
public interface CentroidManager<T> {

  /**
   * Creates a new centroid based on the old centroid with new coordinates and dimension values
   */
  public AnalyticItemWrapper<T> createNextCentroid(
      final T feature,
      final String groupID,
      final Coordinate coordinate,
      final String[] extraNames,
      final double[] extraValues);

  /**
   * Looks up a centroid by its data id within a group.
   *
   * @throws MatchingCentroidNotFoundException if no centroid with that id exists in the group
   */
  public AnalyticItemWrapper<T> getCentroidById(final String id, final String groupID)
      throws IOException, MatchingCentroidNotFoundException;

  /** Deletes the centroids identified by the given data ids. */
  public void delete(final String[] dataIds) throws IOException;

  /** Returns the ids of all centroid groups. */
  public List<String> getAllCentroidGroups() throws IOException;

  /** Returns the centroids belonging to the given group. */
  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(final String groupID)
      throws IOException;

  /** Returns the centroids belonging to the given group within a specific batch. */
  public List<AnalyticItemWrapper<T>> getCentroidsForGroup(
      final String batchID,
      final String groupID) throws IOException;

  /**
   * Applies the callback to every group's centroids.
   *
   * @return an aggregate status code from the callback invocations
   */
  public int processForAllGroups(CentroidProcessingFn<T> fn) throws IOException;

  /** Callback applied to each group's centroid list by {@link #processForAllGroups}. */
  public static interface CentroidProcessingFn<T> {
    public int processGroup(final String groupID, final List<AnalyticItemWrapper<T>> centroids);
  }

  /** Returns the (possibly cached) centroid with the given id. */
  public AnalyticItemWrapper<T> getCentroid(final String id);

  /** Clears any cached centroid state. */
  public void clear();

  public String getDataTypeName();

  public String getIndexName();
}

================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidManagerGeoWave.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import java.io.IOException; import java.io.Serializable; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.collections.map.LRUMap; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureWriter; import org.geotools.data.Transaction; import org.geotools.data.shapefile.ShapefileDataStore; import org.geotools.data.shapefile.ShapefileDataStoreFactory; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.feature.type.BasicFeatureTypes; import org.geotools.filter.FilterFactoryImpl; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException; import 
org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.feature.type.GeometryType; import org.opengis.filter.Filter; import org.opengis.filter.expression.Expression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Manages the population of centroids by group id and batch id. * * @param The item type used to represent a centroid. 
*/
public class CentroidManagerGeoWave implements CentroidManager {
  static final Logger LOGGER = LoggerFactory.getLogger(CentroidManagerGeoWave.class);

  // NOTE(review): generic type arguments appear to have been stripped from this extract
  // (raw AnalyticItemWrapper/QueryBuilder, the unresolved T below, `List>` tokens);
  // upstream this is presumably CentroidManagerGeoWave<T> — confirm against the repository.

  // The configuration parameters this manager reads (see init / setParameters).
  private static final ParameterEnum[] MY_PARAMS =
      new ParameterEnum[] {
          StoreParameters.StoreParam.INPUT_STORE,
          GlobalParameters.Global.BATCH_ID,
          CentroidParameters.Centroid.DATA_TYPE_ID,
          CentroidParameters.Centroid.DATA_NAMESPACE_URI,
          CentroidParameters.Centroid.INDEX_NAME,
          CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,
          CentroidParameters.Centroid.ZOOM_LEVEL};

  // Batch identifier that scopes every query and transfer performed by this manager.
  private String batchId;
  // Zoom level used to filter centroids when no group id is supplied; 0 disables the filter
  // (see getRawCentroids).
  private int level = 0;
  // Adapts raw store entries to/from AnalyticItemWrapper centroids.
  private AnalyticItemWrapperFactory centroidFactory;
  // Feature adapter registered for the centroid data type.
  private GeotoolsFeatureDataAdapter adapter;
  // Type name of the centroid features in the data store.
  private String centroidDataTypeId;
  private DataStore dataStore;
  private IndexStore indexStore;
  // Index all centroid queries/writes go through.
  private Index index;

  /**
   * Fully-specified constructor; resolves the index by name and the adapter by its internal
   * adapter id from the supplied stores.
   */
  public CentroidManagerGeoWave(
      final DataStore dataStore,
      final IndexStore indexStore,
      final PersistentAdapterStore adapterStore,
      final AnalyticItemWrapperFactory centroidFactory,
      final String centroidDataTypeId,
      final short centroidInternalAdapterId,
      final String indexName,
      final String batchId,
      final int level) {
    this.centroidFactory = centroidFactory;
    this.level = level;
    this.batchId = batchId;
    this.dataStore = dataStore;
    this.indexStore = indexStore;
    this.centroidDataTypeId = centroidDataTypeId;
    index = indexStore.getIndex(indexName);
    adapter =
        (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(
            centroidInternalAdapterId).getAdapter();
  }

  /**
   * Builds a manager from PropertyManagement by serializing the properties into a fresh Hadoop
   * Configuration and delegating to init.
   *
   * @throws IOException if the configuration cannot be translated or the stores cannot be opened
   */
  public CentroidManagerGeoWave(final PropertyManagement properties) throws IOException {
    final Class scope = CentroidManagerGeoWave.class;
    final Configuration configuration = new Configuration();
    properties.setJobConfiguration(configuration, scope);
    init(Job.getInstance(configuration), scope, LOGGER);
  }

  public CentroidManagerGeoWave(final JobContext context, final Class scope) throws IOException {
    this(context, scope, LOGGER);
  }

  public CentroidManagerGeoWave(final JobContext context, final Class scope, final Logger logger)
      throws IOException {
    init(context, scope, logger);
  }

  /**
   * Reads all MY_PARAMS settings from the job configuration (scoped by {@code scope}) and wires up
   * the wrapper factory, data/index/adapter stores, index and adapter.
   *
   * @throws IOException if the wrapper factory cannot be instantiated/initialized
   */
  private void init(final JobContext context, final Class scope, final Logger logger)
      throws IOException {
    final ScopedJobConfiguration scopedJob =
        new ScopedJobConfiguration(context.getConfiguration(), scope, logger);
    try {
      // Defaults to CentroidItemWrapperFactory when no factory class is configured.
      centroidFactory =
          (AnalyticItemWrapperFactory) CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS.getHelper().getValue(
              context,
              scope,
              CentroidItemWrapperFactory.class);
      centroidFactory.initialize(context, scope, logger);
    } catch (final Exception e1) {
      LOGGER.error(
          "Cannot instantiate "
              + GeoWaveConfiguratorBase.enumToConfKey(
                  this.getClass(),
                  CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS));
      throw new IOException(e1);
    }
    this.level = scopedJob.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, 1);
    centroidDataTypeId = scopedJob.getString(CentroidParameters.Centroid.DATA_TYPE_ID, "centroid");
    // Falls back to the current wall-clock time (millis) as the batch id.
    batchId =
        scopedJob.getString(
            GlobalParameters.Global.BATCH_ID,
            Long.toString(Calendar.getInstance().getTime().getTime()));
    // Default index is the standard spatial index.
    final String indexName =
        scopedJob.getString(
            CentroidParameters.Centroid.INDEX_NAME,
            SpatialDimensionalityTypeProvider.createIndexFromOptions(
                new SpatialOptions()).getName());
    final PersistableStore store =
        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
            context,
            scope,
            null);
    dataStore = store.getDataStoreOptions().createDataStore();
    indexStore = store.getDataStoreOptions().createIndexStore();
    index = indexStore.getIndex(indexName);
    final PersistentAdapterStore adapterStore = store.getDataStoreOptions().createAdapterStore();
    adapter =
        (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(
            store.getDataStoreOptions().createInternalAdapterStore().getAdapterId(
                centroidDataTypeId)).getAdapter();
  }

  /**
   * Creates a new centroid based on the old centroid with new coordinates and dimension values.
   * Delegates entirely to the configured wrapper factory.
   */
  @Override
  public AnalyticItemWrapper createNextCentroid(
      final T feature,
      final String groupID,
      final Coordinate coordinate,
      final String[] extraNames,
      final double[] extraValues) {
    return centroidFactory.createNextItem(feature, groupID, coordinate, extraNames, extraValues);
  }

  // Size bound of the per-(batch, group) centroid-list cache below.
  private final int capacity = 100;
  // LRU cache keyed by Pair<batchId, groupId> -> loaded centroid list (see getCentroidsForGroup).
  private final LRUMap groupToCentroid = new LRUMap(capacity);

  /** Drops all cached centroid lists. */
  @Override
  public void clear() {
    groupToCentroid.clear();
  }

  /**
   * Deletes centroids by data id from the centroid type/index. Null entries in the array are
   * silently skipped. Note: the cache is NOT invalidated here.
   */
  @Override
  public void delete(final String[] dataIds) throws IOException {
    for (final String dataId : dataIds) {
      if (dataId != null) {
        final QueryBuilder bldr =
            QueryBuilder.newBuilder().addTypeName(centroidDataTypeId).indexName(index.getName());
        dataStore.delete(
            bldr.constraints(
                bldr.constraintsFactory().dataIds(StringUtils.stringToBinary(dataId))).build());
      }
    }
  }

  /**
   * Scans all centroids of the current batch and returns the distinct group ids, warming the
   * group cache as a side effect.
   */
  @Override
  public List getAllCentroidGroups() throws IOException {
    final List groups = new ArrayList<>();
    final CloseableIterator it = getRawCentroids(this.batchId, null);
    // NOTE(review): it.close() is not in a finally block; the iterator leaks if
    // getCentroidsForGroup throws mid-scan — consider try-with-resources.
    while (it.hasNext()) {
      final AnalyticItemWrapper item = centroidFactory.create(it.next());
      final String groupID = item.getGroupID();
      int pos = groups.indexOf(groupID);
      if (pos < 0) {
        pos = groups.size();
        groups.add(groupID);
      }
      // cache the first set (only the first `capacity` groups fit in the LRU cache)
      if (pos < capacity) {
        getCentroidsForGroup(groupID);
      }
    }
    it.close();
    return groups;
  }

  /** Centroids of a group within the manager's current batch. */
  @Override
  public List> getCentroidsForGroup(final String groupID) throws IOException {
    return getCentroidsForGroup(this.batchId, groupID);
  }

  /**
   * Centroids of a group within a specific batch, served from the LRU cache when possible. A null
   * groupID means "all groups" and is cached under the sentinel key "##".
   */
  @Override
  public List> getCentroidsForGroup(final String batchID, final String groupID)
      throws IOException {
    final String lookupGroup = (groupID == null) ? "##" : groupID;
    final Pair gid = Pair.of(batchID, lookupGroup);
    @SuppressWarnings("unchecked")
    List> centroids = (List>) groupToCentroid.get(gid);
    if (centroids == null) {
      centroids = groupID == null ? loadCentroids(batchID, null) : loadCentroids(batchID, groupID);
      groupToCentroid.put(gid, centroids);
    }
    return centroids;
  }

  /**
   * Linear search of the (possibly cached) group for a centroid with the given id.
   *
   * @throws MatchingCentroidNotFoundException when no centroid in the group has that id
   */
  @Override
  public AnalyticItemWrapper getCentroidById(final String id, final String groupID)
      throws IOException, MatchingCentroidNotFoundException {
    for (final AnalyticItemWrapper centroid : this.getCentroidsForGroup(groupID)) {
      if (centroid.getID().equals(id)) {
        return centroid;
      }
    }
    throw new MatchingCentroidNotFoundException(id);
  }

  /** Loads (uncached) the centroids of a batch/group from the data store. */
  private List> loadCentroids(final String batchID, final String groupID)
      throws IOException {
    final List> centroids = new ArrayList<>();
    try {
      CloseableIterator it = null;
      try {
        it = this.getRawCentroids(batchID, groupID);
        while (it.hasNext()) {
          centroids.add(centroidFactory.create(it.next()));
        }
        return centroids;
      } finally {
        if (it != null) {
          it.close();
        }
      }
    } catch (final IOException e) {
      // Rewrap to add the log entry; the original exception is preserved as the cause.
      LOGGER.error("Cannot load centroids");
      throw new IOException(e);
    }
  }

  /** Fetches a single centroid by data id, or null when it does not exist. */
  @Override
  @SuppressWarnings("unchecked")
  public AnalyticItemWrapper getCentroid(final String dataId) {
    final QueryBuilder bldr =
        (QueryBuilder) QueryBuilder.newBuilder().addTypeName(centroidDataTypeId).indexName(
            index.getName());
    try (CloseableIterator it =
        dataStore.query(
            bldr.constraints(
                bldr.constraintsFactory().dataIds(StringUtils.stringToBinary(dataId))).build())) {
      if (it.hasNext()) {
        return centroidFactory.create(it.next());
      }
    }
    return null;
  }

  /**
   * Builds and runs the CQL-filtered vector query underlying all centroid reads. Filters by batch
   * id always; additionally by group id when given, otherwise by zoom level when level > 0.
   * (The batchId parameter intentionally shadows the field — callers pass either the current or a
   * source batch.)
   */
  @SuppressWarnings("unchecked")
  protected CloseableIterator getRawCentroids(final String batchId, final String groupID)
      throws IOException {
    final FilterFactoryImpl factory = new FilterFactoryImpl();
    final Expression expB1 = factory.property(ClusterFeatureAttribute.BATCH_ID.attrName());
    final Expression expB2 = factory.literal(batchId);
    // Third argument `false` = case-insensitive match in the GeoTools filter API.
    final Filter batchIdFilter = factory.equal(expB1, expB2, false);
    Filter finalFilter = batchIdFilter;
    if (groupID != null) {
      final Expression exp1 = factory.property(ClusterFeatureAttribute.GROUP_ID.attrName());
      final Expression exp2 = factory.literal(groupID);
      // ignore levels for group IDS
      finalFilter = factory.and(factory.equal(exp1, exp2, false), batchIdFilter);
    } else if (level > 0) {
      final Expression exp1 = factory.property(ClusterFeatureAttribute.ZOOM_LEVEL.attrName());
      final Expression exp2 = factory.literal(level);
      finalFilter = factory.and(factory.equal(exp1, exp2, false), batchIdFilter);
    }
    final VectorQueryBuilder bldr =
        VectorQueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName(
            index.getName());
    return (CloseableIterator) dataStore.query(
        bldr.constraints(bldr.constraintsFactory().filterConstraints(finalFilter)).build());
  }

  /**
   * Re-tags every centroid of {@code fromBatchId} (optionally restricted to one group) with this
   * manager's batch id and writes it back through a fresh writer.
   */
  @SuppressWarnings("unchecked")
  public void transferBatch(final String fromBatchId, final String groupID) throws IOException {
    int count = 0;
    try (final CloseableIterator it = getRawCentroids(fromBatchId, groupID)) {
      dataStore.addType(adapter, index);
      try (final Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) {
        while (it.hasNext()) {
          final AnalyticItemWrapper item = centroidFactory.create(it.next());
          item.setBatchID(this.batchId);
          count++;
          indexWriter.write(item.getWrappedItem());
        }
        // indexWriter.close();
      }
    }
    LOGGER.info("Transfer " + count + " centroids");
  }

  /**
   * Applies {@code fn} to each group's centroid list, stopping at the first non-zero status.
   *
   * @return 0 on success, otherwise the first non-zero status returned by {@code fn}
   */
  @Override
  public int processForAllGroups(final CentroidProcessingFn fn) throws IOException {
    List centroidGroups;
    try {
      centroidGroups = getAllCentroidGroups();
    } catch (final IOException e) {
      throw new IOException(e);
    }
    int status = 0;
    for (final String groupID : centroidGroups) {
      status = fn.processGroup(groupID, getCentroidsForGroup(groupID));
      if (status != 0) {
        break;
      }
    }
    return status;
  }

  /** The configuration parameters consumed by this manager. */
  public static Collection> getParameters() {
    return Arrays.asList(MY_PARAMS);
  }

  /** Copies this manager's parameters from PropertyManagement into a Hadoop Configuration. */
  public static void setParameters(
      final Configuration config,
      final Class scope,
      final PropertyManagement runTimeProperties) {
    runTimeProperties.setConfig(MY_PARAMS, config, scope);
  }

  @Override
  public String getIndexName() {
    return index.getName();
  }

  public String getBatchId() {
    return this.batchId;
  }

  /**
   * Picks the converter used by toShapeFile: a schema-preserving one for FeatureDataAdapter,
   * otherwise a generic converter built from the first item's extra dimension names.
   */
  private ToSimpleFeatureConverter getFeatureConverter(
      final List> items,
      final Class shapeClass) {
    return (adapter instanceof FeatureDataAdapter)
        ? new SimpleFeatureConverter((FeatureDataAdapter) adapter, shapeClass)
        : new NonSimpleFeatureConverter(
            items.isEmpty() ? new String[0] : items.get(0).getExtraDimensions(),
            shapeClass);
  }

  /** Strategy for converting a centroid wrapper into a SimpleFeature for shapefile export. */
  private interface ToSimpleFeatureConverter {
    SimpleFeatureType getFeatureType();

    SimpleFeature toSimpleFeature(AnalyticItemWrapper item);
  }

  /**
   * Copies a feature type, replacing every geometry attribute's binding with {@code shapeClass}.
   * Returns null (after logging) if the builder rejects the schema.
   */
  private static SimpleFeatureType createFeatureType(
      final SimpleFeatureType featureType,
      final Class shapeClass) {
    try {
      final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
      builder.setName(featureType.getName().getLocalPart());
      builder.setNamespaceURI(featureType.getName().getNamespaceURI());
      builder.setCRS(featureType.getCoordinateReferenceSystem());
      for (final AttributeDescriptor attr : featureType.getAttributeDescriptors()) {
        if (attr.getType() instanceof GeometryType) {
          builder.add(attr.getLocalName(), shapeClass);
        } else {
          builder.add(attr.getLocalName(), attr.getType().getBinding());
        }
      }
      return builder.buildFeatureType();
    } catch (final Exception e) {
      LOGGER.warn("Schema Creation Error. Hint: Check the SRID.", e);
    }
    return null;
  }

  /**
   * Coerces a geometry to {@code shapeClass}: as-is if it already matches, its centroid when a
   * Point is wanted, else its convex hull when that matches; null when no coercion applies.
   */
  private static Geometry convert(final Geometry value, final Class shapeClass) {
    if (shapeClass.isInstance(value)) {
      return value;
    }
    if (shapeClass.isAssignableFrom(Point.class)) {
      return value.getCentroid();
    }
    final Geometry hull = value.convexHull();
    if (shapeClass.isInstance(hull)) {
      return hull;
    }
    return null;
  }

  /** Converter for centroids whose wrapped item is itself a SimpleFeature. */
  private class SimpleFeatureConverter implements ToSimpleFeatureConverter {
    final SimpleFeatureType type;
    // Per-attribute default values, positionally aligned with the adapter's descriptors.
    final Object[] defaults;
    final Class shapeClass;

    public SimpleFeatureConverter(final FeatureDataAdapter adapter, final Class shapeClass) {
      type = createFeatureType(adapter.getFeatureType(), shapeClass);
      int p = 0;
      this.shapeClass = shapeClass;
      final List descriptors = adapter.getFeatureType().getAttributeDescriptors();
      defaults = new Object[descriptors.size()];
      for (final AttributeDescriptor descriptor : descriptors) {
        defaults[p++] = descriptor.getDefaultValue();
      }
    }

    @Override
    public SimpleFeatureType getFeatureType() {
      return type;
    }

    /**
     * Copies the wrapped feature's attributes positionally, coercing geometries to shapeClass;
     * returns null when a geometry cannot be coerced.
     */
    @Override
    public SimpleFeature toSimpleFeature(final AnalyticItemWrapper item) {
      final SimpleFeature newFeature = SimpleFeatureBuilder.build(type, defaults, item.getID());
      int i = 0;
      for (final Object value : ((SimpleFeature) item.getWrappedItem()).getAttributes()) {
        if (value instanceof Geometry) {
          final Geometry newValue = convert((Geometry) value, shapeClass);
          if (newValue == null) {
            return null;
          }
          newFeature.setAttribute(i++, newValue);
        } else {
          newFeature.setAttribute(i++, value);
        }
      }
      return newFeature;
    }
  }

  /** Converter for centroids that are not backed by a SimpleFeature adapter. */
  private class NonSimpleFeatureConverter implements ToSimpleFeatureConverter {
    final SimpleFeatureType featureType;
    final Object[] defaults;
    final Class shapeClass;

    public NonSimpleFeatureConverter(final String[] extraDimensionNames, final Class shapeClass) {
      featureType =
          AnalyticFeature.createFeatureAdapter(
              centroidDataTypeId,
              extraDimensionNames,
              BasicFeatureTypes.DEFAULT_NAMESPACE,
              ClusteringUtils.CLUSTERING_CRS,
              ClusterFeatureAttribute.values(),
              shapeClass).getFeatureType();
      this.shapeClass = shapeClass;
      final List descriptors = featureType.getAttributeDescriptors();
      defaults = new Object[descriptors.size()];
      int p = 0;
      for (final AttributeDescriptor descriptor : descriptors) {
        defaults[p++] = descriptor.getDefaultValue();
      }
    }

    @Override
    public SimpleFeatureType getFeatureType() {
      return featureType;
    }

    /** Rebuilds a feature from the wrapper's accessors; null when the geometry can't be coerced. */
    @Override
    public SimpleFeature toSimpleFeature(final AnalyticItemWrapper item) {
      final Geometry value = item.getGeometry();
      final Geometry newValue = convert(value, shapeClass);
      if (newValue == null) {
        return null;
      }
      return AnalyticFeature.createGeometryFeature(
          featureType,
          item.getBatchID(),
          item.getID(),
          item.getName(),
          item.getGroupID(),
          item.getCost(),
          newValue,
          item.getExtraDimensions(),
          item.getDimensionValues(),
          item.getZoomLevel(),
          item.getIterationID(),
          item.getAssociationCount());
    }
  }

  /**
   * Exports the current batch's centroids to {@code parentDir/<batchId>.shp}, coercing geometries
   * to {@code shapeClass}.
   */
  public void toShapeFile(final String parentDir, final Class shapeClass) throws IOException {
    // File shp = new File(parentDir + "/" + this.batchId + ".shp");
    // File shx = new File(parentDir + "/" + this.batchId + ".shx");
    final ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();
    final Map params = new HashMap<>();
    try {
      params.put("url", new URL("file://" + parentDir + "/" + this.batchId + ".shp"));
    } catch (final MalformedURLException e) {
      LOGGER.error("Error creating URL", e);
    }
    params.put("create spatial index", Boolean.TRUE);
    final List> centroids = loadCentroids(batchId, null);
    final ToSimpleFeatureConverter converter = getFeatureConverter(centroids, shapeClass);
    final ShapefileDataStore newDataStore =
        (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);
    newDataStore.createSchema(converter.getFeatureType());
    final Transaction transaction = new DefaultTransaction("create");
    final String typeName = newDataStore.getTypeNames()[0];
    try (final FeatureWriter writer = newDataStore.getFeatureWriterAppend(typeName, transaction)) {
      for (final AnalyticItemWrapper item : centroids) {
        final SimpleFeature copy = writer.next();
        final SimpleFeature newFeature = converter.toSimpleFeature(item);
        // NOTE(review): toSimpleFeature may return null (non-coercible geometry); that would
        // NPE on the next line — confirm whether inputs guarantee coercible geometries.
        for (final AttributeDescriptor attrD : newFeature.getFeatureType().getAttributeDescriptors()) {
          // the null case should only happen for geometry
          if (copy.getFeatureType().getDescriptor(attrD.getName()) != null) {
            copy.setAttribute(attrD.getName(), newFeature.getAttribute(attrD.getName()));
          }
        }
        // shape files force geometry name to be 'the_geom'. So isolate
        // this change
        copy.setDefaultGeometry(newFeature.getDefaultGeometry());
        writer.write();
      }
    } catch (final IOException e) {
      LOGGER.warn("Problem with the FeatureWritter", e);
      transaction.rollback();
      // NOTE(review): the finally block below still calls commit() after this rollback —
      // verify whether commit-after-rollback is intended or a latent bug.
    } finally {
      transaction.commit();
      transaction.close();
    }
  }

  @Override
  public String getDataTypeName() {
    return this.centroidDataTypeId;
  }
}

================================================
FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/CentroidPairing.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.clustering;

import java.util.Objects;
import org.locationtech.geowave.analytic.AnalyticItemWrapper;

/**
 * Mutable value object associating a centroid with an item paired to it, along with the distance
 * between them (as computed by whatever distance function the caller used).
 *
 * <p> Equality and hash code consider all three components; {@code distance} is compared via
 * {@link Double#doubleToLongBits(double)} so NaN/-0.0 behave consistently with the original
 * generated implementation.
 *
 * @param <T> the underlying item type wrapped by {@link AnalyticItemWrapper}
 */
public class CentroidPairing<T> {
  private AnalyticItemWrapper<T> centroid;
  private AnalyticItemWrapper<T> pairedItem;
  private double distance;

  /** No-arg constructor for frameworks/serialization; all fields start null/0. */
  public CentroidPairing() {}

  public CentroidPairing(
      final AnalyticItemWrapper<T> centroid,
      final AnalyticItemWrapper<T> pairedItem,
      final double distance) {
    this.centroid = centroid;
    this.pairedItem = pairedItem;
    this.distance = distance;
  }

  public AnalyticItemWrapper<T> getCentroid() {
    return centroid;
  }

  public void setCentroid(final AnalyticItemWrapper<T> centroid) {
    this.centroid = centroid;
  }

  public AnalyticItemWrapper<T> getPairedItem() {
    return pairedItem;
  }

  public void setPairedItem(final AnalyticItemWrapper<T> pairedItem) {
    this.pairedItem = pairedItem;
  }

  public double getDistance() {
    return distance;
  }

  public void setDistance(final double distance) {
    this.distance = distance;
  }

  @Override
  public int hashCode() {
    // Objects.hash boxes distance as Double, whose hashCode also uses doubleToLongBits,
    // so the equals/hashCode contract is preserved.
    return Objects.hash(centroid, distance, pairedItem);
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    // Exact-class comparison (not instanceof) preserves the original symmetric semantics.
    if ((obj == null) || (getClass() != obj.getClass())) {
      return false;
    }
    final CentroidPairing<?> other = (CentroidPairing<?>) obj;
    return (Double.doubleToLongBits(distance) == Double.doubleToLongBits(other.distance))
        && Objects.equals(centroid, other.centroid)
        && Objects.equals(pairedItem, other.pairedItem);
  }
}

================================================
FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/ClusteringUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import java.io.IOException; import java.util.LinkedList; import java.util.List; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.store.adapter.AdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.jts.geom.Polygon; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ClusteringUtils { public static final String CLUSTERING_CRS = "EPSG:4326"; static final Logger LOGGER = LoggerFactory.getLogger(ClusteringUtils.class); private static DataTypeAdapter 
createAdapter( final String sampleDataTypeId, final String sampleDataNamespaceURI, final AdapterStore adapterStore, final String[] dimensionNames) { final FeatureDataAdapter adapter = AnalyticFeature.createGeometryFeatureAdapter( sampleDataTypeId, dimensionNames, sampleDataNamespaceURI, CLUSTERING_CRS); final ByteArray dbId = new ByteArray(sampleDataTypeId); if (!adapterStore.adapterExists(dbId)) { adapterStore.addAdapter(adapter); return adapter; } else { return adapterStore.getAdapter(dbId); } } public static DataTypeAdapter[] getAdapters(final PropertyManagement propertyManagement) throws IOException { final PersistableStore store = (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue( propertyManagement); final AdapterStore adapterStore = store.getDataStoreOptions().createAdapterStore(); return adapterStore.getAdapters(); } public static Index[] getIndices(final PropertyManagement propertyManagement) { final PersistableStore store = (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue( propertyManagement); final IndexStore indexStore = store.getDataStoreOptions().createIndexStore(); try (final org.locationtech.geowave.core.store.CloseableIterator it = indexStore.getIndices()) { final List indices = new LinkedList<>(); while (it.hasNext()) { indices.add(it.next()); } final Index[] result = new Index[indices.size()]; indices.toArray(result); return result; } } /* * Method takes in a polygon and generates the corresponding ranges in a GeoWave spatial index */ protected static QueryRanges getGeoWaveRangesForQuery(final Polygon polygon) { final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final QueryRanges ranges = DataStoreUtils.constraintsToQueryRanges( new ExplicitSpatialQuery(polygon).getIndexConstraints(index), index, null, -1); return ranges; } public static Index createIndex(final PropertyManagement propertyManagement) { final PersistableStore store = 
(PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue( propertyManagement); final IndexStore indexStore = store.getDataStoreOptions().createIndexStore(); return indexStore.getIndex( propertyManagement.getPropertyAsString(CentroidParameters.Centroid.INDEX_NAME)); } public static DataTypeAdapter createAdapter(final PropertyManagement propertyManagement) throws ClassNotFoundException, InstantiationException, IllegalAccessException { final Class dimensionExtractorClass = propertyManagement.getPropertyAsClass( CommonParameters.Common.DIMENSION_EXTRACT_CLASS, DimensionExtractor.class); return ClusteringUtils.createAdapter( propertyManagement.getPropertyAsString(CentroidParameters.Centroid.DATA_TYPE_ID), propertyManagement.getPropertyAsString( CentroidParameters.Centroid.DATA_NAMESPACE_URI, BasicFeatureTypes.DEFAULT_NAMESPACE), ((PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue( propertyManagement)).getDataStoreOptions().createAdapterStore(), dimensionExtractorClass.newInstance().getDimensionNames()); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/DistortionGroupManagement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.io.Writable; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.IndexStore; 
import org.locationtech.geowave.core.store.index.NullIndex; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Find the max change in distortion between some k and k-1, picking the value k associated with * that change. * *

In a multi-group setting, each group may have a different optimal k. Thus, the optimal batch * may be different for each group. Each batch is associated with a different value k. * *

Choose the appropriate batch for each group. Then change the batch identifier for group * centroids to a final provided single batch identifier ( parent batch ). */ public class DistortionGroupManagement { static final Logger LOGGER = LoggerFactory.getLogger(DistortionGroupManagement.class); public static final Index DISTORTIONS_INDEX = new NullIndex("DISTORTIONS"); public static final String[] DISTORTIONS_INDEX_ARRAY = new String[] {DISTORTIONS_INDEX.getName()}; final DataStore dataStore; final IndexStore indexStore; final PersistentAdapterStore adapterStore; final InternalAdapterStore internalAdapterStore; public DistortionGroupManagement(final DataStorePluginOptions dataStoreOptions) { dataStore = dataStoreOptions.createDataStore(); indexStore = dataStoreOptions.createIndexStore(); adapterStore = dataStoreOptions.createAdapterStore(); internalAdapterStore = dataStoreOptions.createInternalAdapterStore(); final DistortionDataAdapter adapter = new DistortionDataAdapter(); dataStore.addType(adapter, DISTORTIONS_INDEX); } public static class BatchIdFilter implements QueryFilter { String batchId; public BatchIdFilter() {} public BatchIdFilter(final String batchId) { super(); this.batchId = batchId; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { return new DistortionEntry(persistenceEncoding.getDataId(), 0.0).batchId.equals(batchId); } @Override public byte[] toBinary() { return StringUtils.stringToBinary(batchId); } @Override public void fromBinary(final byte[] bytes) { batchId = StringUtils.stringFromBinary(bytes); } } public static class BatchIdQuery implements QueryConstraints { String batchId; public BatchIdQuery() {} public BatchIdQuery(final String batchId) { super(); this.batchId = batchId; } @Override public List createFilters(final Index index) { return Collections.singletonList(new BatchIdFilter(batchId)); } @Override public List getIndexConstraints(final Index index) { return 
Collections.emptyList(); } @Override public byte[] toBinary() { return StringUtils.stringToBinary(batchId); } @Override public void fromBinary(final byte[] bytes) { batchId = StringUtils.stringFromBinary(bytes); } } public int retainBestGroups( final AnalyticItemWrapperFactory itemWrapperFactory, final String dataTypeId, final String indexId, final String batchId, final int level) { try { final Map groupDistortions = new HashMap<>(); // row id is group id // colQual is cluster count try (CloseableIterator it = (CloseableIterator) dataStore.query( QueryBuilder.newBuilder().addTypeName( DistortionDataAdapter.ADAPTER_TYPE_NAME).indexName( DISTORTIONS_INDEX.getName()).constraints( new BatchIdQuery(batchId)).build())) { while (it.hasNext()) { final DistortionEntry entry = it.next(); final String groupID = entry.getGroupId(); final Integer clusterCount = entry.getClusterCount(); final Double distortion = entry.getDistortionValue(); DistortionGroup grp = groupDistortions.get(groupID); if (grp == null) { grp = new DistortionGroup(groupID); groupDistortions.put(groupID, grp); } grp.addPair(clusterCount, distortion); } } final CentroidManagerGeoWave centroidManager = new CentroidManagerGeoWave<>( dataStore, indexStore, adapterStore, itemWrapperFactory, dataTypeId, internalAdapterStore.getAdapterId(dataTypeId), indexId, batchId, level); for (final DistortionGroup grp : groupDistortions.values()) { final int optimalK = grp.bestCount(); final String kbatchId = batchId + "_" + optimalK; centroidManager.transferBatch(kbatchId, grp.getGroupID()); } } catch (final RuntimeException ex) { throw ex; } catch (final Exception ex) { LOGGER.error("Cannot determine groups for batch", ex); return 1; } return 0; } public static class DistortionEntry implements Writable { private String groupId; private String batchId; private Integer clusterCount; private Double distortionValue; public DistortionEntry() {} public DistortionEntry( final String groupId, final String batchId, final Integer 
clusterCount, final Double distortionValue) { this.groupId = groupId; this.batchId = batchId; this.clusterCount = clusterCount; this.distortionValue = distortionValue; } private DistortionEntry(final byte[] dataId, final Double distortionValue) { final String dataIdStr = StringUtils.stringFromBinary(dataId); final String[] split = dataIdStr.split("/"); batchId = split[0]; groupId = split[1]; clusterCount = Integer.parseInt(split[2]); this.distortionValue = distortionValue; } public String getGroupId() { return groupId; } public Integer getClusterCount() { return clusterCount; } public Double getDistortionValue() { return distortionValue; } private byte[] getDataId() { return StringUtils.stringToBinary(batchId + "/" + groupId + "/" + clusterCount); } @Override public void write(final DataOutput out) throws IOException { out.writeUTF(groupId); out.writeUTF(batchId); out.writeInt(clusterCount); out.writeDouble(distortionValue); } @Override public void readFields(final DataInput in) throws IOException { groupId = in.readUTF(); batchId = in.readUTF(); clusterCount = in.readInt(); distortionValue = in.readDouble(); } } private static class DistortionGroup { final String groupID; final List> clusterCountToDistortion = new ArrayList<>(); public DistortionGroup(final String groupID) { this.groupID = groupID; } public void addPair(final Integer count, final Double distortion) { clusterCountToDistortion.add(Pair.of(count, distortion)); } public String getGroupID() { return groupID; } public int bestCount() { Collections.sort(clusterCountToDistortion, new Comparator>() { @Override public int compare(final Pair arg0, final Pair arg1) { return arg0.getKey().compareTo(arg1.getKey()); } }); double maxJump = -1.0; Integer jumpIdx = -1; Double oldD = 0.0; // base case !? 
for (final Pair pair : clusterCountToDistortion) { final Double jump = pair.getValue() - oldD; if (jump > maxJump) { maxJump = jump; jumpIdx = pair.getKey(); } oldD = pair.getValue(); } return jumpIdx; } } public static class DistortionDataAdapter implements DataTypeAdapter { public static final String ADAPTER_TYPE_NAME = "distortion"; private static final String DISTORTION_FIELD_NAME = "distortion"; private static final FieldDescriptor DESC = new FieldDescriptorBuilder<>(Double.class).fieldName(DISTORTION_FIELD_NAME).build(); private static final FieldDescriptor[] DESC_ARRAY = new FieldDescriptor[] {DESC}; public DistortionDataAdapter() { super(); } @Override public String getTypeName() { return ADAPTER_TYPE_NAME; } @Override public byte[] getDataId(final DistortionEntry entry) { return entry.getDataId(); } @Override public FieldReader getReader(final String fieldId) { if (DISTORTION_FIELD_NAME.equals(fieldId)) { return (FieldReader) FieldUtils.getDefaultReaderForClass(Double.class); } return null; } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} @Override public Object getFieldValue(final DistortionEntry entry, final String fieldName) { return entry.getDistortionValue(); } @Override public Class getDataClass() { return DistortionEntry.class; } @Override public RowBuilder newRowBuilder( final FieldDescriptor[] outputFieldDescriptors) { return new RowBuilder() { Double fieldValue; @Override public void setField(final String fieldName, final Object fieldValue) { if (DISTORTION_FIELD_NAME.equals(fieldName) && (fieldValue instanceof Double)) { this.fieldValue = (Double) fieldValue; } } @Override public void setFields(final Map values) { values.entrySet().forEach((e) -> setField(e.getKey(), e.getValue())); } @Override public DistortionEntry buildRow(final byte[] dataId) { return new DistortionEntry(dataId, fieldValue); } }; } @Override public FieldDescriptor[] getFieldDescriptors() { return 
DESC_ARRAY; } @Override public FieldDescriptor getFieldDescriptor(final String fieldName) { return DESC; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/LongCentroid.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.jts.geom.Geometry; public class LongCentroid implements AnalyticItemWrapper { Long val; long count = 0; double cost = 0.0; String groupID = ""; @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + (int) (count ^ (count >>> 32)); result = (prime * result) + ((val == null) ? 0 : val.hashCode()); return result; } @Override public int getIterationID() { return 0; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final LongCentroid other = (LongCentroid) obj; if (count != other.count) { return false; } if (val == null) { if (other.val != null) { return false; } } else if (!val.equals(other.val)) { return false; } return true; } public LongCentroid(final long val, final String groupID, final int count) { super(); this.groupID = groupID; this.val = Long.valueOf(val); this.count = count; } @Override public String getGroupID() { return groupID; } @Override public String getID() { return val.toString(); } @Override public Long getWrappedItem() { return val; } @Override public long getAssociationCount() { return count; } @Override public void resetAssociatonCount() { count = 0; } @Override public void incrementAssociationCount(final long increment) { count++; } @Override public double getCost() { return cost; } @Override public void setCost(final double cost) { this.cost = cost; } @Override public String toString() { return 
"LongCentroid [val=" + val + ", count=" + count + ", cost=" + cost + "]"; } @Override public String getName() { return Long.toString(val); } @Override public String[] getExtraDimensions() { return new String[0]; } @Override public double[] getDimensionValues() { return new double[0]; } @Override public Geometry getGeometry() { // TODO Auto-generated method stub return null; } @Override public void setZoomLevel(final int level) { // TODO Auto-generated method stub } @Override public int getZoomLevel() { // TODO Auto-generated method stub return 1; } @Override public void setBatchID(final String batchID) { // TODO Auto-generated method stub } @Override public String getBatchID() { // TODO Auto-generated method stub return null; } @Override public void setGroupID(final String groupID) { this.groupID = groupID; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/NeighborData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import org.apache.commons.codec.binary.Hex; import org.locationtech.geowave.core.index.ByteArray; public class NeighborData implements Comparable> { private T element; private ByteArray id; private double distance; public NeighborData() {} public NeighborData(final T element, final ByteArray id, final double distance) { super(); this.element = element; this.id = id; this.distance = distance; } public NeighborData(final NeighborData element, final double distance) { super(); this.element = element.getElement(); this.id = element.getId(); this.distance = distance; } public ByteArray getId() { return id; } protected void setId(final ByteArray id) { this.id = id; } public double getDistance() { return distance; } public void setDistance(final double distance) { this.distance = distance; } public T getElement() { return element; } protected void setElement(final T neighbor) { this.element = neighbor; } @Override public int hashCode() { return ((element == null) ? 0 : element.hashCode()); } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } @SuppressWarnings("unchecked") final NeighborData other = (NeighborData) obj; if (element == null) { if (other.element != null) { return false; } } else if (!element.equals(other.element)) { return false; } return true; } @Override public int compareTo(final NeighborData otherNNData) { final int dist = Double.compare(distance, otherNNData.distance); // do not care about the ordering based on the neighbor data. 
// just need to force some ordering if they are not the same. return dist == 0 ? hashCode() - otherNNData.hashCode() : dist; } @Override public String toString() { return (id == null ? "" : Hex.encodeHexString(id.getBytes()) + ":") + element.toString() + "(" + this.distance + ")"; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/NestedGroupCentroidAssignment.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.kmeans.CentroidAssociationFn; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.slf4j.Logger; /** * A helper class that finds the closest centroid to a point at a specific zoom level. * *

If the starting level does not match the specified level, then the centroid tree is 'walked' down. * Walking up to higher levels is not supported. * *

Levels are numbered 1 to n, where 1 is the top tier. The current tier being computed may have a * different batch ID (temporary) than all upper level tiers. In this case, a parent batch id is * provided to resolve groups for those tiers. This approach is often used in speculative * computation at each tier. * *

Parameters include: * * *

"NestedGroupCentroidAssignment.Global.ParentBatchId" -> Parent Tier Batch IDs. If not * present then assume value NestedGroupCentroidAssignment.Global.BatchId *

"NestedGroupCentroidAssignment.Global.BatchId" -> batch id for current tier. *

"NestedGroupCentroidAssignment.Global.ZoomLevel" -> current tier (level) *

"NestedGroupCentroidAssignment.Common.DistanceFunctionClass" -> distance function used for * association of data points to centroid. * @see org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave * * @param */ public class NestedGroupCentroidAssignment { private final CentroidAssociationFn associationdFunction = new CentroidAssociationFn<>(); private final CentroidManager centroidManager; private final int endZoomLevel; private final String parentBatchID; public NestedGroupCentroidAssignment( final CentroidManager centroidManager, final int endZoomLevel, final String parentBatchID, final DistanceFn distanceFunction) { super(); this.centroidManager = centroidManager; this.endZoomLevel = endZoomLevel; this.parentBatchID = parentBatchID; this.associationdFunction.setDistanceFunction(distanceFunction); } public NestedGroupCentroidAssignment( final JobContext context, final Class scope, final Logger logger) throws InstantiationException, IllegalAccessException, IOException { final ScopedJobConfiguration config = new ScopedJobConfiguration(context.getConfiguration(), scope, logger); endZoomLevel = config.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, 1); parentBatchID = config.getString( GlobalParameters.Global.PARENT_BATCH_ID, config.getString(GlobalParameters.Global.BATCH_ID, null)); @SuppressWarnings("unchecked") final DistanceFn distanceFunction = config.getInstance( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, DistanceFn.class, FeatureCentroidDistanceFn.class); this.associationdFunction.setDistanceFunction(distanceFunction); centroidManager = new CentroidManagerGeoWave<>(context, scope); } /** * Override zoomLevel from parameters */ public static void setZoomLevel( final Configuration config, final Class scope, final int zoomLevel) { CentroidParameters.Centroid.ZOOM_LEVEL.getHelper().setValue(config, scope, zoomLevel); } /** * Override parent batch ID from parameters */ public static void setParentBatchID( final Configuration config, final Class 
scope, final String parentID) { GlobalParameters.Global.PARENT_BATCH_ID.getHelper().setValue(config, scope, parentID); } public static Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(CentroidManagerGeoWave.getParameters()); params.addAll( Arrays.asList( new ParameterEnum[] { CentroidParameters.Centroid.ZOOM_LEVEL, GlobalParameters.Global.PARENT_BATCH_ID, CommonParameters.Common.DISTANCE_FUNCTION_CLASS})); return params; } public List> getCentroidsForGroup(final String groupID) throws IOException { return centroidManager.getCentroidsForGroup(groupID); } /** Get the associated group id from the current zoom level */ public String getGroupForLevel(final AnalyticItemWrapper item) throws IOException { final GroupHolder group = new GroupHolder(); group.setGroupID(item.getGroupID()); int currentLevel = item.getZoomLevel(); while (endZoomLevel != currentLevel) { final List> centroids = centroidManager.getCentroidsForGroup(parentBatchID, group.getGroupID()); if (centroids.size() == 0) { throw new IOException("Cannot find group " + group.getGroupID()); } associationdFunction.compute(item, centroids, new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { group.setGroupID(pairing.getCentroid().getID()); } }); currentLevel = centroids.get(0).getZoomLevel() + 1; } return group.getGroupID(); } public double findCentroidForLevel( final AnalyticItemWrapper item, final AssociationNotification associationNotification) throws IOException { final GroupHolder group = new GroupHolder(); group.setGroupID(item.getGroupID()); double currentDistance = Double.NaN; int currentLevel = item.getZoomLevel(); boolean atEndLevel = false; // force one time through while (!atEndLevel) { // save status as 'final' to use in the following closure. final boolean reachedEndLevel = currentLevel == endZoomLevel; atEndLevel = reachedEndLevel; // only use the parent batch ID for upper levels, otherwise use the // current batch ID. 
final List> centroids = (currentLevel == endZoomLevel) ? centroidManager.getCentroidsForGroup(group.getGroupID()) : centroidManager.getCentroidsForGroup(parentBatchID, group.getGroupID()); if (centroids.size() == 0) { throw new IOException("Cannot find group " + group.getGroupID()); } currentDistance = associationdFunction.compute(item, centroids, new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { group.setGroupID(pairing.getCentroid().getID()); if (reachedEndLevel) { associationNotification.notify(pairing); } } }); // update for next loop currentLevel = centroids.get(0).getZoomLevel() + 1; } return currentDistance; } public static void setParameters( final Configuration config, final Class scope, final PropertyManagement runTimeProperties) { CentroidManagerGeoWave.setParameters(config, scope, runTimeProperties); runTimeProperties.setConfig( new ParameterEnum[] { CommonParameters.Common.DISTANCE_FUNCTION_CLASS, CentroidParameters.Centroid.ZOOM_LEVEL, GlobalParameters.Global.BATCH_ID, GlobalParameters.Global.PARENT_BATCH_ID}, config, scope); } private class GroupHolder { private String groupID; public String getGroupID() { return groupID; } public void setGroupID(final String groupID) { this.groupID = groupID; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/clustering/exception/MatchingCentroidNotFoundException.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering.exception; public class MatchingCentroidNotFoundException extends Exception { /** */ private static final long serialVersionUID = 1L; public MatchingCentroidNotFoundException() { super(); } public MatchingCentroidNotFoundException( final String arg0, final Throwable arg1, final boolean arg2, final boolean arg3) { super(arg0, arg1, arg2, arg3); } public MatchingCentroidNotFoundException(final String arg0, final Throwable arg1) { super(arg0, arg1); } public MatchingCentroidNotFoundException(final String arg0) { super(arg0); } public MatchingCentroidNotFoundException(final Throwable arg0) { super(arg0); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/CoordinateCircleDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.distance;

import org.geotools.geometry.jts.JTS;
import org.geotools.referencing.CRS;
import org.geotools.referencing.GeodeticCalculator;
import org.geotools.referencing.datum.DefaultEllipsoid;
import org.locationtech.jts.geom.Coordinate;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Great-circle (orthodromic) distance between two coordinates, computed against a geographic CRS
 * (EPSG:4326 by default; override {@link #getCRS()} to change it).
 */
public class CoordinateCircleDistanceFn implements DistanceFn<Coordinate> {
  private static final Logger LOGGER = LoggerFactory.getLogger(CoordinateCircleDistanceFn.class);

  private static final long serialVersionUID = -1245559892132762143L;

  protected static final CoordinateReferenceSystem DEFAULT_CRS;

  static {
    try {
      DEFAULT_CRS = CRS.decode("EPSG:4326", true);
    } catch (final FactoryException e) {
      throw new RuntimeException("Failed to load default EPSG:4326 coordinate reference system", e);
    }
  }

  @Override
  public double measure(final Coordinate c1, final Coordinate c2) {
    try {
      return JTS.orthodromicDistance(c1, c2, getCRS());
    } catch (final TransformException e) {
      throw new RuntimeException("Failed to transform coordinates to provided CRS", e);
    } catch (final java.lang.AssertionError ae) {
      // weird error with orthodromic distance..when distance is too close
      // (0.05 meter), it fails the tolerance test
      LOGGER.info("when distance is too close(0.05 meter), it fails the tolerance test", ae);
      // Fall back to a direct ellipsoidal calculation, which does not perform
      // the tolerance assertion.
      final GeodeticCalculator calc = new GeodeticCalculator(getCRS());
      calc.setStartingGeographicPoint(c1.x, c1.y);
      calc.setDestinationGeographicPoint(c2.x, c2.y);
      return ((DefaultEllipsoid) calc.getEllipsoid()).orthodromicDistance(
          calc.getStartingGeographicPoint().getX(),
          calc.getStartingGeographicPoint().getY(),
          calc.getDestinationGeographicPoint().getX(),
          calc.getDestinationGeographicPoint().getY());
    }
  }

  /** The CRS used for distance calculations; subclasses may override. */
  protected CoordinateReferenceSystem getCRS() {
    return DEFAULT_CRS;
  }
}
================================================
FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/CoordinateCosineDistanceFn.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import org.locationtech.jts.geom.Coordinate; public class CoordinateCosineDistanceFn implements DistanceFn { /** */ private static final long serialVersionUID = 2074200104626591273L; @Override public double measure(final Coordinate x, final Coordinate y) { final double ab = (x.x * y.x) + (x.y * y.y) + (x.z * y.z); final double norma = Math.sqrt(Math.pow(x.x, 2) + Math.pow(x.y, 2) + Math.pow(x.z, 2)); final double normb = Math.sqrt(Math.pow(y.x, 2) + Math.pow(y.y, 2) + Math.pow(y.z, 2)); return ab / (norma * normb); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/CoordinateEuclideanDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import org.locationtech.jts.geom.Coordinate; public class CoordinateEuclideanDistanceFn implements DistanceFn { /** */ private static final long serialVersionUID = 888639577783179566L; @Override public double measure(final Coordinate x, final Coordinate y) { return Math.sqrt( Math.pow((x.x - y.x), 2) + Math.pow((x.y - y.y), 2) + Math.pow((filter(x.z) - filter(y.z)), 2)); } private static double filter(final double x) { return (Double.isNaN(x)) ? 0 : x; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/DistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import java.io.Serializable; /** * Determine the distance between two objects. * * @param */ public interface DistanceFn extends Serializable { double measure(T x, T y); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureCentroidDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; /** * Calculate distance between two SimpleFeatures, assuming has a Geometry. * * @see org.opengis.feature.simple.SimpleFeature */ public class FeatureCentroidDistanceFn implements DistanceFn { /** */ private static final long serialVersionUID = 3824608959408031752L; private DistanceFn coordinateDistanceFunction = new CoordinateEuclideanDistanceFn(); public FeatureCentroidDistanceFn() {} public FeatureCentroidDistanceFn(final DistanceFn coordinateDistanceFunction) { super(); this.coordinateDistanceFunction = coordinateDistanceFunction; } public DistanceFn getCoordinateDistanceFunction() { return coordinateDistanceFunction; } public void setCoordinateDistanceFunction( final DistanceFn coordinateDistanceFunction) { this.coordinateDistanceFunction = coordinateDistanceFunction; } private Geometry getGeometry(final SimpleFeature x) { for (final Object attr : x.getAttributes()) { if (attr instanceof Geometry) { return (Geometry) attr; } } return (Geometry) x.getDefaultGeometry(); } @Override public double measure(final SimpleFeature x, final SimpleFeature y) { return coordinateDistanceFunction.measure( getGeometry(x).getCentroid().getCoordinate(), getGeometry(y).getCentroid().getCoordinate()); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureCentroidOrthodromicDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import org.opengis.feature.simple.SimpleFeature; public class FeatureCentroidOrthodromicDistanceFn extends FeatureCentroidDistanceFn implements DistanceFn { private static final long serialVersionUID = -9077135292765517738L; public FeatureCentroidOrthodromicDistanceFn() { super(new CoordinateCircleDistanceFn()); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.distance;

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.operation.distance.DistanceOp;
import org.opengis.feature.simple.SimpleFeature;

/**
 * Calculates the distance between two SimpleFeatures as the distance between the two closest
 * points of their geometries, measured with a pluggable coordinate metric (great-circle by
 * default).
 *
 * @see org.opengis.feature.simple.SimpleFeature
 */
public class FeatureDistanceFn implements DistanceFn<SimpleFeature> {

  private static final long serialVersionUID = 3824608959408031752L;

  /** Coordinate-level distance strategy; defaults to great-circle distance. */
  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateCircleDistanceFn();

  public FeatureDistanceFn() {}

  public FeatureDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {
    super();
    this.coordinateDistanceFunction = coordinateDistanceFunction;
  }

  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {
    return coordinateDistanceFunction;
  }

  public void setCoordinateDistanceFunction(
      final DistanceFn<Coordinate> coordinateDistanceFunction) {
    this.coordinateDistanceFunction = coordinateDistanceFunction;
  }

  /**
   * Returns the first {@link Geometry}-valued attribute of the feature, falling back to the
   * default geometry when no attribute is itself a Geometry.
   */
  private Geometry getGeometry(final SimpleFeature x) {
    for (final Object attr : x.getAttributes()) {
      if (attr instanceof Geometry) {
        return (Geometry) attr;
      }
    }
    return (Geometry) x.getDefaultGeometry();
  }

  @Override
  public double measure(final SimpleFeature x, final SimpleFeature y) {
    double dist = Double.MAX_VALUE;
    // DistanceOp yields the nearest point on each geometry; take the minimum metric distance
    // over all point pairs (nearestPoints() returns one point per geometry).
    final Coordinate[] coords = new DistanceOp(getGeometry(x), getGeometry(y)).nearestPoints();
    for (int i = 0; i < coords.length; i++) {
      for (int j = i + 1; j < coords.length; j++) {
        dist = Math.min(dist, coordinateDistanceFunction.measure(coords[j], coords[i]));
      }
    }
    return dist;
  }
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/FeatureGeometryDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.distance;

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.operation.distance.DistanceOp;
import org.opengis.feature.simple.SimpleFeature;

/**
 * Calculates the distance between two SimpleFeatures, assuming each has a Geometry, as the
 * metric distance between the nearest points of the two geometries.
 *
 * @see org.opengis.feature.simple.SimpleFeature
 */
public class FeatureGeometryDistanceFn implements DistanceFn<SimpleFeature> {

  private static final long serialVersionUID = 3824608959408031752L;

  /** Coordinate-level distance strategy; defaults to great-circle distance. */
  private DistanceFn<Coordinate> coordinateDistanceFunction = new CoordinateCircleDistanceFn();

  public FeatureGeometryDistanceFn() {}

  public FeatureGeometryDistanceFn(final DistanceFn<Coordinate> coordinateDistanceFunction) {
    super();
    this.coordinateDistanceFunction = coordinateDistanceFunction;
  }

  public DistanceFn<Coordinate> getCoordinateDistanceFunction() {
    return coordinateDistanceFunction;
  }

  public void setCoordinateDistanceFunction(
      final DistanceFn<Coordinate> coordinateDistanceFunction) {
    this.coordinateDistanceFunction = coordinateDistanceFunction;
  }

  /**
   * Returns the first {@link Geometry}-valued attribute of the feature, falling back to the
   * default geometry when no attribute is itself a Geometry.
   */
  private Geometry getGeometry(final SimpleFeature x) {
    for (final Object attr : x.getAttributes()) {
      if (attr instanceof Geometry) {
        return (Geometry) attr;
      }
    }
    return (Geometry) x.getDefaultGeometry();
  }

  @Override
  public double measure(final SimpleFeature x, final SimpleFeature y) {
    final Geometry xGeo = getGeometry(x);
    final Geometry yGeo = getGeometry(y);
    // Nearest point on each geometry, then the configured coordinate metric between them.
    final DistanceOp op = new DistanceOp(xGeo, yGeo);
    final Coordinate[] points = op.nearestPoints();
    return coordinateDistanceFunction.measure(points[0], points[1]);
  }
}

================================================ FILE: 
analytics/api/src/main/java/org/locationtech/geowave/analytic/distance/GeometryCentroidDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; /** * Calculate distance between two geometries. * * @see org.locationtech.jts.geom.Geometry */ public class GeometryCentroidDistanceFn implements DistanceFn { /** */ private static final long serialVersionUID = -4340689267509659236L; private DistanceFn coordinateDistanceFunction = new CoordinateEuclideanDistanceFn(); public GeometryCentroidDistanceFn() {} public GeometryCentroidDistanceFn(final DistanceFn coordinateDistanceFunction) { super(); this.coordinateDistanceFunction = coordinateDistanceFunction; } public DistanceFn getCoordinateDistanceFunction() { return coordinateDistanceFunction; } public void setCoordinateDistanceFunction( final DistanceFn coordinateDistanceFunction) { this.coordinateDistanceFunction = coordinateDistanceFunction; } @Override public double measure(final Geometry x, final Geometry y) { return coordinateDistanceFunction.measure( x.getCentroid().getCoordinate(), y.getCentroid().getCoordinate()); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/CentroidExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.extract; import org.locationtech.jts.geom.Point; /** * Strategy to extract a representative centroid from some Geospatial object * * @param */ public interface CentroidExtractor { /** * @param anObject -- an object with Geospatial properties * @return A Point that must have the SRID set for a valid CRS. */ public Point getCentroid(T anObject); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/DimensionExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.extract; import org.locationtech.jts.geom.Geometry; /** * Strategy to extract a representative dimensions and Geometry for an Object * * @param */ public interface DimensionExtractor extends java.io.Serializable { /** @param anObject -- */ public double[] getDimensions(T anObject); /** * @return Dimension names in the same order as dimentions returns from the * {@link DimensionExtractor#getDimensions(Object)} */ public String[] getDimensionNames(); /** * @param anObject -- an object with Geospatial properties * @return A Point that must have the SRID set for a valid CRS. */ public Geometry getGeometry(T anObject); /** * @param anObject the object to get the group ID from * @return An assigned group ID, if one exists, otherwise {@code null} */ public String getGroupID(T anObject); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/EmptyDimensionExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.extract; import org.locationtech.jts.geom.Geometry; public abstract class EmptyDimensionExtractor implements DimensionExtractor { /** * */ private static final long serialVersionUID = 1L; private static final double[] EMPTY_VAL = new double[0]; private static final String[] EMPTY_NAME = new String[0]; @Override public double[] getDimensions(final T anObject) { return EMPTY_VAL; } @Override public String[] getDimensionNames() { return EMPTY_NAME; } @Override public abstract Geometry getGeometry(T anObject); @Override public abstract String getGroupID(T anObject); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/SimpleFeatureCentroidExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.extract; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeature; /** * Extract a set of points representing critical points for a simple feature that me be * representative or compared to centroids. */ public class SimpleFeatureCentroidExtractor implements CentroidExtractor { @Override public Point getCentroid(final SimpleFeature anObject) { final Geometry geometry = (Geometry) anObject.getDefaultGeometry(); final int srid = SimpleFeatureGeometryExtractor.getSRID(anObject); final Point point = geometry.getCentroid(); point.setSRID(srid); return point; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/SimpleFeatureGeometryExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.extract;

import java.util.Iterator;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.referencing.ReferenceIdentifier;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/** Extract a Geometry from a Simple Feature, tagging it with the feature's EPSG SRID. */
public class SimpleFeatureGeometryExtractor extends EmptyDimensionExtractor<SimpleFeature>
    implements
    DimensionExtractor<SimpleFeature> {

  private static final long serialVersionUID = 1L;

  /** Fallback SRID (EPSG:4326, WGS 84) when the feature's CRS is absent or unidentifiable. */
  private static final int DEFAULT_SRID = 4326;

  @Override
  public Geometry getGeometry(final SimpleFeature anObject) {
    final Geometry geometry = (Geometry) anObject.getDefaultGeometry();
    final int srid = getSRID(anObject);
    geometry.setSRID(srid);
    return geometry;
  }

  /**
   * Resolves the EPSG code of the feature's default-geometry CRS, defaulting to
   * {@value #DEFAULT_SRID} when the CRS is missing, has no EPSG identifier, or the identifier's
   * code is not numeric.
   */
  protected static int getSRID(final SimpleFeature geometryFeature) {
    final CoordinateReferenceSystem crs =
        geometryFeature.getDefaultGeometryProperty().getDescriptor().getCoordinateReferenceSystem();
    if (crs == null) {
      return DEFAULT_SRID;
    }
    final ReferenceIdentifier id = getFirst(crs.getIdentifiers());
    if (id == null) {
      return DEFAULT_SRID;
    }
    try {
      return Integer.parseInt(id.getCode());
    } catch (final NumberFormatException e) {
      // Non-numeric EPSG code; fall back rather than propagate a runtime failure.
      return DEFAULT_SRID;
    }
  }

  /**
   * Returns the first identifier of the iterable when it is in the EPSG code space, otherwise
   * {@code null}. Only the first identifier is examined.
   */
  protected static final ReferenceIdentifier getFirst(
      final Iterable<ReferenceIdentifier> iterable) {
    if (iterable == null) {
      return null;
    }
    final Iterator<ReferenceIdentifier> it = iterable.iterator();
    if (it.hasNext()) {
      final ReferenceIdentifier id = it.next();
      if ("EPSG".equals(id.getCodeSpace())) {
        return id;
      }
    }
    return null;
  }

  @Override
  public String getGroupID(final SimpleFeature anObject) {
    final Object v = anObject.getAttribute("GroupID");
    return v == null ? null : v.toString();
  }
}

================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/SimpleFeatureInteriorPointExtractor.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.extract; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeature; /** * Extract a set of points representing critical points for a simple feature that me be * representative or compared to centroids. */ public class SimpleFeatureInteriorPointExtractor extends SimpleFeatureCentroidExtractor implements CentroidExtractor { @Override public Point getCentroid(final SimpleFeature anObject) { final Geometry geometry = (Geometry) anObject.getDefaultGeometry(); final int srid = SimpleFeatureGeometryExtractor.getSRID(anObject); final Point point = geometry.getInteriorPoint(); point.setSRID(srid); return point; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/extract/TimeDimensionExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.extract; import java.util.Calendar; import java.util.Date; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.type.AttributeDescriptor; /** A default implementation that averages all time attributes. */ public class TimeDimensionExtractor extends SimpleFeatureGeometryExtractor implements DimensionExtractor { /** * */ private static final long serialVersionUID = 1L; private static final String[] TIME_NAME = new String[] {"time"}; @Override public double[] getDimensions(final SimpleFeature anObject) { final double[] timeVal = new double[1]; double count = 0.0; for (final AttributeDescriptor attr : anObject.getFeatureType().getAttributeDescriptors()) { if (TimeUtils.isTemporal(attr.getType().getClass())) { final Object o = anObject.getAttribute(attr.getName()); count += 1.0; if (o instanceof Date) { timeVal[0] += ((Date) o).getTime(); } else if (o instanceof Calendar) { timeVal[0] += ((Calendar) o).getTime().getTime(); } } } if (count > 0) { timeVal[0] = timeVal[0] / count; } return timeVal; } @Override public String[] getDimensionNames() { return TIME_NAME; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/AssociationNotification.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.kmeans; import org.locationtech.geowave.analytic.clustering.CentroidPairing; /** * Callback with the pairing of a point to its closest centroid at a zoom level. * * @see CentroidAssociationFn * @param */ public interface AssociationNotification { public void notify(CentroidPairing pairing); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/CentroidAssociationFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.kmeans;

import org.locationtech.geowave.analytic.AnalyticItemWrapper;
import org.locationtech.geowave.analytic.clustering.CentroidPairing;
import org.locationtech.geowave.analytic.distance.DistanceFn;

/**
 * Computes the distance of points to their closest centroid using a provided distance function,
 * reporting each best pairing through an {@link AssociationNotification} callback.
 *
 * @param <T> the wrapped item type
 */
public class CentroidAssociationFn<T> {

  private DistanceFn<T> distanceFunction;

  public DistanceFn<T> getDistanceFunction() {
    return distanceFunction;
  }

  public void setDistanceFunction(final DistanceFn<T> distanceFunction) {
    this.distanceFunction = distanceFunction;
  }

  /**
   * Finds the closest centroid in {@code targetSet} for a single point.
   *
   * @return the distance to the closest centroid ({@code Double.POSITIVE_INFINITY} when the
   *         target set is empty)
   */
  public double compute(
      final AnalyticItemWrapper<T> point,
      final Iterable<AnalyticItemWrapper<T>> targetSet,
      final AssociationNotification<T> associationNotification) {
    final CentroidPairing<T> pairing = new CentroidPairing<>(null, point, Double.POSITIVE_INFINITY);
    for (final AnalyticItemWrapper<T> y : targetSet) {
      final double distance = distanceFunction.measure(point.getWrappedItem(), y.getWrappedItem());
      if (distance < pairing.getDistance()) {
        pairing.setDistance(distance);
        pairing.setCentroid(y);
      }
    }
    associationNotification.notify(pairing);
    return pairing.getDistance();
  }

  /**
   * Associates every point in {@code pointSet} with its closest centroid.
   *
   * @return the sum of all closest-centroid distances (the clustering cost)
   */
  public double compute(
      final Iterable<AnalyticItemWrapper<T>> pointSet,
      final Iterable<AnalyticItemWrapper<T>> targetSet,
      final AssociationNotification<T> associationNotification) {
    double sum = 0.0;
    for (final AnalyticItemWrapper<T> point : pointSet) {
      sum += this.compute(point, targetSet, associationNotification);
    }
    return sum;
  }
}

================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/serial/AnalyticStats.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.kmeans.serial; public interface AnalyticStats { public static enum StatValue { COST, COUNT } public void notify(StatValue stat, double amount); public void reset(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/serial/KMeansParallelInitialize.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.kmeans.serial; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.kmeans.CentroidAssociationFn; import org.locationtech.geowave.analytic.sample.SampleNotification; import org.locationtech.geowave.analytic.sample.Sampler; public class KMeansParallelInitialize { private CentroidAssociationFn centroidAssociationFn = new CentroidAssociationFn<>(); private double psi = 5.0; private final Sampler sampler = new Sampler<>(); private AnalyticItemWrapperFactory centroidFactory; private final AnalyticStats stats = new StatsMap(); public CentroidAssociationFn getCentroidAssociationFn() { return centroidAssociationFn; } public void setCentroidAssociationFn(final CentroidAssociationFn centroidAssociationFn) { this.centroidAssociationFn = centroidAssociationFn; } public double getPsi() { return psi; } public void setPsi(final double psi) { this.psi = psi; } public Sampler getSampler() { return sampler; } public AnalyticItemWrapperFactory getCentroidFactory() { return centroidFactory; } public void setCentroidFactory(final AnalyticItemWrapperFactory centroidFactory) { this.centroidFactory = centroidFactory; } public AnalyticStats getStats() { return stats; } public Pair>, List>> runLocal( final Iterable> pointSet) { stats.reset(); final List> 
sampleSet = new ArrayList<>(); sampleSet.add(pointSet.iterator().next()); final List> pairingSet = new ArrayList<>(); final AssociationNotification assocFn = new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { pairingSet.add(pairing); pairing.getCentroid().incrementAssociationCount(1); } }; // combine to get pairing? double normalizingConstant = centroidAssociationFn.compute(pointSet, sampleSet, assocFn); stats.notify(AnalyticStats.StatValue.COST, normalizingConstant); final int logPsi = Math.max(1, (int) (Math.log(psi) / Math.log(2))); for (int i = 0; i < logPsi; i++) { sampler.sample(pairingSet, new SampleNotification() { @Override public void notify(final T item, final boolean partial) { sampleSet.add(centroidFactory.create(item)); } }, normalizingConstant); pairingSet.clear(); for (final AnalyticItemWrapper centroid : sampleSet) { centroid.resetAssociatonCount(); } normalizingConstant = centroidAssociationFn.compute(pointSet, sampleSet, assocFn); stats.notify(AnalyticStats.StatValue.COST, normalizingConstant); } return Pair.of(pairingSet, sampleSet); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kmeans/serial/StatsMap.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.kmeans.serial; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class StatsMap implements AnalyticStats { Map> stats = new HashMap<>(); @Override public void notify(final StatValue stat, final double amount) { List list = stats.get(stat); if (list == null) { list = new ArrayList<>(); stats.put(stat, list); } list.add(amount); } public List getStats(final StatValue stat) { return stats.get(stat); } @Override public void reset() { stats.clear(); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kryo/FeatureSerializer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.kryo;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.locationtech.geowave.adapter.vector.FeatureWritable;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;

/**
 * Kryo serializer for {@link SimpleFeature}, delegating the actual (de)serialization to
 * {@link FeatureWritable} and framing the bytes with a length-prefixed int.
 */
public class FeatureSerializer extends Serializer<SimpleFeature> {
  static final Logger LOGGER = LoggerFactory.getLogger(FeatureSerializer.class);

  @Override
  public SimpleFeature read(final Kryo arg0, final Input arg1, final Class<SimpleFeature> arg2) {
    final FeatureWritable fw = new FeatureWritable();
    // Length-prefixed payload written by write().
    final byte[] data = arg1.readBytes(arg1.readInt());
    try (DataInputStream is = new DataInputStream(new ByteArrayInputStream(data))) {
      fw.readFields(is);
    } catch (final IOException e) {
      LOGGER.error("Cannot deserialize Simple Feature", e);
      return null;
    }
    return fw.getFeature();
  }

  @Override
  public void write(final Kryo arg0, final Output arg1, final SimpleFeature arg2) {
    final FeatureWritable fw = new FeatureWritable(arg2.getFeatureType());
    fw.setFeature(arg2);
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (DataOutputStream os = new DataOutputStream(bos)) {
      fw.write(os);
      os.flush();
      final byte[] data = bos.toByteArray();
      arg1.writeInt(data.length);
      arg1.write(data);
    } catch (final IOException e) {
      LOGGER.error("Cannot serialize Simple Feature", e);
    }
  }
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kryo/GridCoverageWritableSerializer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.kryo; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.Serializer; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; public class GridCoverageWritableSerializer extends Serializer { static final Logger LOGGER = LoggerFactory.getLogger(FeatureSerializer.class); @Override public GridCoverageWritable read( final Kryo arg0, final Input arg1, final Class arg2) { final GridCoverageWritable gcw = new GridCoverageWritable(); final byte[] data = arg1.readBytes(arg1.readInt()); try (DataInputStream is = new DataInputStream(new ByteArrayInputStream(data))) { gcw.readFields(is); } catch (final IOException e) { LOGGER.error("Cannot deserialize GridCoverageWritable", e); return null; } return gcw; } @Override public void write(final Kryo arg0, final Output arg1, final GridCoverageWritable arg2) { final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try (DataOutputStream os = new DataOutputStream(bos)) { arg2.write(os); os.flush(); final byte[] data = 
bos.toByteArray(); arg1.writeInt(data.length); arg1.write(data); } catch (final IOException e) { LOGGER.error("Cannot serialize GridCoverageWritable", e); } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/kryo/PersistableSerializer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.kryo; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.Serializer; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; public class PersistableSerializer extends Serializer { @Override public Persistable read(final Kryo kryo, final Input input, final Class classTag) { // Read object byte count and allocate buffer to read object data final int byteCount = input.readInt(); final byte[] bytes = new byte[byteCount]; final int bytesRead = input.read(bytes); // TODO: This was only added for findbugs warning, not really necessary // check if (bytesRead < 0) { return null; } return PersistenceUtils.fromBinary(bytes); } @Override public void write(final Kryo kryo, final Output output, final Persistable object) { // Persistence utils includes classId as short in front of persistable // object. final byte[] serializedObj = PersistenceUtils.toBinary(object); final int objLength = serializedObj.length; output.writeInt(objLength); output.write(serializedObj); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/model/IndexModelBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.model; import org.locationtech.geowave.core.store.index.CommonIndexModel; public interface IndexModelBuilder extends java.io.Serializable { public CommonIndexModel buildModel(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/model/SpatialIndexModelBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.model; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** Builds an index model with longitude and latitude. */ public class SpatialIndexModelBuilder implements IndexModelBuilder { /** * */ private static final long serialVersionUID = 1L;
  /** @return the index model of a spatial index created from default {@link SpatialOptions} */
  @Override public CommonIndexModel buildModel() { return SpatialDimensionalityTypeProvider.createIndexFromOptions( new SpatialOptions()).getIndexModel(); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/DefaultNeighborList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.core.index.ByteArray;
/**
 * Default {@code NeighborList} backed by a {@code HashMap} keyed on a neighbor's {@link ByteArray}
 * id. A candidate already present in the map is reported as {@code SKIP} by {@link #infer}, so the
 * caller can bypass the distance computation for it.
 *
 * NOTE(review): generic type parameters appear to have been stripped from this copy of the file
 * (the body references {@code NNTYPE} but the class declares no type parameter) — confirm against
 * the upstream source.
 */
public class DefaultNeighborList implements NeighborList { private final Map list = new HashMap<>();
  // Adds the neighbor only when infer() has not already seen/inferred it; returns true on insert.
  @Override public boolean add( final DistanceProfile distanceProfile, final ByteArray id, final NNTYPE value) { if (infer(id, value) == InferType.NONE) { list.put(id, value); return true; } return false; }
  // SKIP when the id is already tracked (its relationship is known); NONE otherwise.
  @Override public InferType infer(final ByteArray id, final NNTYPE value) { if (list.containsKey(id)) { return InferType.SKIP; } return InferType.NONE; } @Override public void clear() { list.clear(); } @Override public Iterator> iterator() { return list.entrySet().iterator(); } @Override public int size() { return list.size(); }
  /** Factory producing empty {@code DefaultNeighborList} instances for each center item. */
  public static class DefaultNeighborListFactory implements NeighborListFactory { @Override public NeighborList buildNeighborList(final ByteArray centerId, final NNTYPE center) { return new DefaultNeighborList<>(); } } @Override public boolean isEmpty() { return list.isEmpty(); }
  // Direct lookup of a tracked neighbor by id; returns null when absent.
  public NNTYPE get(final ByteArray key) { return list.get(key); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/DistanceProfile.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn;
/**
 * Retain distance information: the computed distance between two items plus an opaque,
 * distance-function-specific context object. Mutable so instances can be reused by a
 * distance-profile generator.
 */
public class DistanceProfile { private double distance; private CONTEXT_TYPE context; public DistanceProfile() {} public DistanceProfile(final double distance, final CONTEXT_TYPE context) { super(); this.distance = distance; this.context = context; } public double getDistance() { return distance; } public void setDistance(final double distance) { this.distance = distance; } /** distance function specific information */ public CONTEXT_TYPE getContext() { return context; } public void setContext(final CONTEXT_TYPE context) { this.context = context; } @Override public String toString() { return "DistanceProfile [distance=" + distance + ", context=" + context + "]"; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/DistanceProfileGenerateFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn;
/** Computes a {@code DistanceProfile} (distance plus context) between two items. */
public interface DistanceProfileGenerateFn {
  /**
   * Compute distance profile for given items.
   *
   * @param item1 the first item
   * @param item2 the second item
   * @return the distance profile between the two items
   */
  public DistanceProfile computeProfile(ITEM item1, ITEM item2); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NNProcessor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.locationtech.geowave.analytic.nn.NeighborList.InferType; import org.locationtech.geowave.analytic.partitioner.Partitioner; import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData; import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionDataCallback; import org.locationtech.geowave.core.index.ByteArray; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is designed to support secondary partitioning. * *

(1) Partition added data using a partitioner. * *

(2) Process data, perform the O(N^2) (e.g. ~ n^2/2) comparisons within those partitions. * *

Custom plug-ins include (1) A factory for the neighbor list to track those pairings of data * whose distance fell under the provided minimum. (2) A completion notification callback for * each primary datum. * *

The loop algorithm is: for each primary, compare to all remaining primary and all secondary * data items

A powerful performance enhancing tool is the inference mechanism associated with the * neighborhood lists. A list can have intelligence to decide that a particular neighbor can be * inferred and, therefore, can be removed from the set of primaries to be inspected. This has no * effect on secondaries. * *

The processor can be called multiple times, as the 'process' algorithm does not alter its * internal state. The notification callback can be used to alter the internal state (e.g. calling * 'add' or 'remove' methods). Caution should used to alter internal state within the neighbor list. * * @param * @param @See Partitioner @See Partitioner.PartitionData */ public class NNProcessor { protected static final Logger LOGGER = LoggerFactory.getLogger(NNProcessor.class); final Map uniqueSetOfPartitions = new HashMap<>(); final Map> partitionsToIds = new HashMap<>(); final Map> idsToPartition = new HashMap<>(); final Map primaries = new HashMap<>(); final Map others = new HashMap<>(); protected final Partitioner partitioner; protected final TypeConverter typeConverter; protected final DistanceProfileGenerateFn distanceProfileFn; protected final double maxDistance; protected final PartitionData parentPartition; private int upperBoundPerPartition = DEFAULT_UPPER_BOUND_PARTIION_SIZE; public static final int DEFAULT_UPPER_BOUND_PARTIION_SIZE = 75000; /** Run State */ protected ByteArray startingPoint; protected NeighborIndex index; public NNProcessor( final Partitioner partitioner, final TypeConverter typeConverter, final DistanceProfileGenerateFn distanceProfileFn, final double maxDistance, final PartitionData parentPartition) { super(); this.partitioner = partitioner; this.typeConverter = typeConverter; this.distanceProfileFn = distanceProfileFn; this.maxDistance = maxDistance; this.parentPartition = parentPartition; } private PartitionData add(final PartitionData pd, final ByteArray itemId) { PartitionData singleton = uniqueSetOfPartitions.get(pd); if (singleton == null) { uniqueSetOfPartitions.put(pd, pd); singleton = pd; } Set idsSet = partitionsToIds.get(singleton); if (idsSet == null) { idsSet = new HashSet<>(); partitionsToIds.put(singleton, idsSet); } if (idsSet.size() > upperBoundPerPartition) { return null; } if (idsSet.size() == upperBoundPerPartition) { 
LOGGER.warn("At upper bound on partition. Increase the bounds or condense the data."); } idsSet.add(itemId); Set partitionSet = idsToPartition.get(itemId); if (partitionSet == null) { partitionSet = new HashSet<>(); idsToPartition.put(itemId, partitionSet); } partitionSet.add(singleton); return singleton; } public void remove(final ByteArray id) { final Set partitionSet = idsToPartition.remove(id); if (partitionSet != null) { for (final PartitionData pd : partitionSet) { final Set idSet = partitionsToIds.get(pd); if (idSet != null) { idSet.remove(id); } } } primaries.remove(id); others.remove(id); if (index != null) { index.empty(id); } } public void add(final ByteArray id, final boolean isPrimary, final PARTITION_VALUE partitionValue) throws IOException { final STORE_VALUE storeValue = this.typeConverter.convert(id, partitionValue); try { partitioner.partition(partitionValue, new PartitionDataCallback() { @Override public void partitionWith(final PartitionData partitionData) throws Exception { final PartitionData singleton = add(partitionData, id); if (singleton != null) { singleton.setPrimary(partitionData.isPrimary() || singleton.isPrimary()); if (isPrimary) { primaries.put(id, storeValue); } else { others.put(id, storeValue); } } } }); } catch (final Exception e) { throw new IOException(e); } if (isPrimary) { if (startingPoint == null) { startingPoint = id; } } } public interface CompleteNotifier { public void complete(ByteArray id, STORE_VALUE value, NeighborList list) throws IOException, InterruptedException; } public int size() { return primaries.size() + others.size(); } /** * @param size the minimum size of a partition to be processed * @return true if all partitions are empty */ public boolean trimSmallPartitions(final int size) { final Iterator>> it = partitionsToIds.entrySet().iterator(); while (it.hasNext()) { final Map.Entry> entry = it.next(); if (entry.getValue().size() < size) { for (final ByteArray id : entry.getValue()) { final Set
partitionsForId = idsToPartition.get(id); partitionsForId.remove(entry.getKey()); if (partitionsForId.isEmpty()) { this.primaries.remove(id); this.others.remove(id); } } it.remove(); } } return partitionsToIds.isEmpty(); } public void process( final NeighborListFactory listFactory, final CompleteNotifier notification) throws IOException, InterruptedException { LOGGER.info( "Processing " + parentPartition.toString() + " with primary = " + primaries.size() + " and other = " + others.size()); LOGGER.info( "Processing " + parentPartition.toString() + " with sub-partitions = " + uniqueSetOfPartitions.size()); index = new NeighborIndex<>(listFactory); double farthestDistance = 0; ByteArray farthestNeighbor = null; ByteArray nextStart = startingPoint; final Set inspectionSet = new HashSet<>(); inspectionSet.addAll(primaries.keySet()); if ((inspectionSet.size() > 0) && (nextStart == null)) { nextStart = inspectionSet.iterator().next(); } while (nextStart != null) { inspectionSet.remove(nextStart); farthestDistance = 0; final Set partition = idsToPartition.get(nextStart); final STORE_VALUE primary = primaries.get(nextStart); final ByteArray primaryId = nextStart; nextStart = null; farthestNeighbor = null; if (LOGGER.isTraceEnabled()) { LOGGER.trace("processing " + primaryId); } if (primary == null) { if (inspectionSet.size() > 0) { nextStart = inspectionSet.iterator().next(); } continue; } final NeighborList primaryList = index.init(primaryId, primary); for (final PartitionData pd : partition) { for (final ByteArray neighborId : partitionsToIds.get(pd)) { if (neighborId.equals(primaryId)) { continue; } boolean isAPrimary = true; STORE_VALUE neighbor = primaries.get(neighborId); if (neighbor == null) { neighbor = others.get(neighborId); isAPrimary = false; } else // prior processed primary if (!inspectionSet.contains(neighborId)) { continue; } if (neighbor == null) { continue; } final InferType inferResult = primaryList.infer(neighborId, neighbor); if (inferResult == 
InferType.NONE) { final DistanceProfile distanceProfile = distanceProfileFn.computeProfile(primary, neighbor); final double distance = distanceProfile.getDistance(); if (distance <= maxDistance) { index.add(distanceProfile, primaryId, primary, neighborId, neighbor, isAPrimary); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Neighbor " + neighborId); } } if ((distance > farthestDistance) && inspectionSet.contains(neighborId)) { farthestDistance = distance; farthestNeighbor = neighborId; } } else if (inferResult == InferType.REMOVE) { inspectionSet.remove(neighborId); } } } notification.complete(primaryId, primary, primaryList); index.empty(primaryId); if ((farthestNeighbor == null) && (inspectionSet.size() > 0)) { nextStart = inspectionSet.iterator().next(); } else { nextStart = farthestNeighbor; } } } public int getUpperBoundPerPartition() { return upperBoundPerPartition; } public void setUpperBoundPerPartition(final int upperBoundPerPartition) { this.upperBoundPerPartition = upperBoundPerPartition; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NeighborIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; /** * Maintain an association between an ID of any item and its neighbors, as they are discovered. The * index supports a bi-directional association, forming a graph of adjacency lists. * * @param */ public class NeighborIndex { private final Map> index = new HashMap<>(); private final NeighborListFactory listFactory; private final NullList nullList = new NullList<>(); public NeighborIndex(final NeighborListFactory listFactory) { super(); this.listFactory = listFactory; } /** * Invoked when the provided node is being inspected to find neighbors. Creates the associated * neighbor list, if not already created. Notifies the neighbor list that it is formally * initialized. The neighbor list may already exist and have associated neighbors. This occurs * when those relationships are discovered through traversing the neighbor. * *

This method is designed for neighbor lists do some optimizations just prior to the neighbor * discovery process. */ public NeighborList init(final ByteArray id, final NNTYPE value) { NeighborList neighbors = index.get(id); if (neighbors == null) { neighbors = listFactory.buildNeighborList(id, value); index.put(id, neighbors); } return neighbors; } public void add( final DistanceProfile distanceProfile, final ByteArray centerId, final NNTYPE centerValue, final ByteArray neighborId, final NNTYPE neighborValue, final boolean addReciprical) { this.addToList(distanceProfile, centerId, centerValue, neighborId, neighborValue); if (addReciprical) { this.addToList(distanceProfile, neighborId, neighborValue, centerId, centerValue); } } public void empty(final ByteArray id) { index.put(id, nullList); } private void addToList( final DistanceProfile distanceProfile, final ByteArray centerId, final NNTYPE centerValue, final ByteArray neighborId, final NNTYPE neighborValue) { NeighborList neighbors = index.get(centerId); if (neighbors == null) { neighbors = listFactory.buildNeighborList(centerId, centerValue); index.put(centerId, neighbors); } neighbors.add(distanceProfile, neighborId, neighborValue); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NeighborList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import java.util.Map.Entry; import org.locationtech.geowave.core.index.ByteArray; public interface NeighborList extends Iterable> { public enum InferType { NONE, SKIP, // distance measure is skipped REMOVE // skipped and removed from future selection }; /** * May be called prior to init() when discovered by entry itself. */ public boolean add(DistanceProfile distanceProfile, ByteArray id, NNTYPE value); /** * See if the entries relationships have already been inferred */ public InferType infer(final ByteArray id, final NNTYPE value); /** Clear the contents. */ public void clear(); public int size(); public boolean isEmpty(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NeighborListFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import org.locationtech.geowave.core.index.ByteArray; public interface NeighborListFactory { public NeighborList buildNeighborList(ByteArray cnterId, NNTYPE center); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/NullList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import java.util.Collections; import java.util.Iterator; import java.util.Map.Entry; import org.locationtech.geowave.core.index.ByteArray;
/**
 * A no-op {@code NeighborList}: {@code add} refuses every entry, {@code infer} reports every
 * candidate as {@code SKIP}, and iteration is always empty. Used (see NeighborIndex.empty) to
 * replace an id's list once it has been fully processed, so later discoveries are discarded
 * cheaply.
 */
public class NullList implements NeighborList { @Override public boolean add( final DistanceProfile distanceProfile, final ByteArray id, final NNTYPE value) { return false; } @Override public InferType infer(final ByteArray id, final NNTYPE value) { return InferType.SKIP; } @Override public void clear() {} @Override public Iterator> iterator() { return Collections.emptyIterator(); } @Override public int size() { return 0; } @Override public boolean isEmpty() { return true; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/nn/TypeConverter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.nn; import org.locationtech.geowave.core.index.ByteArray; /** * Convert object consumed by NN to a 'smaller' object pertinent to any subclass algorithms * * @param */ public interface TypeConverter { public TYPE convert(ByteArray id, Object o); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/BasicParameterHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class BasicParameterHelper implements ParameterHelper { /** * */ private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(BasicParameterHelper.class); private final ParameterEnum parent; private final Class baseClass; private final boolean isClass; public BasicParameterHelper( final ParameterEnum parent, final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { this.baseClass = baseClass; this.parent = parent; this.isClass = isClass; } @Override public Class getBaseClass() { return baseClass; } @Override public void setValue(final Configuration config, final Class scope, final Object value) { setParameter(config, scope, value, parent); } private static final void setParameter( final Configuration config, final Class scope, final Object val, final ParameterEnum configItem) { if (val != null) { if (val instanceof Long) { config.setLong( GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), ((Long) val)); } else if (val instanceof Double) { config.setDouble( GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), ((Double) val)); } else if (val instanceof 
Boolean) { config.setBoolean( GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), ((Boolean) val)); } else if (val instanceof Integer) { config.setInt( GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), ((Integer) val)); } else if (val instanceof Class) { config.setClass( GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), ((Class) val), ((Class) val)); } else if (val instanceof byte[]) { config.set( GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), ByteArrayUtils.byteArrayToString((byte[]) val)); } else { config.set(GeoWaveConfiguratorBase.enumToConfKey(scope, configItem.self()), val.toString()); } } } @Override public Object getValue( final JobContext context, final Class scope, final Object defaultValue) { final ScopedJobConfiguration scopedConfig = new ScopedJobConfiguration(context.getConfiguration(), scope); if (baseClass.isAssignableFrom(Integer.class)) { return Integer.valueOf( scopedConfig.getInt(parent.self(), ((Integer) defaultValue).intValue())); } else if (baseClass.isAssignableFrom(String.class)) { return scopedConfig.getString(parent.self(), defaultValue.toString()); } else if (baseClass.isAssignableFrom(Double.class)) { return scopedConfig.getDouble(parent.self(), (Double) defaultValue); } else if (baseClass.isAssignableFrom(byte[].class)) { return scopedConfig.getBytes(parent.self()); } else if ((defaultValue == null) || (defaultValue instanceof Class)) { try { return scopedConfig.getInstance(parent.self(), baseClass, (Class) defaultValue); } catch (InstantiationException | IllegalAccessException e) { LOGGER.error("Unable to get instance from job context", e); } } return null; } @Override public Object getValue(final PropertyManagement propertyManagement) { try { return propertyManagement.getProperty(parent); } catch (final Exception e) { LOGGER.error("Unable to deserialize property '" + parent.toString() + "'", e); return null; } } @Override public void setValue(final PropertyManagement 
propertyManagement, final Object value) { Object storeValue = value; if (isClass && (value instanceof String)) { try { storeValue = Class.forName(value.toString()); } catch (final ClassNotFoundException e) { LOGGER.error("Class " + value.toString() + " for property " + parent + " is not found", e); } } propertyManagement.store(parent, storeValue); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/CentroidParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.extract.CentroidExtractor; public class CentroidParameters { public enum Centroid implements ParameterEnum { INDEX_NAME(String.class, "cid", "Index Identifier for Centroids", false, true), DATA_TYPE_ID(String.class, "cdt", "Data Type ID for a centroid item", false, true), DATA_NAMESPACE_URI(String.class, "cns", "Data Type Namespace for centroid item", false, true), CONXVERGANCE_TOLERANCE(Double.class, "cct", "The alpha parameter measure the minimum covergence to reach before ", false, true), EXTRACTOR_CLASS(CentroidExtractor.class, "cce", "Centroid Exractor Class implements org.locationtech.geowave.analytics.extract.CentroidExtractor", true, true), WRAPPER_FACTORY_CLASS(AnalyticItemWrapperFactory.class, "cfc", "A factory class that implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory", true, true), ZOOM_LEVEL(Integer.class, "czl", "Zoom Level Number", true, true); private final ParameterHelper helper; private Centroid( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ClusteringParameters.java ================================================ /** * 
Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; public class ClusteringParameters { public enum Clustering implements ParameterEnum { MAX_REDUCER_COUNT(Integer.class, "crc", "Maximum Clustering Reducer Count", false, true), RETAIN_GROUP_ASSIGNMENTS(Boolean.class, "ga", "Retain Group assignments during execution", false, false), MINIMUM_SIZE(Integer.class, "cms", "Minimum Cluster Size", false, true), MAX_ITERATIONS(Integer.class, "cmi", "Maximum number of iterations when finding optimal clusters", false, true), CONVERGANCE_TOLERANCE(Double.class, "cct", "Convergence Tolerance", false, true), ZOOM_LEVELS(Integer.class, "zl", "Number of Zoom Levels to Process", false, true); private final ParameterHelper helper; private Clustering( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/CommonParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.analytic.model.IndexModelBuilder; public class CommonParameters { public enum Common implements ParameterEnum { DIMENSION_EXTRACT_CLASS(DimensionExtractor.class, "dde", "Dimension Extractor Class implements org.locationtech.geowave.analytics.extract.DimensionExtractor", true, true), DISTANCE_FUNCTION_CLASS(DistanceFn.class, "cdf", "Distance Function Class implements org.locationtech.geowave.analytics.distance.DistanceFn", true, true), INDEX_MODEL_BUILDER_CLASS(IndexModelBuilder.class, "cim", "Class implements org.locationtech.geowave.analytics.tools.model.IndexModelBuilder", true, true); private final ParameterHelper helper; Common( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ExtractParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.core.store.api.Query; public class ExtractParameters { public enum Extract implements ParameterEnum { OUTPUT_DATA_TYPE_ID(String.class, "eot", "Output Data Type ID", false, true), DATA_NAMESPACE_URI(String.class, "ens", "Output Data Namespace URI", false, true), REDUCER_COUNT(Integer.class, "erc", "Number of Reducers For initial data extraction and de-duplication", false, true), DIMENSION_EXTRACT_CLASS(DimensionExtractor.class, "ede", "Class to extract dimensions into a simple feature output", true, true), QUERY(Query.class, "eq", "Query", false, true), MAX_INPUT_SPLIT(Integer.class, "emx", "Maximum input split size", false, true), MIN_INPUT_SPLIT(Integer.class, "emn", "Minimum input split size", false, true), GROUP_ID(String.class, "eg", "Group ID assigned to extracted data", false, true); private final transient ParameterHelper helper; private Extract( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/FormatConfiguration.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import java.util.Collection; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.PropertyManagement; public interface FormatConfiguration { public void setup(PropertyManagement runTimeProperties, Configuration configuration) throws Exception; public Class getFormatClass(); /** * If the format supports only one option, then 'setting' the data has no effect. * * @return true if the data is a Hadoop Writable or an POJO. */ public boolean isDataWritable(); public void setDataIsWritable(boolean isWritable); public Collection> getParameters(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/GlobalParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; public class GlobalParameters { public enum Global implements ParameterEnum { PARENT_BATCH_ID(String.class, "pb", "Batch ID", true), CRS_ID(String.class, "crs", "CRS ID", true), BATCH_ID(String.class, "b", "Batch ID", true); private final ParameterHelper helper; private Global( final Class baseClass, final String name, final String description, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/GroupParameterEnum.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import java.util.Set; import org.apache.commons.cli.Option; public interface GroupParameterEnum extends ParameterEnum { public void fillOptions(Set

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.Projection; import org.locationtech.geowave.analytic.extract.CentroidExtractor; public class HullParameters { public enum Hull implements ParameterEnum { INDEX_NAME(String.class, "hid", "Index Identifier for Centroids", false, true), DATA_TYPE_ID(String.class, "hdt", "Data Type ID for a centroid item", false, true), DATA_NAMESPACE_URI(String.class, "hns", "Data Type Namespace for a centroid item", false, true), REDUCER_COUNT(Integer.class, "hrc", "Centroid Reducer Count", false, true), PROJECTION_CLASS(Projection.class, "hpe", "Class to project on to 2D space. Implements org.locationtech.geowave.analytics.tools.Projection", true, true), EXTRACTOR_CLASS(CentroidExtractor.class, "hce", "Centroid Exractor Class implements org.locationtech.geowave.analytics.extract.CentroidExtractor", true, true), WRAPPER_FACTORY_CLASS(AnalyticItemWrapperFactory.class, "hfc", "Class to create analytic item to capture hulls. 
Implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory", true, true), ITERATION(Integer.class, "hi", "The iteration of the hull calculation", false, true), HULL_BUILDER(Projection.class, "hhb", "Hull Builder", true, true), ZOOM_LEVEL(Integer.class, "hzl", "Zoom Level Number", false, true); private final ParameterHelper helper; private Hull( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/InputParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.apache.hadoop.fs.Path; public class InputParameters { public enum Input implements ParameterEnum { INPUT_FORMAT(FormatConfiguration.class, "ifc", "Input Format Class", true, true), HDFS_INPUT_PATH(Path.class, "iip", "Input HDFS File Path", false, true); private final ParameterHelper helper; private Input( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/InputStoreParameterHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class InputStoreParameterHelper implements ParameterHelper { /** */ private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(InputStoreParameterHelper.class); @Override public Class getBaseClass() { return PersistableStore.class; } @Override public void setValue( final Configuration config, final Class scope, final PersistableStore value) { final DataStorePluginOptions options = value.getDataStoreOptions(); GeoWaveInputFormat.setStoreOptions(config, options); } @Override public PersistableStore getValue( final JobContext context, final Class scope, final PersistableStore defaultValue) { final DataStorePluginOptions pluginOptions = GeoWaveInputFormat.getStoreOptions(context); if (pluginOptions != null) { return new PersistableStore(pluginOptions); } else { return defaultValue; } } @Override public PersistableStore getValue(final PropertyManagement propertyManagement) { try { return (PersistableStore) propertyManagement.getProperty( StoreParameters.StoreParam.INPUT_STORE); } catch (final Exception e) { LOGGER.error("Unable to deserialize data store", e); return null; } } @Override public void setValue(final 
PropertyManagement propertyManagement, final PersistableStore value) { propertyManagement.store(StoreParameters.StoreParam.INPUT_STORE, value); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/JumpParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.core.index.numeric.NumericRange; public class JumpParameters { public enum Jump implements ParameterEnum { RANGE_OF_CENTROIDS(NumericRange.class, "jrc", "Comma-separated range of centroids (e.g. 2,100)", true), KPLUSPLUS_MIN(Integer.class, "jkp", "The minimum k when K means ++ takes over sampling.", true), COUNT_OF_CENTROIDS(Integer.class, "jcc", "Set the count of centroids for one run of kmeans.", true); private final ParameterHelper helper; private Jump( final Class baseClass, final String name, final String description, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/MapReduceParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import java.util.Arrays; import java.util.Collection; public class MapReduceParameters { public enum MRConfig implements ParameterEnum { CONFIG_FILE(String.class, "conf", "MapReduce Configuration", true), HDFS_HOST_PORT(String.class, "hdfs", "HDFS hostname and port in the format hostname:port", true), HDFS_BASE_DIR(String.class, "hdfsbase", "Fully qualified path to the base directory in hdfs", true), YARN_RESOURCE_MANAGER(String.class, "resourceman", "Yarn resource manager hostname and port in the format hostname:port", true), JOBTRACKER_HOST_PORT(String.class, "jobtracker", "Hadoop job tracker hostname and port in the format hostname:port", true); private final ParameterHelper helper; private MRConfig( final Class baseClass, final String name, final String description, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } public static final Collection> getParameters() { return Arrays.asList( new ParameterEnum[] { MRConfig.CONFIG_FILE, MRConfig.HDFS_BASE_DIR, MRConfig.HDFS_HOST_PORT, MRConfig.JOBTRACKER_HOST_PORT, MRConfig.YARN_RESOURCE_MANAGER}); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/OutputParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.apache.hadoop.fs.Path; public class OutputParameters { public enum Output implements ParameterEnum { REDUCER_COUNT(Integer.class, "orc", "Number of Reducers For Output", false, true), OUTPUT_FORMAT(FormatConfiguration.class, "ofc", "Output Format Class", true, true), INDEX_ID(String.class, "oid", "Output Index ID for objects that will be written to GeoWave", false, true), DATA_TYPE_ID(String.class, "odt", "Output Data ID assigned to objects that will be written to GeoWave", false, true), DATA_NAMESPACE_URI(String.class, "ons", "Output namespace for objects that will be written to GeoWave", false, true), HDFS_OUTPUT_PATH(Path.class, "oop", "Output HDFS File Path", false, true); private final ParameterHelper helper; private Output( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/OutputStoreParameterHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class OutputStoreParameterHelper implements ParameterHelper { /** */ private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(OutputStoreParameterHelper.class); @Override public Class getBaseClass() { return PersistableStore.class; } @Override public void setValue( final Configuration config, final Class scope, final PersistableStore value) { final DataStorePluginOptions options = value.getDataStoreOptions(); GeoWaveOutputFormat.setStoreOptions(config, options); } @Override public PersistableStore getValue( final JobContext context, final Class scope, final PersistableStore defaultValue) { final DataStorePluginOptions pluginOptions = GeoWaveOutputFormat.getStoreOptions(context); if (pluginOptions != null) { return new PersistableStore(pluginOptions); } else { return defaultValue; } } @Override public PersistableStore getValue(final PropertyManagement propertyManagement) { try { return (PersistableStore) propertyManagement.getProperty( StoreParameters.StoreParam.OUTPUT_STORE); } catch (final Exception e) { LOGGER.error("Unable to deserialize data store", e); return null; } } @Override public void 
setValue(final PropertyManagement propertyManagement, final PersistableStore value) { propertyManagement.store(StoreParameters.StoreParam.OUTPUT_STORE, value); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ParameterEnum.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import java.io.Serializable; public interface ParameterEnum extends Serializable { public ParameterHelper getHelper(); public Enum self(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/ParameterHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import java.io.Serializable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.PropertyManagement; public interface ParameterHelper extends Serializable { public Class getBaseClass(); public T getValue(PropertyManagement propertyManagement); public void setValue(PropertyManagement propertyManagement, T value); public void setValue(Configuration config, Class scope, T value); public T getValue(JobContext context, Class scope, T defaultValue); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/PartitionParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.analytic.partitioner.Partitioner; public class PartitionParameters { public enum Partition implements ParameterEnum { MAX_DISTANCE(Double.class, "pmd", "Partition Max Distance", false, true), PARTITION_PRECISION(Double.class, "pp", "Partition Precision", false, true), GEOMETRIC_DISTANCE_UNIT(String.class, "du", "Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit)", false, true), DISTANCE_THRESHOLDS(String.class, "dt", "Comma separated list of distance thresholds, per dimension", false, true), PARTITION_DECREASE_RATE(Double.class, "pdr", "Rate of decrease for precision(within (0,1])", false, true), MAX_MEMBER_SELECTION(Integer.class, "pms", "Maximum number of members selected from a partition", false, true), SECONDARY_PARTITIONER_CLASS(Partitioner.class, "psp", "Perform secondary partitioning with the provided class", true, false), PARTITIONER_CLASS(Partitioner.class, "pc", "Index Identifier for Centroids", true, true); private final ParameterHelper helper; private Partition( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/SampleParameters.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; import org.locationtech.geowave.analytic.sample.SampleProbabilityFn; import org.locationtech.geowave.analytic.sample.function.SamplingRankFunction; public class SampleParameters { public enum Sample implements ParameterEnum { SAMPLE_SIZE(Integer.class, "sss", "Sample Size", false, true), MIN_SAMPLE_SIZE(Integer.class, "sms", "Minimum Sample Size", false, true), MAX_SAMPLE_SIZE(Integer.class, "sxs", "Max Sample Size", false, true), DATA_TYPE_NAME(String.class, "sdt", "Sample Data Type Id", false, true), INDEX_NAME(String.class, "sdt", "Sample Index Type Id", false, true), SAMPLE_ITERATIONS(Integer.class, "ssi", "Minimum number of sample iterations", false, true), PROBABILITY_FUNCTION(SampleProbabilityFn.class, "spf", "The PDF determines the probability for samping an item. 
Used by specific sample rank functions, such as CentroidDistanceBasedSamplingRankFunction.", true, true), SAMPLE_RANK_FUNCTION(SamplingRankFunction.class, "srf", "The rank function used when sampling the first N highest rank items.", true, true); private final transient ParameterHelper helper; private Sample( final Class baseClass, final String name, final String description, final boolean isClass, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, isClass, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/StoreParameters.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param; public class StoreParameters { public enum StoreParam implements ParameterEnum { INPUT_STORE(new InputStoreParameterHelper()), OUTPUT_STORE(new OutputStoreParameterHelper()),; private final ParameterHelper helper; private StoreParam(final ParameterHelper helper) { this.helper = helper; } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/CentroidParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.CentroidParameters.Centroid; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface CentroidParameter { Centroid[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/ClusteringParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface ClusteringParameter { Clustering[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/CommonParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.CommonParameters.Common; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface CommonParameter { Common[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/ExtractParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.ExtractParameters.Extract; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface ExtractParameter { Extract[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/GlobalParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.GlobalParameters.Global; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface GlobalParameter { Global[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/HullParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.HullParameters.Hull; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface HullParameter { Hull[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/InputParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.param.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.analytic.param.InputParameters.Input; /** * This is a stop-gap measure to allow using JCommander with Analytics, until we figure out how to * deal with PropertyEnum. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD, ElementType.METHOD}) public @interface InputParameter { Input[] value(); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/JumpParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.param.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.locationtech.geowave.analytic.param.JumpParameters.Jump;

/**
 * Binds one or more {@link Jump} analytic parameter enums to a JCommander-annotated field or
 * method. This is a stop-gap measure to allow using JCommander with Analytics, until we figure out
 * how to deal with PropertyEnum.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface JumpParameter {
  /** @return the {@link Jump} parameter enums carried by the annotated element */
  Jump[] value();
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/MapReduceParameter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.param.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;

/**
 * Binds one or more {@link MRConfig} analytic parameter enums to a JCommander-annotated field or
 * method. This is a stop-gap measure to allow using JCommander with Analytics, until we figure out
 * how to deal with PropertyEnum.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface MapReduceParameter {
  /** @return the {@link MRConfig} parameter enums carried by the annotated element */
  MRConfig[] value();
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/OutputParameter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.param.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.locationtech.geowave.analytic.param.OutputParameters.Output;

/**
 * Binds one or more {@link Output} analytic parameter enums to a JCommander-annotated field or
 * method. This is a stop-gap measure to allow using JCommander with Analytics, until we figure out
 * how to deal with PropertyEnum.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface OutputParameter {
  /** @return the {@link Output} parameter enums carried by the annotated element */
  Output[] value();
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/PartitionParameter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.param.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.locationtech.geowave.analytic.param.PartitionParameters.Partition;

/**
 * Binds one or more {@link Partition} analytic parameter enums to a JCommander-annotated field or
 * method. This is a stop-gap measure to allow using JCommander with Analytics, until we figure out
 * how to deal with PropertyEnum.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface PartitionParameter {
  /** @return the {@link Partition} parameter enums carried by the annotated element */
  Partition[] value();
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/param/annotations/SampleParameter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.param.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.locationtech.geowave.analytic.param.SampleParameters.Sample;

/**
 * Binds one or more {@link Sample} analytic parameter enums to a JCommander-annotated field or
 * method. This is a stop-gap measure to allow using JCommander with Analytics, until we figure out
 * how to deal with PropertyEnum.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface SampleParameter {
  /** @return the {@link Sample} parameter enums carried by the annotated element */
  Sample[] value();
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/AbstractPartitioner.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.partitioner;

import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.ScopedJobConfiguration;
import org.locationtech.geowave.analytic.model.IndexModelBuilder;
import org.locationtech.geowave.analytic.model.SpatialIndexModelBuilder;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters.Partition;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.SinglePartitionInsertionIds;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.geowave.core.store.index.IndexImpl;

/**
 * Basic support class for {@link Partitioner} implementations. Builds a single-tier Hilbert SFC
 * index whose per-dimension precision is derived from a distance threshold, and maps an entry's
 * numeric data (a primary region plus neighbor-expansion regions) to {@code PartitionData}
 * identifiers.
 *
 * <p>NOTE(review): the original generic declaration appears to have been lost in extraction; the
 * references to {@code T} below suggest this class was declared as
 * {@code AbstractPartitioner<T> implements Partitioner<T>} — confirm against upstream.
 */
public abstract class AbstractPartitioner implements Partitioner {
  /** */
  private static final long serialVersionUID = 1L;

  // Rebuilt from serialized bytes in readObject(); not serialized by default (transient).
  private transient Index index = null;
  // One distance threshold per index dimension; drives SFC cell size in initIndex().
  private double[] distancePerDimension = null;
  // Scales requested bit precision; validated to [0, 1] in initialize().
  private double precisionFactor = 1.0;

  public AbstractPartitioner() {}

  /**
   * Constructs a partitioner and immediately builds its index.
   *
   * @param indexModel the common index model describing the dimensions
   * @param distancePerDimension the distance threshold per dimension
   */
  public AbstractPartitioner(
      final CommonIndexModel indexModel,
      final double[] distancePerDimension) {
    super();
    this.distancePerDimension = distancePerDimension;
    this.initIndex(indexModel, distancePerDimension);
  }

  /**
   * Constructs a partitioner with thresholds only; the index must be built later (e.g. via
   * {@link #initialize(ScopedJobConfiguration)}).
   */
  public AbstractPartitioner(final double[] distancePerDimension) {
    super();
    this.distancePerDimension = distancePerDimension;
  }

  protected double[] getDistancePerDimension() {
    return distancePerDimension;
  }

  protected Index getIndex() {
    return index;
  }

  /**
   * Computes the distinct partitions (primary and expansion) covering the given entry.
   *
   * @return an empty list if the entry yields no numeric data, otherwise the deduplicated
   *         partition identifiers
   */
  @Override
  public List getCubeIdentifiers(final T entry) {
    // Set deduplicates partitions shared between primary and expansion regions.
    final Set partitionIdSet = new HashSet<>();
    final NumericDataHolder numericData = getNumericData(entry);
    if (numericData == null) {
      return Collections.emptyList();
    }
    addPartitions(
        partitionIdSet,
        getIndex().getIndexStrategy().getInsertionIds(numericData.primary),
        true);
    for (final MultiDimensionalNumericData expansionData : numericData.expansion) {
      addPartitions(
          partitionIdSet,
          getIndex().getIndexStrategy().getInsertionIds(expansionData),
          false);
    }
    return new ArrayList<>(partitionIdSet);
  }

  /**
   * Streams each partition of the entry to the callback: primary regions first (marked primary),
   * then expansion regions (marked non-primary). Unlike {@link #getCubeIdentifiers}, no
   * deduplication is performed here.
   */
  @Override
  public void partition(final T entry, final PartitionDataCallback callback) throws Exception {
    final NumericDataHolder numericData = getNumericData(entry);
    if (numericData == null) {
      return;
    }
    final InsertionIds primaryIds =
        getIndex().getIndexStrategy().getInsertionIds(numericData.primary);
    for (final SinglePartitionInsertionIds partitionInsertionIds : primaryIds.getPartitionKeys()) {
      for (final byte[] sortKey : partitionInsertionIds.getSortKeys()) {
        callback.partitionWith(
            new PartitionData(
                new ByteArray(partitionInsertionIds.getPartitionKey()),
                new ByteArray(sortKey),
                true));
      }
    }
    for (final MultiDimensionalNumericData expansionData : numericData.expansion) {
      final InsertionIds expansionIds =
          getIndex().getIndexStrategy().getInsertionIds(expansionData);
      for (final SinglePartitionInsertionIds partitionInsertionIds : expansionIds.getPartitionKeys()) {
        for (final byte[] sortKey : partitionInsertionIds.getSortKeys()) {
          callback.partitionWith(
              new PartitionData(
                  new ByteArray(partitionInsertionIds.getPartitionKey()),
                  new ByteArray(sortKey),
                  false));
        }
      }
    }
  }

  /** Holds an entry's primary numeric region plus its neighbor-expansion regions. */
  protected static class NumericDataHolder {
    MultiDimensionalNumericData primary;
    MultiDimensionalNumericData[] expansion;
  }

  /**
   * Extracts the numeric (primary + expansion) data for an entry; subclasses define how.
   *
   * @return the holder, or {@code null} if the entry contributes no data
   */
  protected abstract NumericDataHolder getNumericData(final T entry);

  /** Returns the numeric range covered by the given partition's keys. */
  public MultiDimensionalNumericData getRangesForPartition(final PartitionData partitionData) {
    return index.getIndexStrategy().getRangeForId(
        partitionData.getPartitionKey().getBytes(),
        partitionData.getSortKey().getBytes());
  }

  /** Adds one PartitionData per (partition key, sort key) pair to the master set. */
  protected void addPartitions(
      final Set masterList,
      final InsertionIds insertionIds,
      final boolean isPrimary) {
    for (final SinglePartitionInsertionIds partitionInsertionIds : insertionIds.getPartitionKeys()) {
      for (final byte[] sortKey : partitionInsertionIds.getSortKeys()) {
        masterList.add(
            new PartitionData(
                new ByteArray(partitionInsertionIds.getPartitionKey()),
                new ByteArray(sortKey),
                isPrimary));
      }
    }
  }

  /**
   * Parses the comma-separated DISTANCE_THRESHOLDS configuration value into per-dimension
   * distances (default "0.000001" yields a single-element array).
   */
  private static double[] getDistances(final ScopedJobConfiguration config) {
    final String distances =
        config.getString(PartitionParameters.Partition.DISTANCE_THRESHOLDS, "0.000001");
    final String distancesArray[] = distances.split(",");
    final double[] distancePerDimension = new double[distancesArray.length];
    {
      int i = 0;
      for (final String eachDistance : distancesArray) {
        distancePerDimension[i++] = Double.valueOf(eachDistance);
      }
    }
    return distancePerDimension;
  }

  @Override
  public void initialize(final JobContext context, final Class scope) throws IOException {
    initialize(new ScopedJobConfiguration(context.getConfiguration(), scope));
  }

  /**
   * Reads distances, precision factor, and the index model builder from configuration, then builds
   * the index. If the model has more dimensions than configured distances, the last distance is
   * repeated for the extra dimensions.
   *
   * @throws IOException if the model builder cannot be instantiated
   * @throws IllegalArgumentException if the precision factor is outside [0, 1]
   */
  public void initialize(final ScopedJobConfiguration config) throws IOException {
    distancePerDimension = getDistances(config);
    this.precisionFactor = config.getDouble(Partition.PARTITION_PRECISION, 1.0);
    if ((precisionFactor < 0) || (precisionFactor > 1.0)) {
      throw new IllegalArgumentException(
          String.format("Precision value must be between 0 and 1: %.6f", precisionFactor));
    }
    try {
      final IndexModelBuilder builder =
          config.getInstance(
              CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,
              IndexModelBuilder.class,
              SpatialIndexModelBuilder.class);
      final CommonIndexModel model = builder.buildModel();
      if (model.getDimensions().length > distancePerDimension.length) {
        // pad missing dimensions with the last configured distance
        final double[] newDistancePerDimension = new double[model.getDimensions().length];
        for (int j = 0; j < newDistancePerDimension.length; j++) {
          newDistancePerDimension[j] =
              distancePerDimension[j < distancePerDimension.length
                  ? j
                  : (distancePerDimension.length - 1)];
        }
        distancePerDimension = newDistancePerDimension;
      }
      this.initIndex(model, distancePerDimension);
    } catch (InstantiationException | IllegalAccessException e) {
      throw new IOException(e);
    }
  }

  /** Copies this partitioner's parameters from runtime properties into the Hadoop config. */
  @Override
  public void setup(
      final PropertyManagement runTimeProperties,
      final Class scope,
      final Configuration configuration) {
    final ParameterEnum[] params =
        new ParameterEnum[] {
            CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,
            PartitionParameters.Partition.DISTANCE_THRESHOLDS,
            Partition.PARTITION_PRECISION};
    runTimeProperties.setConfig(params, configuration, scope);
  }

  /**
   * Builds a single-tier Hilbert SFC index whose bit precision per dimension is derived from the
   * dimension range and the requested distance (cell width), scaled by the precision factor and
   * capped at 63 total bits.
   */
  protected void initIndex(
      final CommonIndexModel indexModel,
      final double[] distancePerDimensionForIndex) {
    // truncating to lower precision
    final NumericDimensionField[] dimensions = indexModel.getDimensions();
    int totalRequestedPrecision = 0;
    final int[] dimensionPrecision = new int[indexModel.getDimensions().length];
    for (int i = 0; i < dimensionPrecision.length; i++) {
      // distance is a radius; double it to get the total cell width
      final double distance = distancePerDimensionForIndex[i] * 2.0;
      // adjust by precision factor (0 to 1.0)
      dimensionPrecision[i] =
          (int) (precisionFactor
              * Math.abs((int) (Math.log(dimensions[i].getRange() / distance) / Math.log(2))));
      totalRequestedPrecision += dimensionPrecision[i];
    }
    if (totalRequestedPrecision > 63) {
      // rescale so the summed precision fits in a 63-bit SFC key
      final double rescale = 63.0 / totalRequestedPrecision;
      for (int i = 0; i < dimensionPrecision.length; i++) {
        dimensionPrecision[i] = (int) (rescale * dimensionPrecision[i]);
      }
    }
    final TieredSFCIndexStrategy indexStrategy =
        TieredSFCIndexFactory.createSingleTierStrategy(
            indexModel.getDimensions(),
            dimensionPrecision,
            SFCType.HILBERT);
    // Not relevant since this is a single tier strategy.
    // For now, just setting to a non-zero reasonable value
    indexStrategy.setMaxEstimatedDuplicateIdsPerDimension(2);
    index = new IndexImpl(indexStrategy, indexModel);
  }

  @Override
  public Collection> getParameters() {
    return Arrays.asList(
        new ParameterEnum[] {
            CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,
            PartitionParameters.Partition.DISTANCE_THRESHOLDS,
            Partition.PARTITION_PRECISION});
  }

  // Custom serialization: the Index is not Serializable, so it is persisted via GeoWave's
  // PersistenceUtils binary encoding, followed by the primitive state.
  private void writeObject(final ObjectOutputStream stream) throws IOException {
    final byte[] indexData = PersistenceUtils.toBinary(this.index);
    stream.writeInt(indexData.length);
    stream.write(indexData);
    stream.writeDouble(precisionFactor);
    stream.writeInt(distancePerDimension.length);
    for (final double v : distancePerDimension) {
      stream.writeDouble(v);
    }
  }

  // Mirror of writeObject(): field order must match exactly.
  private void readObject(final java.io.ObjectInputStream stream)
      throws IOException, ClassNotFoundException {
    final byte[] indexData = new byte[stream.readInt()];
    stream.readFully(indexData);
    index = (Index) PersistenceUtils.fromBinary(indexData);
    precisionFactor = stream.readDouble();
    distancePerDimension = new double[stream.readInt()];
    for (int i = 0; i < distancePerDimension.length; i++) {
      distancePerDimension[i] = stream.readDouble();
    }
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + Arrays.hashCode(distancePerDimension);
    result = (prime * result) + ((index == null) ? 0 : index.hashCode());
    long temp;
    temp = Double.doubleToLongBits(precisionFactor);
    result = (prime * result) + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final AbstractPartitioner other = (AbstractPartitioner) obj;
    if (!Arrays.equals(distancePerDimension, other.distancePerDimension)) {
      return false;
    }
    if (index == null) {
      if (other.index != null) {
        return false;
      }
    } else if (!index.equals(other.index)) {
      return false;
    }
    if (Double.doubleToLongBits(precisionFactor) != Double.doubleToLongBits(
        other.precisionFactor)) {
      return false;
    }
    return true;
  }
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/BoundaryPartitioner.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.partitioner;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.measure.Unit;
import javax.measure.quantity.Length;
import org.locationtech.geowave.analytic.ScopedJobConfiguration;
import org.locationtech.geowave.analytic.extract.DimensionExtractor;
import org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor;
import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Partition on the boundary of polygons (the hull); not on the interior space. Each consecutive
 * coordinate pair of the extracted geometry is partitioned as a separate two-point line string via
 * the superclass.
 */
public class BoundaryPartitioner extends OrthodromicDistancePartitioner {
  /** */
  private static final long serialVersionUID = 461679322447608507L;

  // Pulls the boundary geometry out of incoming SimpleFeatures.
  SimpleFeatureGeometryExtractor extractor = new SimpleFeatureGeometryExtractor();

  public BoundaryPartitioner() {
    super();
  }

  public BoundaryPartitioner(
      final CoordinateReferenceSystem crs,
      final CommonIndexModel indexModel,
      final DimensionExtractor dimensionExtractor,
      final double[] distancePerDimension,
      final Unit geometricDistanceUnit) {
    // note: the passed dimensionExtractor is intentionally ignored; an EchoExtractor is used so
    // the segment geometries built below pass through unchanged
    super(crs, indexModel, new EchoExtractor(), distancePerDimension, geometricDistanceUnit);
  }

  /** Pass-through extractor: the entry IS the geometry; all entries share group "g". */
  private static class EchoExtractor extends EmptyDimensionExtractor implements DimensionExtractor {
    /** */
    private static final long serialVersionUID = 1L;

    @Override
    public Geometry getGeometry(final Object anObject) {
      return (Geometry) anObject;
    }

    @Override
    public String getGroupID(final Object anObject) {
      return "g";
    }
  }

  /**
   * Partitions the feature's boundary segment-by-segment; a degenerate geometry (< 2 coordinates)
   * is delegated whole to the superclass.
   */
  @Override
  public List getCubeIdentifiers(final Object entry) {
    final Geometry geom = extractor.getGeometry((SimpleFeature) entry);
    final Coordinate[] coords = (geom.getCoordinates());
    if (coords.length < 2) {
      return super.getCubeIdentifiers(geom);
    } else {
      final List r = new ArrayList<>();
      for (int i = 0; i < (coords.length - 1); i++) {
        r.addAll(
            super.getCubeIdentifiers(
                geom.getFactory().createLineString(new Coordinate[] {coords[i], coords[i + 1]})));
      }
      return r;
    }
  }

  /** Same segment-wise decomposition as {@link #getCubeIdentifiers}, streamed to the callback. */
  @Override
  public void partition(final Object entry, final PartitionDataCallback callback)
      throws Exception {
    final Geometry geom = extractor.getGeometry((SimpleFeature) entry);
    final Coordinate[] coords = (geom.getCoordinates());
    if (coords.length < 2) {
      super.partition(geom, callback);
    } else {
      for (int i = 0; i < (coords.length - 1); i++) {
        super.partition(
            geom.getFactory().createLineString(new Coordinate[] {coords[i], coords[i + 1]}),
            callback);
      }
    }
  }

  @Override
  public void initialize(final ScopedJobConfiguration config) throws IOException {
    super.initialize(config);
    // override whatever extractor the config picked; segments must echo through unchanged
    super.dimensionExtractor = new EchoExtractor();
  }
}
================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/OrthodromicDistancePartitioner.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.partitioner; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.measure.Unit; import javax.measure.quantity.Length; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.geotools.referencing.CRS; import org.locationtech.geowave.analytic.GeometryCalculations; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.PartitionParameters; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import 
org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import si.uom.SI; import tech.units.indriya.unit.Units; /* * Calculates distance use orthodromic distance to calculate the bounding box around each point. * * The approach is slow and more accurate, resulting in more partitions of smaller size. The class * requires {@link CoordinateReferenceSystem} for the distance calculation and {@link * DimensionExtractor} to extract geometries and other dimensions. * * The order of distances provided must match the order or dimensions extracted from the dimension * extractor. */ public class OrthodromicDistancePartitioner extends AbstractPartitioner implements Partitioner, java.io.Serializable { /** */ private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(OrthodromicDistancePartitioner.class); private Unit geometricDistanceUnit = SI.METRE; private String crsName; private transient CoordinateReferenceSystem crs = null; private transient GeometryCalculations calculator; protected DimensionExtractor dimensionExtractor; private int latDimensionPosition; private int longDimensionPosition; public OrthodromicDistancePartitioner() {} public OrthodromicDistancePartitioner( final CoordinateReferenceSystem crs, final CommonIndexModel indexModel, final DimensionExtractor dimensionExtractor, final double[] distancePerDimension, final Unit geometricDistanceUnit) { super(distancePerDimension); this.crs = crs; this.crsName = crs.getIdentifiers().iterator().next().toString(); this.geometricDistanceUnit = geometricDistanceUnit; this.dimensionExtractor = 
dimensionExtractor; initIndex(indexModel, distancePerDimension); } @Override protected NumericDataHolder getNumericData(final T entry) { final NumericDataHolder numericDataHolder = new NumericDataHolder(); final Geometry entryGeometry = dimensionExtractor.getGeometry(entry); final double otherDimensionData[] = dimensionExtractor.getDimensions(entry); numericDataHolder.primary = getNumericData(entryGeometry.getEnvelope(), otherDimensionData); final List geometries = getGeometries(entryGeometry.getCentroid().getCoordinate(), getDistancePerDimension()); final MultiDimensionalNumericData[] values = new MultiDimensionalNumericData[geometries.size()]; int i = 0; for (final Geometry geometry : geometries) { values[i++] = getNumericData(geometry.getEnvelope(), otherDimensionData); } numericDataHolder.expansion = values; return numericDataHolder; } private MultiDimensionalNumericData getNumericData( final Geometry geometry, final double[] otherDimensionData) { final NumericDimensionField[] dimensionFields = getIndex().getIndexModel().getDimensions(); final NumericData[] numericData = new NumericData[dimensionFields.length]; final double[] distancePerDimension = getDistancePerDimension(); int otherIndex = 0; for (int i = 0; i < dimensionFields.length; i++) { final double minValue = (i == this.longDimensionPosition) ? geometry.getEnvelopeInternal().getMinX() : (i == this.latDimensionPosition ? geometry.getEnvelopeInternal().getMinY() : otherDimensionData[otherIndex] - distancePerDimension[i]); final double maxValue = (i == this.longDimensionPosition) ? geometry.getEnvelopeInternal().getMaxX() : (i == this.latDimensionPosition ? 
geometry.getEnvelopeInternal().getMaxY() : otherDimensionData[otherIndex] + distancePerDimension[i]); if ((i != this.longDimensionPosition) && (i != latDimensionPosition)) { otherIndex++; } numericData[i] = new NumericRange(minValue, maxValue); } return new BasicNumericDataset(numericData); } private static int findLongitude(final CommonIndexModel indexModel) { return indexOf(indexModel.getDimensions(), LongitudeDefinition.class); } private static int findLatitude(final CommonIndexModel indexModel) { return indexOf(indexModel.getDimensions(), LatitudeDefinition.class); } private static int indexOf( final NumericDimensionField fields[], final Class clazz) { for (int i = 0; i < fields.length; i++) { if (clazz.isInstance(fields[i].getBaseDefinition())) { return i; } } return -1; } private List getGeometries( final Coordinate coordinate, final double[] distancePerDimension) { return getCalculator().buildSurroundingGeometries( new double[] { distancePerDimension[longDimensionPosition], distancePerDimension[latDimensionPosition]}, geometricDistanceUnit == null ? 
Units.METRE : geometricDistanceUnit, coordinate); } private GeometryCalculations getCalculator() { if (calculator == null) { // this block would only occur in test or in failed initialization if (crs == null) { try { crs = CRS.decode(crsName, true); } catch (final FactoryException e) { LOGGER.error("CRS not providd and default EPSG:4326 cannot be instantiated", e); throw new RuntimeException(e); } } calculator = new GeometryCalculations(crs); } return calculator; } @Override protected void initIndex(final CommonIndexModel indexModel, final double[] distancePerDimension) { longDimensionPosition = findLongitude(indexModel); latDimensionPosition = findLatitude(indexModel); final List geos = getGeometries(new Coordinate(0, 0), distancePerDimension); final Envelope envelope = geos.get(0).getEnvelopeInternal(); // set up the distances based on geometry (orthodromic distance) final double[] distancePerDimensionForIndex = new double[distancePerDimension.length]; for (int i = 0; i < distancePerDimension.length; i++) { distancePerDimensionForIndex[i] = (i == longDimensionPosition) ? envelope.getWidth() / 2.0 : (i == latDimensionPosition ? 
envelope.getHeight() / 2.0 : distancePerDimension[i]); LOGGER.info("Dimension size {} is {} ", i, distancePerDimensionForIndex[i]); } super.initIndex(indexModel, distancePerDimensionForIndex); } @Override public void initialize(final JobContext context, final Class scope) throws IOException { this.initialize(context.getConfiguration(), scope); } public void initialize(final Configuration configuration, final Class scope) throws IOException { initialize(new ScopedJobConfiguration(configuration, scope)); } @Override public void initialize(final ScopedJobConfiguration config) throws IOException { crsName = config.getString(GlobalParameters.Global.CRS_ID, "EPSG:4326"); try { crs = CRS.decode(crsName, true); } catch (final FactoryException e) { throw new IOException("Cannot find CRS " + crsName, e); } try { dimensionExtractor = config.getInstance( ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS, DimensionExtractor.class, SimpleFeatureGeometryExtractor.class); } catch (final Exception ex) { throw new IOException( "Cannot find class for " + ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS.toString(), ex); } final String distanceUnit = config.getString(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, "m"); this.geometricDistanceUnit = GeometryUtils.lookup(distanceUnit); super.initialize(config); } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(super.getParameters()); params.addAll( Arrays.asList( new ParameterEnum[] { PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS})); return params; } @Override public void setup( final PropertyManagement runTimeProperties, final Class scope, final Configuration configuration) { super.setup(runTimeProperties, scope, configuration); final ParameterEnum[] params = new ParameterEnum[] { GlobalParameters.Global.CRS_ID, ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS, PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT}; 
runTimeProperties.setConfig(params, configuration, scope); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/partitioner/Partitioner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.partitioner;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.JobContext;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayUtils;

/**
 * Provide a partition for a data item.
 *
 * <p>Multiple partitions are permitted. Only one partition is considered primary. A primary
 * partition is the partition for an item in which the item is processed on behalf of itself. All
 * other partitions are those partitions that require visibility to a specific item for other items
 * to reference. This approach supports nearest neighbor type queries. Consider that an item can
 * only discover neighbors in its partition. However, the item can be discovered as a nearest
 * neighbor in those partitions in which the item participates as a non-primary.
 *
 * @param
 */
public interface Partitioner extends Serializable {
  public void initialize(final JobContext context, final Class scope) throws IOException;

  public List getCubeIdentifiers(final T entry);

  public void partition(T entry, PartitionDataCallback callback) throws Exception;

  public Collection> getParameters();

  public void setup(
      PropertyManagement runTimeProperties,
      Class scope,
      Configuration configuration);

  /** Receives each PartitionData produced for an entry during partitioning. */
  public static interface PartitionDataCallback {
    void partitionWith(PartitionData data) throws Exception;
  }

  /**
   * Represents a partition associated with a specific item. The partition is marked as primary or
   * secondary. A secondary partition is a neighboring partition to an item. The intent is to
   * inspect neighbor partitions to handle edge cases.
   */
  public static class PartitionData implements Serializable, Writable {
    /** */
    private static final long serialVersionUID = 1L;

    private ByteArray partitionKey;
    private ByteArray sortKey;
    // Optional grouping identifier; remains null until set, and 0-length on the wire when null.
    private ByteArray groupId = null;
    private boolean isPrimary;

    public ByteArray getPartitionKey() {
      return partitionKey;
    }

    public ByteArray getSortKey() {
      return sortKey;
    }

    /** @return partition key concatenated with sort key as one byte array */
    public ByteArray getCompositeKey() {
      return new ByteArray(
          ByteArrayUtils.combineArrays(partitionKey.getBytes(), sortKey.getBytes()));
    }

    public ByteArray getGroupId() {
      return groupId;
    }

    public void setGroupId(final ByteArray groupId) {
      this.groupId = groupId;
    }

    public boolean isPrimary() {
      return isPrimary;
    }

    public PartitionData() {}

    public PartitionData(
        final ByteArray partitionKey,
        final ByteArray sortKey,
        final boolean primary) {
      super();
      this.partitionKey = partitionKey;
      this.sortKey = sortKey;
      isPrimary = primary;
    }

    @Override
    public String toString() {
      return "PartitionData [partitionKey="
          + Hex.encodeHexString(partitionKey.getBytes())
          + ", sortKey="
          + Hex.encodeHexString(sortKey.getBytes())
          + ", groupId="
          + (groupId == null ? "null" : groupId.getString())
          + ", isPrimary="
          + isPrimary
          + "]";
    }

    // equality/hashing is based on partitionKey and sortKey only; groupId and isPrimary are not
    // part of identity
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((partitionKey == null) ? 0 : partitionKey.hashCode());
      result = (prime * result) + ((sortKey == null) ? 0 : sortKey.hashCode());
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final PartitionData other = (PartitionData) obj;
      if (partitionKey == null) {
        if (other.partitionKey != null) {
          return false;
        }
      } else if (!partitionKey.equals(other.partitionKey)) {
        return false;
      }
      if (sortKey == null) {
        if (other.sortKey != null) {
          return false;
        }
      } else if (!sortKey.equals(other.sortKey)) {
        return false;
      }
      return true;
    }

    // Hadoop Writable deserialization; field order must mirror write() exactly.
    @Override
    public void readFields(final DataInput dInput) throws IOException {
      final int partitionKeySize = dInput.readInt();
      final byte[] partitionKeyBytes = new byte[partitionKeySize];
      dInput.readFully(partitionKeyBytes);
      partitionKey = new ByteArray(partitionKeyBytes);
      final int sortKeySize = dInput.readInt();
      final byte[] sortKeyBytes = new byte[sortKeySize];
      dInput.readFully(sortKeyBytes);
      sortKey = new ByteArray(sortKeyBytes);
      final int groupIdSize = dInput.readInt();
      if (groupIdSize > 0) {
        final byte[] groupIdIdBytes = new byte[groupIdSize];
        dInput.readFully(groupIdIdBytes);
        groupId = new ByteArray(groupIdIdBytes);
      }
      isPrimary = dInput.readBoolean();
    }

    // Hadoop Writable serialization; a null groupId is encoded as length 0.
    @Override
    public void write(final DataOutput dOutput) throws IOException {
      final byte[] outputPartitionKey = partitionKey.getBytes();
      dOutput.writeInt(outputPartitionKey.length);
      dOutput.write(outputPartitionKey);
      final byte[] outputSortKey = sortKey.getBytes();
      dOutput.writeInt(outputSortKey.length);
      dOutput.write(outputSortKey);
      if (groupId != null) {
        final byte[] groupOutputId = groupId.getBytes();
        dOutput.writeInt(groupOutputId.length);
        dOutput.write(groupOutputId);
      } else {
        dOutput.writeInt(0);
      }
      dOutput.writeBoolean(isPrimary);
    }

    public void setPrimary(final boolean isPrimary) {
      this.isPrimary = isPrimary;
    }
  }
}
================================================ FILE:
analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/BahmanEtAlSampleProbabilityFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample; /** * l * d^2(y,C)/phi_x(C) y is some point, C is a set of centroids and l is an oversampling factor. * As documented in section 3.3 in * *

Bahmani, Kumar, Moseley, Vassilvitskii and Vattani. Scalable K-means++. VLDB Endowment Vol. * 5, No. 7. 2012. */ public class BahmanEtAlSampleProbabilityFn implements SampleProbabilityFn { @Override public double getProbability( final double weight, final double normalizingConstant, final int sampleSize) { return ((sampleSize) * weight) / normalizingConstant; } @Override public boolean requiresConstant() { return true; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/RandomProbabilitySampleFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample; import java.util.Random; public class RandomProbabilitySampleFn implements SampleProbabilityFn { final Random random = new Random(); @Override public double getProbability( final double weight, final double normalizingConstant, final int sampleSize) { // HP Fortify "Insecure Randomness" false positive // This random number is not used for any purpose // related to security or cryptography return Math.log(random.nextDouble()) / (weight / normalizingConstant); } @Override public boolean requiresConstant() { return false; } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/SampleNotification.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample; public interface SampleNotification { public void notify(T item, boolean partial); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/SampleProbabilityFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample; public interface SampleProbabilityFn { public boolean requiresConstant(); public double getProbability(double weight, double normalizingConstant, int sampleSize); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/Sampler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample; import java.util.Collection; import java.util.SortedMap; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import com.google.common.collect.Maps; public class Sampler { private int sampleSize = 1; private int putLimit = 100000; private SampleProbabilityFn sampleProbabilityFn; public SampleProbabilityFn getSampleProbabilityFn() { return sampleProbabilityFn; } public void setSampleProbabilityFn(final SampleProbabilityFn sampleProbabilityFn) { this.sampleProbabilityFn = sampleProbabilityFn; } public int getSampleSize() { return sampleSize; } public void setSampleSize(final int sampleSize) { this.sampleSize = sampleSize; } public int getPutLimit() { return putLimit; } public void setPutLimit(final int putLimit) { this.putLimit = putLimit; } public void sample( final Iterable> pairings, final SampleNotification notification, final double normalizingConstant) { int putCounter = 0; final SortedMap reservoir = Maps.newTreeMap(); for (final CentroidPairing pairing : pairings) { final double weight = pairing.getDistance(); if (weight > 0.0) { final double score = sampleProbabilityFn.getProbability(weight, normalizingConstant, sampleSize); // could add extra to make sure new point is far enough away // from the rest if (reservoir.size() < sampleSize) { reservoir.put(score, pairing.getPairedItem().getWrappedItem()); putCounter++; } else if (score > reservoir.firstKey()) { reservoir.remove(reservoir.firstKey()); reservoir.put(score, pairing.getPairedItem().getWrappedItem()); } if (putCounter > putLimit) { // On the off-chance this gets huge, cleanup // Can occur if 
sampleSize > PUT_LIMIT notifyAll(notification, reservoir.values(), true); reservoir.clear(); putCounter = 0; } } } notifyAll(notification, reservoir.values(), false); } private void notifyAll( final SampleNotification notification, final Collection items, final boolean partial) { for (final T item : items) { notification.notify(item, partial); } } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/function/CentroidDistanceBasedSamplingRankFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample.function; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.SampleParameters; import org.locationtech.geowave.analytic.sample.RandomProbabilitySampleFn; import org.locationtech.geowave.analytic.sample.SampleProbabilityFn; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Rank objects using their distance to the closest centroid of a set of centroids. The specific * rank is determined by the probability of the point meeting being a centroid, modeled in the * implementation of {@link SampleProbabilityFn}. * *

The farther the distance, the higher the rank. * * Properties: *

"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.data_store_configuration" - The * class used to determine the prefix class name for te GeoWave Data Store parameters for a * connection to collect the starting set of centroids. Defaults to {@link * CentroidDistanceBasedSamplingRankFunction}. *

"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.probability_function" - * implementation of {@link SampleProbabilityFn} *

"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.distance_function" - {@link * DistanceFn} *

"CentroidDistanceBasedSamplingRankFunction.KMeansConfig.centroid_factory" - {@link * AnalyticItemWrapperFactory} to wrap the centroid data with the appropriate centroid wrapper * {@link AnalyticItemWrapper} * *

See {@link GeoWaveConfiguratorBase} for information for configuration GeoWave Data Store * for consumption of starting set of centroids. * @param The data type for the object being sampled */ public class CentroidDistanceBasedSamplingRankFunction implements SamplingRankFunction { protected static final Logger LOGGER = LoggerFactory.getLogger(CentroidDistanceBasedSamplingRankFunction.class); private SampleProbabilityFn sampleProbabilityFn; private NestedGroupCentroidAssignment nestedGroupCentroidAssigner; private final Map groupToConstant = new HashMap<>(); protected AnalyticItemWrapperFactory itemWrapperFactory;; public static void setParameters( final Configuration config, final Class scope, final PropertyManagement runTimeProperties) { NestedGroupCentroidAssignment.setParameters(config, scope, runTimeProperties); runTimeProperties.setConfig( new ParameterEnum[] { SampleParameters.Sample.PROBABILITY_FUNCTION, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,}, config, scope); } @SuppressWarnings("unchecked") @Override public void initialize(final JobContext context, final Class scope, final Logger logger) throws IOException { final ScopedJobConfiguration config = new ScopedJobConfiguration(context.getConfiguration(), scope); try { sampleProbabilityFn = config.getInstance( SampleParameters.Sample.PROBABILITY_FUNCTION, SampleProbabilityFn.class, RandomProbabilitySampleFn.class); } catch (final Exception e) { throw new IOException(e); } try { itemWrapperFactory = config.getInstance( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); itemWrapperFactory.initialize(context, scope, logger); } catch (final Exception e1) { throw new IOException(e1); } try { nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<>(context, scope, logger); } catch (final Exception e1) { throw new IOException(e1); } } /** */ @Override public double rank(final int sampleSize, final T value) { final 
AnalyticItemWrapper item = itemWrapperFactory.create(value); final List> centroids = new ArrayList<>(); double weight; try { weight = nestedGroupCentroidAssigner.findCentroidForLevel(item, new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { try { centroids.addAll( nestedGroupCentroidAssigner.getCentroidsForGroup( pairing.getCentroid().getGroupID())); } catch (final IOException e) { throw new RuntimeException(e); } } }); } catch (final IOException e) { throw new RuntimeException(e); } return sampleProbabilityFn.getProbability( weight, getNormalizingConstant(centroids.get(0).getGroupID(), centroids), sampleSize); } private double getNormalizingConstant( final String groupID, final List> centroids) { if (!groupToConstant.containsKey(groupID)) { double constant = 0.0; for (final AnalyticItemWrapper centroid : centroids) { constant += centroid.getCost(); } groupToConstant.put(groupID, constant); } return groupToConstant.get(groupID).doubleValue(); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/function/RandomSamplingRankFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample.function; import java.io.IOException; import java.util.Random; import org.apache.hadoop.mapreduce.JobContext; import org.slf4j.Logger; /** * Pick any object at random by assigning a random weight over a uniform distribution. * * @param */ public class RandomSamplingRankFunction implements SamplingRankFunction { private final Random random = new Random(); @Override public void initialize(final JobContext context, final Class scope, final Logger logger) throws IOException {} @Override public double rank(final int sampleSize, final T value) { // HP Fortify "Insecure Randomness" false positive // This random number is not used for any purpose // related to security or cryptography return random.nextDouble(); } } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/sample/function/SamplingRankFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.sample.function; import java.io.IOException; import org.apache.hadoop.mapreduce.JobContext; import org.slf4j.Logger; /** * Used to rank an object for selection in the sample set. The top K highest ranked objects are * sampled. Rank is between 0.0 and 1.0 inclusive. */ public interface SamplingRankFunction { public void initialize(final JobContext context, Class scope, Logger logger) throws IOException; public double rank(final int sampleSize, T value); } ================================================ FILE: analytics/api/src/main/java/org/locationtech/geowave/analytic/store/PersistableStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.store; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; public class PersistableStore implements Persistable { // Using this here instead of raw DataStorePluginOptions, so we can // use the convenient methods private DataStorePluginOptions pluginOptions; public PersistableStore() {} public PersistableStore(final DataStorePluginOptions options) { pluginOptions = options; } public DataStorePluginOptions getDataStoreOptions() { return pluginOptions; } @Override public byte[] toBinary() { // Persist final Properties strOptions = new Properties(); pluginOptions.save(strOptions, null); final List strOptionsBinary = new ArrayList<>(strOptions.size()); int optionsLength = 0; for (final String key : strOptions.stringPropertyNames()) { final byte[] keyBinary = StringUtils.stringToBinary(key); final byte[] valueBinary = StringUtils.stringToBinary(strOptions.getProperty(key)); final int entryLength = keyBinary.length + valueBinary.length + VarintUtils.unsignedIntByteLength(keyBinary.length) + VarintUtils.unsignedIntByteLength(valueBinary.length); final ByteBuffer buf = ByteBuffer.allocate(entryLength); VarintUtils.writeUnsignedInt(keyBinary.length, buf); buf.put(keyBinary); VarintUtils.writeUnsignedInt(valueBinary.length, buf); buf.put(valueBinary); strOptionsBinary.add(buf.array()); 
optionsLength += entryLength; } optionsLength += VarintUtils.unsignedIntByteLength(strOptionsBinary.size()); final ByteBuffer buf = ByteBuffer.allocate(optionsLength); VarintUtils.writeUnsignedInt(strOptionsBinary.size(), buf); for (final byte[] strOption : strOptionsBinary) { buf.put(strOption); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int configOptionLength = VarintUtils.readUnsignedInt(buf); final Properties configOptions = new Properties(); for (int i = 0; i < configOptionLength; i++) { final int keyLength = VarintUtils.readUnsignedInt(buf); final byte[] keyBinary = new byte[keyLength]; buf.get(keyBinary); final int valueLength = VarintUtils.readUnsignedInt(buf); final byte[] valueBinary = new byte[valueLength]; buf.get(valueBinary); configOptions.put( StringUtils.stringFromBinary(keyBinary), StringUtils.stringFromBinary(valueBinary)); } pluginOptions = new DataStorePluginOptions(); pluginOptions.load(configOptions, null); } } ================================================ FILE: analytics/api/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.analytic.AnalyticPersistableRegistry ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/AnalyticFeatureTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.geotools.feature.type.BasicFeatureTypes; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.junit.Test; import org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.io.ParseException; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.filter.Filter; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; public class AnalyticFeatureTest { @Test public void testGeometryCreation() throws MismatchedDimensionException, NoSuchAuthorityCodeException, FactoryException, CQLException, ParseException { final SimpleFeatureType ftype = AnalyticFeature.createGeometryFeatureAdapter( "centroid", new String[] {"extra1"}, BasicFeatureTypes.DEFAULT_NAMESPACE, ClusteringUtils.CLUSTERING_CRS).getFeatureType(); final GeometryFactory factory = new GeometryFactory(); SimpleFeature feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "123", "fred", "NA", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); 
assertEquals( new Coordinate(02.33, 0.23), ((Geometry) feature.getDefaultGeometry()).getCoordinate()); System.out.println(((Geometry) feature.getDefaultGeometry()).getPrecisionModel()); System.out.println(((Geometry) feature.getDefaultGeometry()).getEnvelope()); feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "123", "fred", "NA", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 10, 1, 0); assertEquals( new Coordinate(02.33, 0.23), ((Geometry) feature.getDefaultGeometry()).getCoordinate()); assertEquals( "geometry", feature.getFeatureType().getGeometryDescriptor().getName().getLocalPart()); assertEquals( new Integer(10), feature.getAttribute(ClusterFeatureAttribute.ZOOM_LEVEL.attrName())); Filter gtFilter = ECQL.toFilter("BBOX(geometry,2,0,3,1) and level = 10"); assertTrue(gtFilter.evaluate(feature)); gtFilter = ECQL.toFilter("BBOX(geometry,2,0,3,1) and level = 9"); assertFalse(gtFilter.evaluate(feature)); gtFilter = ECQL.toFilter("BBOX(geometry,2,0,3,1) and batchID = 'b1'"); assertTrue(gtFilter.evaluate(feature)); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryCalculationsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.List; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.junit.Test; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.TransformException; import tech.units.indriya.unit.Units; public class GeometryCalculationsTest { @Test public void test() throws NoSuchAuthorityCodeException, FactoryException, TransformException { final CoordinateReferenceSystem crs = CRS.decode("EPSG:4326", true); final GeometryCalculations calculator = new GeometryCalculations(crs); List geos = calculator.buildSurroundingGeometries( new double[] {50000, 50000}, Units.METRE, new Coordinate(30, 30)); assertEquals(1, geos.size()); Geometry geo = geos.get(0); double lastDist = Double.NaN; Coordinate lastCoord = null; for (final Coordinate coord : geo.getCoordinates()) { if (lastCoord != null) { final double dist = JTS.orthodromicDistance(lastCoord, coord, crs); // scaling on the globe...so not perfect square assertEquals(Math.abs(dist), 100000, 500); } final double dist = JTS.orthodromicDistance(geo.getCentroid().getCoordinate(), coord, crs); // distances are roughly even to all corners if (!Double.isNaN(lastDist)) { assertTrue(Math.abs(dist - lastDist) < 200); } lastDist = dist; 
lastCoord = coord; } Envelope envelope = geo.getEnvelopeInternal(); assertTrue(envelope.getMaxX() > 30); assertTrue(envelope.getMinX() < 30); assertTrue(envelope.getMaxY() > 30); assertTrue(envelope.getMinX() < 30); geos = calculator.buildSurroundingGeometries( new double[] {100000, 100000}, Units.METRE, new Coordinate(179.9999999996, 0)); assertEquals(2, geos.size()); geo = geos.get(0); envelope = geo.getEnvelopeInternal(); assertTrue((envelope.getMaxX() < -179) && (envelope.getMaxX() > -180)); assertEquals(-180.0, envelope.getMinX(), 0.0000001); geo = geos.get(1); envelope = geo.getEnvelopeInternal(); assertTrue((envelope.getMinX() < 180) && (envelope.getMinX() > 179)); assertEquals(180.0, envelope.getMaxX(), 0.0000001); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryDataSetGeneratorTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import static org.junit.Assert.assertEquals; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.junit.Test; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; public class GeometryDataSetGeneratorTest { private SimpleFeatureBuilder getBuilder() { final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder(); typeBuilder.setName("test"); typeBuilder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate // reference // add attributes in order typeBuilder.add("geom", Geometry.class); typeBuilder.add("name", String.class); typeBuilder.add("count", Long.class); // build the type return new SimpleFeatureBuilder(typeBuilder.buildFeatureType()); } @Test public void test() { final GeometryDataSetGenerator dataGenerator = new GeometryDataSetGenerator(new FeatureCentroidDistanceFn(), getBuilder()); final Geometry region = dataGenerator.getBoundingRegion(); final Coordinate[] coordinates = region.getBoundary().getCoordinates(); assertEquals(5, coordinates.length); assertEquals("POLYGON ((-180 -90, 180 -90, 180 90, -180 90, -180 -90))", region.toString()); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryGenerator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import java.util.Iterator; import java.util.List; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.CoordinateList; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; public class GeometryGenerator { public static interface DistortationFn { double distort(); } /** * @param count * @param distanceactors * @param distortationFn * @param delta * @param env * @return */ public static Iterator generate( final int count, final List distanceactors, final DistortationFn distortationFn, final double delta, final Envelope env) { // Create the star-ellipses for intersections later on return new Iterator() { int currentCount = 0; GeometryFactory geometryFactory = new GeometryFactory(); @Override public boolean hasNext() { return currentCount < count; } @Override public Geometry next() { // Thanks to Chris Bennight for the foundations of this code. 
currentCount++; final double cx = env.centre().x * distortationFn.distort(); final double cy = env.centre().y * distortationFn.distort(); final double dx = env.getWidth() * distortationFn.distort(); final double dy = env.getHeight() * distortationFn.distort(); // We will use a coordinate list to build the linear ring final CoordinateList clist = new CoordinateList(); double angle = 0.0; for (int i = 0; angle < 360; angle += (delta * distortationFn.distort()) + delta, i++) { final double a = distanceactors.get(i % distanceactors.size()) * dx * distortationFn.distort(); // double b = distanceactors.get(i % distanceactors.size()) // * dy * distortationFn.distort(); clist.add( new Coordinate( cx + (a * Math.sin(Math.toRadians(angle))), cy + (a * Math.cos(Math.toRadians(angle))))); } clist.add(clist.get(0)); return geometryFactory.createPolygon(clist.toCoordinateArray()); } @Override public void remove() {} }; } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/GeometryHullToolTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Random; import org.junit.Test; import org.locationtech.geowave.analytic.GeometryDataSetGenerator.CurvedDensityDataGeneratorTool; import org.locationtech.geowave.analytic.GeometryGenerator.DistortationFn; import org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn; import org.locationtech.jts.algorithm.ConvexHull; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GeometryHullToolTest { protected static final Logger LOGGER = LoggerFactory.getLogger(GeometryHullToolTest.class); GeometryFactory factory = new GeometryFactory(); @Test public void testDistance() { final double distance1 = GeometryHullTool.calcDistance( new Coordinate(3, 3), new Coordinate(6, 6), new Coordinate(5, 5.5)); final double distance2 = GeometryHullTool.calcDistance( new Coordinate(3, 3), new Coordinate(6, 6), new Coordinate(5, 4.5)); assertEquals(distance1, distance2, 0.0001); final double distance3 = GeometryHullTool.calcDistance( new Coordinate(4, 6), new Coordinate(6, 12), new Coordinate(5, 8)); 
assertTrue(distance3 > 0); final double distance4 = GeometryHullTool.calcDistance( new Coordinate(4, 6), new Coordinate(6, 12), new Coordinate(5, 9)); assertEquals(0.0, distance4, 0.001); final double distance5 = GeometryHullTool.calcDistance( new Coordinate(5, 7), new Coordinate(11, 3), new Coordinate(6, 10)); assertTrue(distance5 < 0); final double distance6 = GeometryHullTool.calcDistance( new Coordinate(5, 7), new Coordinate(11, 3), new Coordinate(7, 6.5)); final double distance7 = GeometryHullTool.calcDistance( new Coordinate(5, 7), new Coordinate(11, 3), new Coordinate(7, 5.0)); assertTrue(distance7 < distance6); } @Test public void testAngles() { assertTrue( GeometryHullTool.calcAngle( new Coordinate(39, 41.5), new Coordinate(41, 41), new Coordinate(38, 41.2)) > 0); assertTrue( GeometryHullTool.calcAngle( new Coordinate(39, 41.5), new Coordinate(41, 41), new Coordinate(38, 43)) < 0); assertTrue( GeometryHullTool.calcAngle( new Coordinate(39, 41.5), new Coordinate(41, 41), new Coordinate(38, 41.2)) < GeometryHullTool.calcAngle( new Coordinate(39, 41.5), new Coordinate(41, 41), new Coordinate(38, 41.1))); assertTrue( GeometryHullTool.calcAngle( new Coordinate(39, 41.5), new Coordinate(41, 41), new Coordinate(38, 43)) > GeometryHullTool.calcAngle( new Coordinate(39, 41.5), new Coordinate(41, 41), new Coordinate(38, 44))); assertTrue( GeometryHullTool.calcAngle( new Coordinate(42, 42), new Coordinate(41, 41), new Coordinate(42.5, 44)) > 0); assertTrue( GeometryHullTool.calcAngle( new Coordinate(42, 42), new Coordinate(41, 41), new Coordinate(42.5, 40.5)) < 0); assertEquals( -90.0, GeometryHullTool.calcAngle( new Coordinate(41, 42), new Coordinate(41, 41), new Coordinate(42, 41)), 0.001); assertEquals( 90.0, GeometryHullTool.calcAngle( new Coordinate(42, 41), new Coordinate(41, 41), new Coordinate(41, 42)), 0.001); assertEquals( -180, GeometryHullTool.calcAngle( new Coordinate(42, 42), new Coordinate(41, 41), new Coordinate(40, 40)), 0.001); assertEquals( 0, 
GeometryHullTool.calcAngle( new Coordinate(42, 42), new Coordinate(41, 41), new Coordinate(42, 42)), 0.001); assertEquals( -315, GeometryHullTool.calcAngle( new Coordinate(41, 41), new Coordinate(42, 41), new Coordinate(41, 40)), 0.001); assertEquals( -45, GeometryHullTool.calcAngle( new Coordinate(42, 41), new Coordinate(41, 41), new Coordinate(42, 40)), 0.001); assertEquals( -45, GeometryHullTool.calcAngle( new Coordinate(41, 42), new Coordinate(41, 41), new Coordinate(42, 42)), 0.001); } @Test public void testConcaveHullBulkTest() { long time = System.currentTimeMillis(); for (int i = 0; i < 10; i++) { assertTrue( getHull( factory.createLineString( new Coordinate[] {new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6)}), "po1", false, true).isSimple() || true); } System.out.println(System.currentTimeMillis() - time); time = System.currentTimeMillis(); for (int i = 0; i < 10; i++) { assertTrue( getHull( factory.createLineString( new Coordinate[] {new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6)}), "er1", false, false).isSimple() || true); } System.out.println(System.currentTimeMillis() - time); } private final Random r = new Random(7777); private Coordinate pickOneAndAugmentOne(final Coordinate[] list) { final Coordinate select = list[(Math.abs(r.nextInt()) % list.length)]; return new Coordinate(select.x + r.nextGaussian(), select.y + r.nextGaussian(), select.z); } final Coordinate[] poly1 = new Coordinate[] { new Coordinate(40, 40), new Coordinate(40.1, 40.1), new Coordinate(39.2, 41.2), // selected top (2) new Coordinate(39, 40.7), new Coordinate(38.7, 40.1), new Coordinate(38.4, 39.5), new Coordinate( // selected bottom (6) 39.3, 39.2), new Coordinate(40, 40)}; final Coordinate[] poly2 = new Coordinate[] { new Coordinate(40.2, 40), new Coordinate(40.5, 41), // selected // top // (1) new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6), new Coordinate(40.6, 39.6), new Coordinate(40.3, 39.8), // selected // bottom(5) new Coordinate(40.2, 40)}; @Test 
public void testLRPolygons() { final Geometry leftShape = factory.createPolygon(poly1); final Geometry rightShape = factory.createPolygon(poly2); assertTrue(GeometryHullTool.clockwise(leftShape.getCoordinates())); assertFalse(GeometryHullTool.clockwise(rightShape.getCoordinates())); final GeometryHullTool cg = new GeometryHullTool(); cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn()); final Geometry geo = cg.connect(leftShape, rightShape); assertEquals( "POLYGON ((39.2 41.2, 39 40.7, 38.7 40.1, 38.4 39.5, 39.3 39.2, 40.6 39.6, 40.8 40.6, 41.2 40.8, 40.5 41, 39.2 41.2))", geo.toString()); } @Test public void testRLPolygons() { final Geometry leftShape = factory.createPolygon(poly2); final Geometry rightShape = factory.createPolygon(poly1); assertFalse(GeometryHullTool.clockwise(leftShape.getCoordinates())); assertTrue(GeometryHullTool.clockwise(rightShape.getCoordinates())); final GeometryHullTool cg = new GeometryHullTool(); cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn()); final Geometry geo = cg.connect(leftShape, rightShape); assertEquals( "POLYGON ((39.2 41.2, 39 40.7, 38.7 40.1, 38.4 39.5, 39.3 39.2, 40.6 39.6, 40.8 40.6, 41.2 40.8, 40.5 41, 39.2 41.2))", geo.toString()); } public void testRandomConnect() throws IOException { final GeometryHullTool cg = new GeometryHullTool(); cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn()); final Iterator it1 = GeometryGenerator.generate(1000, Arrays.asList(1.0), new DistortationFn() { final Random r = new Random(7777); @Override public double distort() { return 0.5 + (0.5 * r.nextDouble()); } }, 5, new Envelope(45, 55, 35, 45)); final Iterator it2 = GeometryGenerator.generate(1000, Arrays.asList(1.0), new DistortationFn() { final Random r = new Random(7777); @Override public double distort() { return 0.5 + (0.5 * r.nextDouble()); } }, 5, new Envelope(30, 47, 20, 37)); while (it1.hasNext()) { Geometry rightShape = it1.next(); Geometry leftShape = it2.next(); if 
(rightShape.intersects(leftShape)) { final Geometry inter = rightShape.intersection(leftShape); rightShape = rightShape.difference(inter); leftShape = leftShape.difference(inter); } ShapefileTool.writeShape( "test_random", new File("./target/test_randoms"), new Geometry[] {leftShape, rightShape}); Geometry geo = cg.connect(leftShape, rightShape); ShapefileTool.writeShape( "test_random", new File("./target/test_random"), new Geometry[] {geo}); if (!geo.isSimple()) { // assertTrue(false); geo = cg.connect(leftShape, rightShape); ShapefileTool.writeShape( "test_random2", new File("./target/test_random2"), new Geometry[] {geo}); } } } private Coordinate[] reversed(final Coordinate[] poly) { final Coordinate polyReversed[] = new Coordinate[poly.length]; for (int i = 0; i < poly.length; i++) { polyReversed[i] = poly[poly.length - i - 1]; } return polyReversed; } @Test public void interesectEdges() { final GeometryHullTool.Edge e1 = new GeometryHullTool.Edge(new Coordinate(20.0, 20.0), new Coordinate(21.5, 21), 0); final GeometryHullTool.Edge e2 = new GeometryHullTool.Edge(new Coordinate(20.4, 19.0), new Coordinate(21.0, 22), 0); assertTrue(GeometryHullTool.edgesIntersect(e1, e2)); final GeometryHullTool.Edge e3 = new GeometryHullTool.Edge(new Coordinate(20.4, 19.0), new Coordinate(21.0, 19.5), 0); assertTrue(!GeometryHullTool.edgesIntersect(e1, e3)); } @Test public void testRLSamePolygons() { final Geometry leftShape = factory.createPolygon(reversed(poly1)); final Geometry rightShape = factory.createPolygon(reversed(poly2)); assertFalse(GeometryHullTool.clockwise(leftShape.getCoordinates())); assertTrue(GeometryHullTool.clockwise(rightShape.getCoordinates())); final GeometryHullTool cg = new GeometryHullTool(); cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn()); final Geometry geo = cg.connect(leftShape, rightShape); assertEquals( "POLYGON ((39.2 41.2, 39 40.7, 38.7 40.1, 38.4 39.5, 39.3 39.2, 40.6 39.6, 40.8 40.6, 41.2 40.8, 40.5 41, 39.2 41.2))", 
geo.toString()); } @Test public void testPolygonConnection() { final boolean save = true; final Geometry concave1 = getHull( factory.createLineString( new Coordinate[] {new Coordinate(41.2, 40.8), new Coordinate(40.8, 40.6)}), "p1", save, false); final Geometry concave2 = getHull( factory.createLineString( new Coordinate[] {new Coordinate(39.9, 40.6), new Coordinate(40.8, 40.6)}), "p2", save, false); final Geometry concave3 = getHull( factory.createLineString( new Coordinate[] {new Coordinate(42.0, 42.0), new Coordinate(41.2, 40.8)}), "p3", save, false); final Geometry hull = concave1.union(concave2).union(concave3); assertTrue(hull.isSimple()); writeToShapeFile("final_phull", hull); coversPoints(hull, concave1); coversPoints(hull, concave2); coversPoints(hull, concave3); } private Geometry getHull( final LineString str, final String name, final boolean save, final boolean parkandOh) { final List points = CurvedDensityDataGeneratorTool.generatePoints(str, 0.4, 1000); final GeometryHullTool cg = new GeometryHullTool(); cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn()); final Coordinate[] coordinates = new Coordinate[points.size()]; int i = 0; for (final Point point : points) { coordinates[i++] = point.getCoordinate(); } final ConvexHull convexHull = new ConvexHull(coordinates, factory); final Geometry concaveHull = parkandOh ? 
cg.concaveHullParkOhMethod(convexHull.getConvexHull(), Arrays.asList(coordinates)) : cg.concaveHull(convexHull.getConvexHull(), Arrays.asList(coordinates)); if (save || !concaveHull.isSimple()) { writeToShapeFile("setx_" + name, points.toArray(new Geometry[points.size()])); writeToShapeFile("chullx_" + name, concaveHull); writeToShapeFile("hullx_" + name, convexHull.getConvexHull()); } // final Geometry concaveHull1 = cg.concaveHull1( // convexHull.getConvexHull(), // Arrays.asList(coordinates)); // if (save || !concaveHull1.isSimple()) { // writeToShapeFile( // "chull_" + name, // concaveHull1); // } return concaveHull; } private static void writeToShapeFile(final String name, final Geometry... geos) { if (true) { // LOGGER.isDebugEnabled()) { try { ShapefileTool.writeShape(name, new File("./target/test_" + name), geos); } catch (final IOException e) { e.printStackTrace(); } } } private static boolean coversPoints(final Geometry coverer, final Geometry pointsToCover) { for (final Coordinate coordinate : pointsToCover.getCoordinates()) { if (!coverer.covers(coverer.getFactory().createPoint(coordinate))) { return false; } } return true; } @Test public void testCreateHullFromGeometry() { final GeometryHullTool cg = new GeometryHullTool(); cg.setDistanceFnForCoordinate(new CoordinateCircleDistanceFn()); for (int i = 2; i < 10; i++) { final Coordinate[] coords = new Coordinate[i]; for (int p = 0; p < i; p++) { coords[p] = new Coordinate(p, p); } final Geometry lineString1 = factory.createLineString(coords); final Geometry concaveHull1 = cg.createHullFromGeometry(lineString1, Arrays.asList(coords[0]), true); assertEquals( "straigh line size=" + i + " geo=" + lineString1.toText(), 2, concaveHull1.getCoordinates().length); } final Geometry lineString3 = factory.createLineString( new Coordinate[] {new Coordinate(1, 1), new Coordinate(2, 2), new Coordinate(3, 1)}); final Geometry concaveHull3 = cg.createHullFromGeometry( lineString3, 
Arrays.asList(lineString3.getCoordinates()[0]), true); assertEquals( "expecting a triangle " + concaveHull3.toText(), 4, concaveHull3.getCoordinates().length); assertTrue("expecting a triangle " + concaveHull3.toText(), concaveHull3.getArea() > 0.0); assertTrue(concaveHull3.isSimple()); assertEquals( "expecting identical result", lineString3, cg.createHullFromGeometry(lineString3, Collections.emptyList(), true)); final Geometry[] newPoints = new Geometry[900]; for (int j = 0; j < 10; j++) { final Coordinate[] newCoords = new Coordinate[900]; final Coordinate[] geoCoords = new Coordinate[100]; final Random rand = new Random(73634 + j); for (int i = 0; i < 100; i++) { geoCoords[i] = new Coordinate(rand.nextGaussian() * 0.001, rand.nextGaussian() * 0.001); } for (int i = 0; i < 900; i++) { newCoords[i] = new Coordinate(rand.nextGaussian() * 0.01, rand.nextGaussian() * 0.01); newPoints[i] = factory.createPoint(newCoords[i]); } final ConvexHull hull = new ConvexHull(geoCoords, factory); final Geometry concaveHull = cg.createHullFromGeometry(hull.getConvexHull(), Arrays.asList(newCoords), true); assertTrue(concaveHull.isSimple()); int error = 0; for (final Geometry newPoint : newPoints) { error += concaveHull.intersects(newPoint) ? 0 : 1; } assertTrue(error < 3); final Geometry concaveHull2 = cg.createHullFromGeometry(hull.getConvexHull(), Arrays.asList(newCoords), false); assertTrue(concaveHull2.isSimple()); error = 0; for (final Geometry newPoint : newPoints) { error += concaveHull2.intersects(newPoint) ? 0 : 1; } assertTrue(error < 1); } } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/PropertyManagementTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.JobContext; import org.junit.Test; import org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor; import org.locationtech.geowave.analytic.param.BasicParameterHelper; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.InputParameters.Input; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.ParameterHelper; import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; public class PropertyManagementTest { final GeometryFactory factory = new GeometryFactory(); @Test public void testBulk() throws Exception { final PropertyManagement pm = new PropertyManagement(); pm.storeAll( new ParameterEnum[] {ExtractParameters.Extract.DATA_NAMESPACE_URI}, new Serializable[] 
{"file:///foo"}); } @Test public void testInt() throws Exception { final PropertyManagement pm = new PropertyManagement(); pm.storeAll( new ParameterEnum[] {ExtractParameters.Extract.MAX_INPUT_SPLIT}, new Serializable[] {"3"}); assertEquals(new Integer(3), pm.getProperty(ExtractParameters.Extract.MAX_INPUT_SPLIT)); } @Test public void testClass() throws Exception { final PropertyManagement pm = new PropertyManagement(); pm.storeAll( new ParameterEnum[] {ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS}, new Serializable[] {"org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor"}); assertEquals( EmptyDimensionExtractor.class, pm.getPropertyAsClass(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS)); ((ParameterEnum) ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS).getHelper().setValue( pm, "org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor"); assertEquals( EmptyDimensionExtractor.class, pm.getProperty(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS)); } @Test(expected = IllegalArgumentException.class) public void testClassFailure() { final PropertyManagement pm = new PropertyManagement(); pm.storeAll( new ParameterEnum[] {ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS}, new Serializable[] { "org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn"}); pm.getPropertyAsClass(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS); } @Test public void testQuery() throws Exception { final Geometry testGeoFilter = factory.createPolygon( new Coordinate[] { new Coordinate(24, 33), new Coordinate(28, 33), new Coordinate(28, 31), new Coordinate(24, 31), new Coordinate(24, 33)}); final ExplicitSpatialQuery sq = new ExplicitSpatialQuery(testGeoFilter); final PropertyManagement pm = new PropertyManagement(); pm.store(ExtractParameters.Extract.QUERY, QueryBuilder.newBuilder().constraints(sq).build()); final Query q = pm.getPropertyAsQuery(ExtractParameters.Extract.QUERY); assertNotNull(q); final QueryConstraints c = 
q.getQueryConstraints(); assertNotNull(c); assertNotNull(((ExplicitSpatialQuery) c).getQueryGeometry()); assertEquals( "POLYGON ((24 33, 28 33, 28 31, 24 31, 24 33))", ((ExplicitSpatialQuery) c).getQueryGeometry().toText()); pm.store(ExtractParameters.Extract.QUERY, q); final Query q1 = (Query) pm.getPropertyAsPersistable(ExtractParameters.Extract.QUERY); assertNotNull(q1); final QueryConstraints c1 = q1.getQueryConstraints(); assertNotNull(c1); assertNotNull(((ExplicitSpatialQuery) c1).getQueryGeometry()); assertEquals( "POLYGON ((24 33, 28 33, 28 31, 24 31, 24 33))", ((ExplicitSpatialQuery) c1).getQueryGeometry().toText()); } @Test public void testPath() throws Exception { final PropertyManagement pm = new PropertyManagement(); final Path path1 = new Path("http://java.sun.com/j2se/1.3/foo"); pm.store(Input.HDFS_INPUT_PATH, path1); final Path path2 = pm.getPropertyAsPath(Input.HDFS_INPUT_PATH); assertEquals(path1, path2); pm.store(Input.HDFS_INPUT_PATH, "x/y/z"); assertEquals(new Path("x/y/z"), pm.getPropertyAsPath(Input.HDFS_INPUT_PATH)); } public static class NonSerializableExample { int v = 1; } enum MyLocalNSEnum implements ParameterEnum { ARG1; @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return new ParameterHelper() { /** */ private static final long serialVersionUID = 1L; @Override public Class getBaseClass() { return NonSerializableExample.class; } @Override public void setValue( final Configuration config, final Class scope, final NonSerializableExample value) {} @Override public NonSerializableExample getValue( final JobContext context, final Class scope, final NonSerializableExample defaultValue) { return null; } @Override public NonSerializableExample getValue(final PropertyManagement propertyManagement) { return null; } @Override public void setValue( final PropertyManagement propertyManagement, final NonSerializableExample value) {} }; } } @Test public void testOtherConverter() throws Exception { final 
PropertyManagement.PropertyConverter converter = new PropertyManagement.PropertyConverter() { /** */ private static final long serialVersionUID = 1L; @Override public Serializable convert(final NonSerializableExample ob) throws Exception { return Integer.valueOf(1); } @Override public NonSerializableExample convert(final Serializable ob) throws Exception { assertTrue(ob instanceof Integer); return new NonSerializableExample(); } @Override public Class baseClass() { return NonSerializableExample.class; } }; final PropertyManagement pm = new PropertyManagement( new PropertyManagement.PropertyConverter[] {converter}, new ParameterEnum[] {MyLocalNSEnum.ARG1}, new Object[] {new NonSerializableExample()}); assertTrue(pm.getProperty(MyLocalNSEnum.ARG1, converter) instanceof NonSerializableExample); } @Test public void testStore() throws Exception { final PropertyManagement pm = new PropertyManagement(); pm.store( ExtractParameters.Extract.QUERY, QueryBuilder.newBuilder().addTypeName("adapterId").indexName("indexId").build()); assertEquals( QueryBuilder.newBuilder().addTypeName("adapterId").indexName("indexId").build(), pm.getPropertyAsQuery(ExtractParameters.Extract.QUERY)); final Path path1 = new Path("http://java.sun.com/j2se/1.3/foo"); pm.store(Input.HDFS_INPUT_PATH, path1); final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try (ObjectOutputStream os = new ObjectOutputStream(bos)) { os.writeObject(pm); } final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); try (ObjectInputStream is = new ObjectInputStream(bis)) { final PropertyManagement pm2 = (PropertyManagement) is.readObject(); assertEquals( QueryBuilder.newBuilder().addTypeName("adapterId").indexName("indexId").build(), pm2.getPropertyAsQuery(ExtractParameters.Extract.QUERY)); assertEquals(path1, pm2.getPropertyAsPath(Input.HDFS_INPUT_PATH)); } } enum MyLocalBoolEnum implements ParameterEnum { BOOLEAN_ARG1(Boolean.class, "mi", "test id", false), BOOLEAN_ARG2(Boolean.class, 
"rd", "test id", false); private final ParameterHelper helper; MyLocalBoolEnum( final Class baseClass, final String name, final String description, final boolean hasArg) { helper = new BasicParameterHelper(this, baseClass, name, description, false, hasArg); } @Override public Enum self() { return this; } @Override public ParameterHelper getHelper() { return helper; } } @Test public void testStoreWithEmbedded() throws Exception { final PropertyManagement pm1 = new PropertyManagement(); pm1.store( ExtractParameters.Extract.QUERY, QueryBuilder.newBuilder().addTypeName("adapterId").indexName("indexId").build()); final PropertyManagement pm2 = new PropertyManagement(pm1); assertEquals( QueryBuilder.newBuilder().addTypeName("adapterId").indexName("indexId").build(), pm2.getPropertyAsQuery(ExtractParameters.Extract.QUERY)); final Path path1 = new Path("http://java.sun.com/j2se/1.3/foo"); pm2.store(Input.HDFS_INPUT_PATH, path1); final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try (ObjectOutputStream os = new ObjectOutputStream(bos)) { os.writeObject(pm2); } final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); try (ObjectInputStream is = new ObjectInputStream(bis)) { final PropertyManagement pm3 = (PropertyManagement) is.readObject(); assertEquals( QueryBuilder.newBuilder().addTypeName("adapterId").indexName("indexId").build(), pm2.getPropertyAsQuery(ExtractParameters.Extract.QUERY)); assertEquals(path1, pm3.getPropertyAsPath(Input.HDFS_INPUT_PATH)); } } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/SerializableAdapterStoreTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.analytic;

import static org.junit.Assert.assertNotNull;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.geotools.feature.type.BasicFeatureTypes;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.memory.MemoryAdapterStore;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Verifies that a {@link SerializableAdapterStore} wrapping an in-memory adapter store survives a
 * Java-serialization round trip and can still resolve its adapter afterwards.
 */
public class SerializableAdapterStoreTest {

  @Test
  public void testSerialization() throws ClassNotFoundException, IOException {
    // build a centroid feature type and wrap its adapter in a serializable store
    final SimpleFeatureType ftype =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroid",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);
    final SerializableAdapterStore store =
        new SerializableAdapterStore(new MemoryAdapterStore(new DataTypeAdapter[] {adapter}));
    final String id = "centroid";
    // the adapter must be resolvable from the deserialized copy
    assertNotNull(checkSerialization(store).getAdapter(id));
  }

  /** Serializes the store to a byte array and reads it back. */
  private SerializableAdapterStore checkSerialization(final SerializableAdapterStore store)
      throws IOException, ClassNotFoundException {
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (ObjectOutputStream objectOut = new ObjectOutputStream(buffer)) {
      objectOut.writeObject(store);
      objectOut.flush();
    }
    try (ObjectInputStream objectIn =
        new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
      return (SerializableAdapterStore) objectIn.readObject();
    }
  }
}

================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/SimpleFeatureCentroidExractorTest.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.analytic;

import static org.junit.Assert.assertEquals;
import java.util.List;
import java.util.UUID;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.junit.Test;
import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

/**
 * Verifies {@link SimpleFeatureCentroidExtractor} returns a centroid carrying the SRID declared by
 * the feature schema.
 */
public class SimpleFeatureCentroidExractorTest {
  SimpleFeatureCentroidExtractor extractor = new SimpleFeatureCentroidExtractor();

  @Test
  public void test() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType("testGeo", "location:Point:srid=4326,name:String");
    // build a feature populated with each attribute's default value
    // (parameterized List restores the stripped generic; raw List cannot feed the typed for-each)
    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      defaults[p++] = descriptor.getDefaultValue();
    }
    final SimpleFeature feature =
        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());
    final GeometryFactory geoFactory = new GeometryFactory();
    feature.setAttribute("location", geoFactory.createPoint(new Coordinate(-45, 45)));
    final Point point = extractor.getCentroid(feature);
    // the extracted centroid inherits the schema's declared SRID (EPSG:4326)
    assertEquals(4326, point.getSRID());
  }
}

================================================ FILE:
analytics/api/src/test/java/org/locationtech/geowave/analytic/clustering/CentroidManagerTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.List; import org.geotools.feature.type.BasicFeatureTypes; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class CentroidManagerTest { @Rule 
public TestName name = new TestName(); private void ingest( final DataStore dataStore, final FeatureDataAdapter adapter, final Index index, final SimpleFeature feature) throws IOException { dataStore.addType(adapter, index); try (Writer writer = dataStore.createWriter(adapter.getTypeName())) { writer.write(feature); } } @Test public void testSampleRecall() throws IOException { final SimpleFeatureType ftype = AnalyticFeature.createGeometryFeatureAdapter( "centroid", new String[] {"extra1"}, BasicFeatureTypes.DEFAULT_NAMESPACE, ClusteringUtils.CLUSTERING_CRS).getFeatureType(); final GeometryFactory factory = new GeometryFactory(); final String grp1 = "g1"; final String grp2 = "g2"; SimpleFeature feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "123", "fred", grp1, 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype); final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName(); final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily(); final StoreFactoryOptions opts = storeFamily.getDataStoreFactory().createOptionsInstance(); opts.setGeoWaveNamespace(namespace); final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(opts); final IndexStore indexStore = storeFamily.getIndexStoreFactory().createStore(opts); final PersistentAdapterStore adapterStore = storeFamily.getAdapterStoreFactory().createStore(opts); final InternalAdapterStore internalAdapterStore = storeFamily.getInternalAdapterStoreFactory().createStore(opts); ingest(dataStore, adapter, index, feature); feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "231", "flood", grp1, 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); ingest(dataStore, 
adapter, index, feature); feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "321", "flou", grp2, 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); ingest(dataStore, adapter, index, feature); feature = AnalyticFeature.createGeometryFeature( ftype, "b2", "312", "flapper", grp2, 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); ingest(dataStore, adapter, index, feature); // and one feature with a different zoom level feature = AnalyticFeature.createGeometryFeature( ftype, "b2", "312", "flapper", grp2, 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 2, 1, 0); ingest(dataStore, adapter, index, feature); CentroidManagerGeoWave manager = new CentroidManagerGeoWave<>( dataStore, indexStore, adapterStore, new SimpleFeatureItemWrapperFactory(), adapter.getTypeName(), internalAdapterStore.getAdapterId(adapter.getTypeName()), index.getName(), "b1", 1); List> centroids = manager.getCentroidsForGroup(null); assertEquals(3, centroids.size()); feature = centroids.get(0).getWrappedItem(); assertEquals(0.022, (Double) feature.getAttribute("extra1"), 0.001); centroids = manager.getCentroidsForGroup(grp1); assertEquals(2, centroids.size()); centroids = manager.getCentroidsForGroup(grp2); assertEquals(1, centroids.size()); feature = centroids.get(0).getWrappedItem(); assertEquals(0.022, (Double) feature.getAttribute("extra1"), 0.001); manager = new CentroidManagerGeoWave<>( dataStore, indexStore, adapterStore, new SimpleFeatureItemWrapperFactory(), adapter.getTypeName(), internalAdapterStore.getAdapterId(adapter.getTypeName()), index.getName(), "b1", 1); manager.processForAllGroups(new CentroidProcessingFn() { @Override public int processGroup( final String groupID, final List> centroids) { if (groupID.equals(grp1)) { assertEquals(2, centroids.size()); } else if (groupID.equals(grp2)) 
{ assertEquals(1, centroids.size()); } else { assertTrue("what group is this : " + groupID, false); } return 0; } }); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/clustering/DistortionGroupManagementTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.List; import org.geotools.feature.type.BasicFeatureTypes; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class DistortionGroupManagementTest 
{ @Rule public TestName name = new TestName(); final GeometryFactory factory = new GeometryFactory(); final SimpleFeatureType ftype; final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final FeatureDataAdapter adapter; final DataStorePluginOptions storePluginOptions; private void ingest(final DataTypeAdapter adapter, final Index index, final T entry) throws IOException { final DataStore store = storePluginOptions.createDataStore(); store.addType(adapter, index); try (Writer writer = store.createWriter(adapter.getTypeName())) { writer.write(entry); } } public DistortionGroupManagementTest() throws IOException { ftype = AnalyticFeature.createGeometryFeatureAdapter( "centroid", new String[] {"extra1"}, BasicFeatureTypes.DEFAULT_NAMESPACE, ClusteringUtils.CLUSTERING_CRS).getFeatureType(); adapter = new FeatureDataAdapter(ftype); final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName(); final StoreFactoryOptions opts = new MemoryStoreFactoryFamily().getDataStoreFactory().createOptionsInstance(); opts.setGeoWaveNamespace(namespace); storePluginOptions = new DataStorePluginOptions(opts); final DataStore store = storePluginOptions.createDataStore(); store.addType(adapter, index); } private void addDistortion( final String grp, final String batchId, final int count, final Double distortion) throws IOException { ingest( new DistortionDataAdapter(), DistortionGroupManagement.DISTORTIONS_INDEX, new DistortionEntry(grp, batchId, count, distortion)); } @Before public void setup() throws IOException { // big jump for grp1 between batch 2 and 3 // big jump for grp2 between batch 1 and 2 // thus, the jump occurs for different groups between different batches! 
// b1 addDistortion("grp1", "b1", 1, 0.1); addDistortion("grp2", "b1", 1, 0.1); // b2 addDistortion("grp1", "b1", 2, 0.2); addDistortion("grp2", "b1", 2, 0.3); // b3 addDistortion("grp1", "b1", 3, 0.4); addDistortion("grp2", "b1", 3, 0.4); // another batch to catch wrong batch error case addDistortion("grp1", "b2", 3, 0.05); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_1", "123", "fred", "grp1", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_1", "124", "barney", "grp1", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_1", "125", "wilma", "grp2", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_1", "126", "betty", "grp2", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_2", "130", "dusty", "grp1", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_2", "131", "dino", "grp1", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_2", "127", "bamm-bamm", "grp2", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_2", "128", "chip", "grp2", 20.30203, 
factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_3", "140", "pearl", "grp1", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_3", "141", "roxy", "grp1", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_3", "142", "giggles", "grp2", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); ingest( adapter, index, AnalyticFeature.createGeometryFeature( ftype, "b1_3", "143", "gazoo", "grp2", 20.30203, factory.createPoint(new Coordinate(02.33, 0.23)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0)); } @Test public void test() throws IOException { final DistortionGroupManagement distortionGroupManagement = new DistortionGroupManagement(storePluginOptions); distortionGroupManagement.retainBestGroups( new SimpleFeatureItemWrapperFactory(), adapter.getTypeName(), index.getName(), "b1", 1); final CentroidManagerGeoWave centroidManager = new CentroidManagerGeoWave<>( storePluginOptions.createDataStore(), storePluginOptions.createIndexStore(), storePluginOptions.createAdapterStore(), new SimpleFeatureItemWrapperFactory(), adapter.getTypeName(), storePluginOptions.createInternalAdapterStore().getAdapterId(adapter.getTypeName()), index.getName(), "b1", 1); final List groups = centroidManager.getAllCentroidGroups(); assertEquals(2, groups.size()); final boolean groupFound[] = new boolean[2]; for (final String grpId : groups) { final List> items = centroidManager.getCentroidsForGroup(grpId); assertEquals(2, items.size()); if ("grp1".equals(grpId)) { groupFound[0] = true; 
assertTrue("pearl".equals(items.get(0).getName()) || "roxy".equals(items.get(0).getName())); } else if ("grp2".equals(grpId)) { groupFound[1] = true; assertTrue( "chip".equals(items.get(0).getName()) || "bamm-bamm".equals(items.get(0).getName())); } } // each unique group is found? int c = 0; for (final boolean gf : groupFound) { c += (gf ? 1 : 0); } assertEquals(2, c); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/clustering/NestedGroupCentroidAssignmentTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.clustering; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.geotools.feature.type.BasicFeatureTypes; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import 
org.opengis.feature.simple.SimpleFeatureType; public class NestedGroupCentroidAssignmentTest { @Rule public TestName name = new TestName(); private void ingest( final DataStore dataStore, final DataTypeAdapter adapter, final Index index, final T entry) throws IOException { dataStore.addType(adapter, index); try (Writer writer = dataStore.createWriter(adapter.getTypeName())) { writer.write(entry); } } @Test public void test() throws IOException { final SimpleFeatureType ftype = AnalyticFeature.createGeometryFeatureAdapter( "centroid", new String[] {"extra1"}, BasicFeatureTypes.DEFAULT_NAMESPACE, ClusteringUtils.CLUSTERING_CRS).getFeatureType(); final GeometryFactory factory = new GeometryFactory(); final String grp1 = "g1"; final String grp2 = "g2"; final SimpleFeature level1b1G1Feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "level1b1G1Feature", "fred", grp1, 20.30203, factory.createPoint(new Coordinate(02.5, 0.25)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype); final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName(); final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily(); final StoreFactoryOptions opts = storeFamily.getDataStoreFactory().createOptionsInstance(); opts.setGeoWaveNamespace(namespace); final DataStorePluginOptions storePluginOptions = new DataStorePluginOptions(opts); final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(opts); final IndexStore indexStore = storeFamily.getIndexStoreFactory().createStore(opts); final PersistentAdapterStore adapterStore = storeFamily.getAdapterStoreFactory().createStore(opts); ingest(dataStore, adapter, index, level1b1G1Feature); final SimpleFeature level1b1G2Feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "level1b1G2Feature", "flood", grp2, 20.30203, 
factory.createPoint(new Coordinate(02.03, 0.2)), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); ingest(dataStore, adapter, index, level1b1G2Feature); final SimpleFeature level2b1G1Feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "level2b1G1Feature", "flou", level1b1G1Feature.getID(), 20.30203, factory.createPoint(new Coordinate(02.5, 0.25)), new String[] {"extra1"}, new double[] {0.022}, 2, 1, 0); ingest(dataStore, adapter, index, level2b1G1Feature); final SimpleFeature level2b1G2Feature = AnalyticFeature.createGeometryFeature( ftype, "b1", "level2b1G2Feature", "flapper", level1b1G2Feature.getID(), 20.30203, factory.createPoint(new Coordinate(02.03, 0.2)), new String[] {"extra1"}, new double[] {0.022}, 2, 1, 0); ingest(dataStore, adapter, index, level2b1G2Feature); // different batch final SimpleFeature level2B2G1Feature = AnalyticFeature.createGeometryFeature( ftype, "b2", "level2B2G1Feature", "flapper", level1b1G1Feature.getID(), 20.30203, factory.createPoint(new Coordinate(02.63, 0.25)), new String[] {"extra1"}, new double[] {0.022}, 2, 1, 0); ingest(dataStore, adapter, index, level2B2G1Feature); final SimpleFeatureItemWrapperFactory wrapperFactory = new SimpleFeatureItemWrapperFactory(); final CentroidManagerGeoWave mananger = new CentroidManagerGeoWave<>( dataStore, indexStore, adapterStore, new SimpleFeatureItemWrapperFactory(), adapter.getTypeName(), storePluginOptions.createInternalAdapterStore().getAdapterId(adapter.getTypeName()), index.getName(), "b1", 1); final List> capturedPairing = new ArrayList<>(); final AssociationNotification assoc = new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { capturedPairing.add(pairing); } }; final FeatureCentroidDistanceFn distanceFn = new FeatureCentroidDistanceFn(); final NestedGroupCentroidAssignment assigmentB1 = new NestedGroupCentroidAssignment<>(mananger, 1, "b1", distanceFn); 
assigmentB1.findCentroidForLevel(wrapperFactory.create(level1b1G1Feature), assoc); assertEquals(1, capturedPairing.size()); assertEquals(level1b1G1Feature.getID(), capturedPairing.get(0).getCentroid().getID()); capturedPairing.clear(); final NestedGroupCentroidAssignment assigmentB1L2G1 = new NestedGroupCentroidAssignment<>(mananger, 2, "b1", distanceFn); assigmentB1L2G1.findCentroidForLevel(wrapperFactory.create(level1b1G1Feature), assoc); assertEquals(1, capturedPairing.size()); assertEquals(level2b1G1Feature.getID(), capturedPairing.get(0).getCentroid().getID()); capturedPairing.clear(); // level 2 and different parent grouping final NestedGroupCentroidAssignment assigmentB1L2G2 = new NestedGroupCentroidAssignment<>(mananger, 2, "b1", distanceFn); assigmentB1L2G2.findCentroidForLevel(wrapperFactory.create(level1b1G2Feature), assoc); assertEquals(1, capturedPairing.size()); assertEquals(level2b1G2Feature.getID(), capturedPairing.get(0).getCentroid().getID()); capturedPairing.clear(); // level two with different batch than parent final CentroidManagerGeoWave mananger2 = new CentroidManagerGeoWave<>( dataStore, indexStore, adapterStore, new SimpleFeatureItemWrapperFactory(), adapter.getTypeName(), storePluginOptions.createInternalAdapterStore().getAdapterId(adapter.getTypeName()), index.getName(), "b2", 2); final NestedGroupCentroidAssignment assigmentB2L2 = new NestedGroupCentroidAssignment<>(mananger2, 2, "b1", distanceFn); assigmentB2L2.findCentroidForLevel(wrapperFactory.create(level1b1G1Feature), assoc); assertEquals(1, capturedPairing.size()); assertEquals(level2B2G1Feature.getID(), capturedPairing.get(0).getCentroid().getID()); capturedPairing.clear(); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/distance/CoordinateCircleDistanceFnTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.distance; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.locationtech.jts.geom.Coordinate; public class CoordinateCircleDistanceFnTest { @Test public void test() { final CoordinateCircleDistanceFn fn = new CoordinateCircleDistanceFn(); final double d1 = fn.measure(new Coordinate(90, 0), new Coordinate(89, 0)); final double d2 = fn.measure(new Coordinate(89, 0), new Coordinate(90, 0)); final double d3close = fn.measure( new Coordinate(10.000000001, 89.00000010), new Coordinate(10.000000002, 89.00000001)); final double dateLineclose = fn.measure(new Coordinate(-179.9999999, 0.00001), new Coordinate(179.9999999, 0.00001)); assertEquals(d1, d2, 0.0000001); assertEquals(111319.49079322655, d1, 0.00001); assertTrue(d3close < 0.04); assertTrue(dateLineclose < 0.03); } } ================================================ FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/distance/FeatureDistanceFnTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.distance;

import static org.junit.Assert.assertTrue;
import java.util.UUID;
import org.geotools.feature.type.BasicFeatureTypes;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Exercises {@link FeatureDistanceFn} across geometry combinations: point-to-point,
 * point-to-polygon, polygon-to-polygon, and intersecting polygons (which must measure as zero).
 */
public class FeatureDistanceFnTest {

  FeatureDistanceFn functionUnderTest = new FeatureDistanceFn();
  SimpleFeatureType featureType;
  final GeometryFactory factory = new GeometryFactory();

  @Before
  public void setup() {
    featureType =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroid",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
  }

  @Test
  public void testPoint() {
    final SimpleFeature origin = createFeature(factory.createPoint(new Coordinate(0, 0)));
    final SimpleFeature nearby =
        createFeature(factory.createPoint(new Coordinate(0.001, 0.001)));
    // ~0.001 degrees diagonal is on the order of 150 m.
    testBounds(functionUnderTest.measure(origin, nearby), 100, 200);
  }

  @Test
  public void testPointWithPoly() {
    final SimpleFeature point = createFeature(factory.createPoint(new Coordinate(0, 0)));
    final SimpleFeature triangle =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.001, 0.001),
                    new Coordinate(0.001, 0.002),
                    new Coordinate(0.002, 0.002),
                    new Coordinate(0.001, 0.001)}));
    testBounds(functionUnderTest.measure(point, triangle), 100, 200);
  }

  @Test
  public void testPolyWithPoly() {
    final SimpleFeature lowerTriangle =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.000, 0.000),
                    new Coordinate(-0.000, -0.001),
                    new Coordinate(-0.001, -0.001),
                    new Coordinate(0.00, 0.00)}));
    final SimpleFeature upperTriangle =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.001, 0.001),
                    new Coordinate(0.001, 0.002),
                    new Coordinate(0.002, 0.002),
                    new Coordinate(0.001, 0.001)}));
    testBounds(functionUnderTest.measure(lowerTriangle, upperTriangle), 100, 200);
  }

  @Test
  public void testIntersectingPoly() {
    // These two polygons overlap, so the distance between them is zero.
    final SimpleFeature overlapA =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.000, 0.000),
                    new Coordinate(0.0012, 0.000),
                    new Coordinate(0.0013, 0.0015),
                    new Coordinate(0.00, 0.00)}));
    final SimpleFeature overlapB =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.001, 0.001),
                    new Coordinate(0.002, 0.001),
                    new Coordinate(0.002, 0.002),
                    new Coordinate(0.001, 0.001)}));
    testBounds(functionUnderTest.measure(overlapA, overlapB), 0, 0.00001);

    // These two polygons touch at a single vertex — also distance zero.
    final SimpleFeature touchingA =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.000, 0.000),
                    new Coordinate(0.001, 0.001),
                    new Coordinate(0.000, 0.001),
                    new Coordinate(0.00, 0.00)}));
    final SimpleFeature touchingB =
        createFeature(
            factory.createPolygon(
                new Coordinate[] {
                    new Coordinate(0.001, 0.001),
                    new Coordinate(0.002, 0.001),
                    new Coordinate(0.002, 0.002),
                    new Coordinate(0.001, 0.001)}));
    testBounds(functionUnderTest.measure(touchingA, touchingB), 0.0, 0.00001);
  }

  /** Asserts the measured distance falls within [lower, upper]. */
  private void testBounds(final double distance, final double lower, final double upper) {
    assertTrue((distance >= lower) && (distance <= upper));
  }

  /** Wraps the geometry in a throw-away analytic feature with random ids. */
  private SimpleFeature createFeature(final Geometry geometry) {
    return AnalyticFeature.createGeometryFeature(
        featureType,
        "b1",
        UUID.randomUUID().toString(),
        UUID.randomUUID().toString(),
        "NA",
        20.30203,
        geometry,
        new String[] {"extra1"},
        new double[] {0.022},
        1,
        1,
        0);
  }
}


================================================
FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/kmeans/CentroidAssociationFnTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.kmeans;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.analytic.AnalyticItemWrapper;
import org.locationtech.geowave.analytic.clustering.CentroidPairing;
import org.locationtech.geowave.analytic.clustering.LongCentroid;
import org.locationtech.geowave.analytic.distance.DistanceFn;

/**
 * Verifies {@link CentroidAssociationFn}: each data point must be paired with its nearest
 * centroid and the total cost must equal the sum of the individual pairing distances.
 */
public class CentroidAssociationFnTest {

  // Expected nearest-centroid pairings for the data/centroid sets used in test().
  private static Set<CentroidPairing<Long>> expectedPairings = new HashSet<>();
  private static double expectedCost = 0;

  static {
    expectedPairings.add(
        new CentroidPairing<>(new LongCentroid(10, "", 0), new LongCentroid(345, "", 0), 335));
    expectedPairings.add(
        new CentroidPairing<>(new LongCentroid(1000, "", 0), new LongCentroid(764, "", 0), 236));
    expectedPairings.add(
        new CentroidPairing<>(new LongCentroid(10, "", 0), new LongCentroid(89, "", 0), 79));
    expectedPairings.add(
        new CentroidPairing<>(new LongCentroid(1000, "", 0), new LongCentroid(900, "", 0), 100));
    for (final CentroidPairing<Long> pairing : expectedPairings) {
      expectedCost += pairing.getDistance();
    }
  }

  @Test
  public void test() {
    final CentroidAssociationFn<Long> fn = new CentroidAssociationFn<>();
    // Simple one-dimensional distance: absolute difference of the values.
    fn.setDistanceFunction(new DistanceFn<Long>() {

      private static final long serialVersionUID = 1L;

      @Override
      public double measure(final Long x, final Long y) {
        return Math.abs(x.longValue() - y.longValue());
      }
    });
    final List<AnalyticItemWrapper<Long>> dataSet =
        Arrays.asList(
            (AnalyticItemWrapper<Long>) new LongCentroid(345, "", 0),
            new LongCentroid(764, "", 0),
            new LongCentroid(89, "", 0),
            new LongCentroid(900, "", 0));
    final List<AnalyticItemWrapper<Long>> centroidSet =
        Arrays.asList(
            (AnalyticItemWrapper<Long>) new LongCentroid(10, "", 0),
            (AnalyticItemWrapper<Long>) new LongCentroid(1000, "", 0));
    final double cost =
        fn.compute(dataSet, centroidSet, new AssociationNotification<Long>() {
          @Override
          public void notify(final CentroidPairing<Long> pairing) {
            Assert.assertTrue(expectedPairings.contains(pairing));
          }
        });
    Assert.assertEquals(expectedCost, cost, 0.0001);
  }
}


================================================
FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/kmeans/KMeansParallelInitializeTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.kmeans;

import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.analytic.AnalyticItemWrapper;
import org.locationtech.geowave.analytic.GeometryDataSetGenerator;
import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;
import org.locationtech.geowave.analytic.clustering.CentroidPairing;
import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;
import org.locationtech.geowave.analytic.kmeans.serial.AnalyticStats.StatValue;
import org.locationtech.geowave.analytic.kmeans.serial.KMeansParallelInitialize;
import org.locationtech.geowave.analytic.kmeans.serial.StatsMap;
import org.locationtech.geowave.analytic.sample.BahmanEtAlSampleProbabilityFn;
import org.locationtech.geowave.analytic.sample.Sampler;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;

/**
 * Exercises the serial k-means|| (parallel initialization) step over a generated point set and
 * verifies that (a) at least the requested sample size of centroids is produced and (b) the
 * recorded COST statistic is monotonically non-increasing across iterations.
 */
public class KMeansParallelInitializeTest {

  // Type parameter reconstructed as SimpleFeature — TODO confirm against
  // KMeansParallelInitialize's declared parameterization.
  final KMeansParallelInitialize<SimpleFeature> initializer = new KMeansParallelInitialize<>();
  final SimpleFeatureItemWrapperFactory itemFactory = new SimpleFeatureItemWrapperFactory();

  @Before
  public void setup() {
    initializer.getCentroidAssociationFn().setDistanceFunction(new FeatureCentroidDistanceFn());
    initializer.setCentroidFactory(new SimpleFeatureItemWrapperFactory());
    final Sampler<SimpleFeature> sampler = initializer.getSampler();
    sampler.setSampleProbabilityFn(new BahmanEtAlSampleProbabilityFn());
    sampler.setSampleSize(5);
  }

  /** Builds a simple WGS84 feature type with a geometry, a name, and a count attribute. */
  private SimpleFeatureBuilder getBuilder() {
    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
    typeBuilder.setName("test");
    typeBuilder.setCRS(DefaultGeographicCRS.WGS84); // <- Coordinate reference
    // add attributes in order
    typeBuilder.add("geom", Geometry.class);
    typeBuilder.add("name", String.class);
    typeBuilder.add("count", Long.class);
    // build the type
    return new SimpleFeatureBuilder(typeBuilder.buildFeatureType());
  }

  @Test
  public void test() {
    final GeometryDataSetGenerator dataGenerator =
        new GeometryDataSetGenerator(
            initializer.getCentroidAssociationFn().getDistanceFunction(),
            getBuilder());
    final List<SimpleFeature> pointSet = dataGenerator.generatePointSet(0.15, 0.2, 10, 10000);
    // Sort the data as if coming out of geowave
    // Also, the pointSet from the generator contains the centers first, so
    // the data is already skewed to optimal sampling
    Collections.sort(pointSet, new Comparator<SimpleFeature>() {
      @Override
      public int compare(final SimpleFeature arg0, final SimpleFeature arg1) {
        final double arg0ToCorner =
            initializer.getCentroidAssociationFn().getDistanceFunction().measure(
                arg0,
                dataGenerator.getCorner());
        final double arg1ToCorner =
            initializer.getCentroidAssociationFn().getDistanceFunction().measure(
                arg1,
                dataGenerator.getCorner());
        // FIX: use Double.compare so equal distances compare as 0; the original
        // returned only -1 or 1, which violates the Comparator contract
        // (sgn(compare(x, y)) == -sgn(compare(y, x))) and can make sort throw.
        return Double.compare(arg0ToCorner, arg1ToCorner);
      }
    });
    final List<AnalyticItemWrapper<SimpleFeature>> itemSet = new ArrayList<>();
    for (final SimpleFeature feature : pointSet) {
      itemSet.add(itemFactory.create(feature));
    }
    final Pair<List<CentroidPairing<SimpleFeature>>, List<AnalyticItemWrapper<SimpleFeature>>> result =
        initializer.runLocal(itemSet);
    assertTrue(result.getRight().size() >= 5);
    assertTrue(isMonotonic((StatsMap) initializer.getStats()));
    for (final AnalyticItemWrapper<SimpleFeature> centroid : result.getRight()) {
      System.out.println(
          centroid.getWrappedItem().toString() + " = " + centroid.getAssociationCount());
    }
  }

  /**
   * Returns true when the COST statistic never increases from one recorded value to the next.
   */
  private boolean isMonotonic(final StatsMap stats) {
    Double last = null;
    for (final Double stat : stats.getStats(StatValue.COST)) {
      System.out.println(stat);
      if ((last != null) && (last.compareTo(stat) < 0)) {
        return false;
      }
      // FIX: advance the cursor each iteration; the original only assigned 'last'
      // once, so every later cost was compared to the FIRST cost rather than to
      // its immediate predecessor, and non-monotonic sequences could pass.
      last = stat;
    }
    return true;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.kryo;

import static org.junit.Assert.assertEquals;
import java.util.List;
import java.util.UUID;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureImpl;
import org.junit.Test;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.InputChunked;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.io.OutputChunked;

/**
 * Round-trips a SimpleFeature through the custom Kryo {@link FeatureSerializer} and verifies the
 * deserialized feature equals the original.
 */
public class FeatureSerializationTest {

  @Test
  public void test() throws SchemaException {
    final Kryo kryo = new Kryo();
    kryo.register(SimpleFeatureImpl.class, new FeatureSerializer());

    final SimpleFeatureType schema =
        DataUtilities.createType("testGeo", "location:Point:srid=4326,name:String");

    // Seed every attribute with its schema-declared default value.
    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    for (int i = 0; i < defaults.length; i++) {
      defaults[i] = descriptors.get(i).getDefaultValue();
    }

    final SimpleFeature original =
        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());
    original.setAttribute(
        "location",
        new GeometryFactory().createPoint(new Coordinate(-45, 45)));

    // Serialize through the registered serializer, then read the bytes back.
    final Output sink = new OutputChunked();
    kryo.getSerializer(SimpleFeatureImpl.class).write(kryo, sink, original);
    final Input restore = new InputChunked();
    restore.setBuffer(sink.getBuffer());
    final SimpleFeature roundTripped =
        (SimpleFeature) kryo.getSerializer(SimpleFeatureImpl.class).read(
            kryo,
            restore,
            SimpleFeatureImpl.class);

    assertEquals(original, roundTripped);
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.nn;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.nn.NNProcessor.CompleteNotifier;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.analytic.partitioner.Partitioner;
import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;
import org.locationtech.geowave.core.index.ByteArray;

/**
 * Exercises {@link NNProcessor} with integer "points" partitioned into buckets of width 300;
 * neighbors are any pair within the 200-unit distance threshold.
 */
public class NNProcessorTest {

  // Expected neighbor lists: values within 200 of each other in the same partition pair up;
  // 1033 and 533 have no neighbor in range.
  static Map<Integer, List<Integer>> expectedResults = new HashMap<>();

  @Before
  public void setupResults() {
    // FIX: clear before repopulating so repeated runs in the same JVM start from a
    // known state (the map is static and would otherwise accumulate stale entries).
    expectedResults.clear();
    // FIX: Integer.valueOf / autoboxing instead of the deprecated new Integer(...)
    // constructor (deprecated since Java 9; valueOf also uses the integer cache).
    expectedResults.put(Integer.valueOf(293), Arrays.asList(Integer.valueOf(233)));
    expectedResults.put(Integer.valueOf(233), Arrays.asList(Integer.valueOf(293)));
    expectedResults.put(Integer.valueOf(735), Arrays.asList(Integer.valueOf(833)));
    expectedResults.put(Integer.valueOf(833), Arrays.asList(Integer.valueOf(735)));
    expectedResults.put(Integer.valueOf(1833), Arrays.asList(Integer.valueOf(2033)));
    expectedResults.put(Integer.valueOf(2033), Arrays.asList(Integer.valueOf(1833)));
    expectedResults.put(Integer.valueOf(1033), Collections.emptyList());
    expectedResults.put(Integer.valueOf(533), Collections.emptyList());
  }

  /**
   * Builds a processor whose partitioner buckets integers by value/300 and whose distance
   * profile is the absolute numeric difference. Generic parameters reconstructed — TODO
   * confirm against NNProcessor's declared parameterization.
   */
  NNProcessor<Object, Integer> buildProcessor() {
    return new NNProcessor<>(new Partitioner<Object>() {

      private static final long serialVersionUID = 1L;

      @Override
      public void initialize(final JobContext context, final Class<?> scope) throws IOException {}

      @Override
      public List<PartitionData> getCubeIdentifiers(final Object entry) {
        return Collections.singletonList(
            new PartitionData(
                new ByteArray(new byte[] {}),
                NNProcessorTest.partition((Integer) entry),
                true));
      }

      @Override
      public void partition(
          final Object entry,
          final org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionDataCallback callback)
          throws Exception {
        for (final PartitionData pd : getCubeIdentifiers(entry)) {
          callback.partitionWith(pd);
        }
      }

      @Override
      public Collection<ParameterEnum<?>> getParameters() {
        return Collections.emptyList();
      }

      @Override
      public void setup(
          final PropertyManagement runTimeProperties,
          final Class<?> scope,
          final Configuration configuration) {}
    }, new TypeConverter<Integer>() {
      @Override
      public Integer convert(final ByteArray id, final Object o) {
        return (Integer) o;
      }
    }, new DistanceProfileGenerateFn<Object, Integer>() {
      @Override
      public DistanceProfile<Object> computeProfile(final Integer item1, final Integer item2) {
        return new DistanceProfile<>(Math.abs(item1.doubleValue() - item2.doubleValue()), item1);
      }
    }, 200, new PartitionData(new ByteArray(new byte[] {}), new ByteArray("123"), true));
  }

  @Test
  public void testNormalOp() throws IOException, InterruptedException {
    runProcess(buildProcessor(), new CompleteNotifier<Integer>() {
      @Override
      public void complete(
          final ByteArray id,
          final Integer value,
          final NeighborList<Integer> list) throws IOException, InterruptedException {
        final Iterator<Entry<ByteArray, Integer>> it = list.iterator();
        // Copy so we can tick off each observed neighbor exactly once.
        final List<Integer> expectedResultSet = new ArrayList<>(expectedResults.get(value));
        assertNotNull(expectedResultSet);
        while (it.hasNext()) {
          final Integer result = it.next().getValue();
          assertTrue("" + value + " with " + result, expectedResultSet.remove(result));
        }
        assertTrue(expectedResultSet.isEmpty());
      }
    });
  }

  @Test
  public void testRemoveOp() throws IOException, InterruptedException {
    // Removing entries mid-process must not blow up.
    final NNProcessor<Object, Integer> processor = buildProcessor();
    runProcess(processor, new CompleteNotifier<Integer>() {
      @Override
      public void complete(
          final ByteArray id,
          final Integer value,
          final NeighborList<Integer> list) throws IOException, InterruptedException {
        processor.remove(id);
      }
    });
  }

  @Test
  public void testTrimOp() throws IOException, InterruptedException {
    // Every partition holds fewer than 10 entries, so trimming by that minimum
    // empties the processor and the notifier must never fire.
    final NNProcessor<Object, Integer> processor = buildProcessor();
    addToProcess(processor, 293);
    addToProcess(processor, 233);
    addToProcess(processor, 533);
    addToProcess(processor, 735);
    addToProcess(processor, 833);
    addToProcess(processor, 1033);
    addToProcess(processor, 1833);
    addToProcess(processor, 2033);
    processor.trimSmallPartitions(10);
    processor.process(new NeighborListFactory<Integer>() {
      @Override
      public NeighborList<Integer> buildNeighborList(
          final ByteArray cnterId,
          final Integer center) {
        return new DefaultNeighborList<>();
      }
    }, new CompleteNotifier<Integer>() {
      @Override
      public void complete(
          final ByteArray id,
          final Integer value,
          final NeighborList<Integer> list) throws IOException, InterruptedException {
        fail("Should not get here");
      }
    });
  }

  /** Loads the canonical fixture values and runs the processor with the given notifier. */
  private void runProcess(
      final NNProcessor<Object, Integer> processor,
      final CompleteNotifier<Integer> notifier) throws IOException, InterruptedException {
    addToProcess(processor, 293);
    addToProcess(processor, 233);
    addToProcess(processor, 533);
    addToProcess(processor, 735);
    addToProcess(processor, 833);
    addToProcess(processor, 1033);
    addToProcess(processor, 1833);
    addToProcess(processor, 2033);
    processor.process(new NeighborListFactory<Integer>() {
      @Override
      public NeighborList<Integer> buildNeighborList(
          final ByteArray cnterId,
          final Integer center) {
        return new DefaultNeighborList<>();
      }
    }, notifier);
  }

  /** Buckets a value into a partition key by integer division (width 300). */
  private static ByteArray partition(final Integer v) {
    return new ByteArray(Integer.toString((v.intValue() / 300)));
  }

  private void addToProcess(final NNProcessor<Object, Integer> processor, final Integer v)
      throws IOException {
    processor.add(new ByteArray(v.toString()), true, v);
  }
}
FILE: analytics/api/src/test/java/org/locationtech/geowave/analytic/partitioner/BoundaryDistancePartitionerTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.partitioner;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.geotools.feature.type.BasicFeatureTypes;
import org.geotools.referencing.CRS;
import org.junit.Test;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;
import org.locationtech.geowave.analytic.model.SpatialIndexModelBuilder;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.ExtractParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Verifies {@link BoundaryPartitioner} partition assignment for points near the origin, near the
 * anti-meridian, and for a small ring geometry, including primary-partition counts and the
 * numeric ranges reported for each partition.
 */
public class BoundaryDistancePartitionerTest {

  public static CoordinateReferenceSystem DEFAULT_CRS;

  static {
    try {
      DEFAULT_CRS = CRS.decode("EPSG:4326", true);
    } catch (final FactoryException e) {
      e.printStackTrace();
    }
  }

  @Test
  public void test() throws IOException, ClassNotFoundException {
    final SimpleFeatureType centroidType =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroid",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
    final GeometryFactory geomFactory = new GeometryFactory();

    // A point at the exact origin should straddle four partitions, one primary.
    SimpleFeature feature =
        AnalyticFeature.createGeometryFeature(
            centroidType,
            "b1",
            "123",
            "fred",
            "NA",
            20.30203,
            geomFactory.createPoint(new Coordinate(0, 0)),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);

    // Configure the partitioner through PropertyManagement exactly as a job would.
    final PropertyManagement props = new PropertyManagement();
    props.store(PartitionParameters.Partition.DISTANCE_THRESHOLDS, "10000");
    props.store(CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS, SpatialIndexModelBuilder.class);
    props.store(
        ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,
        SimpleFeatureGeometryExtractor.class);
    props.store(GlobalParameters.Global.CRS_ID, "EPSG:4326");
    props.store(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, "m");

    final BoundaryPartitioner partitioner = new BoundaryPartitioner();
    final Configuration configuration = new Configuration();
    final Class<?> scope = BoundaryDistancePartitionerTest.class;
    props.setJobConfiguration(configuration, scope);
    partitioner.initialize(Job.getInstance(configuration), scope);

    List<PartitionData> partitions = partitioner.getCubeIdentifiers(feature);
    assertEquals(4, partitions.size());
    assertTrue(hasNPrimary(partitions, 1));
    for (final PartitionData partition : partitions) {
      final MultiDimensionalNumericData ranges = partitioner.getRangesForPartition(partition);
      // Every partition range must contain the origin (within epsilon).
      assertTrue(ranges.getDataPerDimension()[0].getMin() < 0.0000000001);
      assertTrue(ranges.getDataPerDimension()[0].getMax() > -0.0000000001);
      assertTrue(ranges.getDataPerDimension()[1].getMin() < 0.00000000001);
      assertTrue(ranges.getDataPerDimension()[1].getMax() > -0.0000000001);
    }

    // A point just shy of the anti-meridian still yields four partitions.
    feature =
        AnalyticFeature.createGeometryFeature(
            centroidType,
            "b1",
            "123",
            "fred",
            "NA",
            20.30203,
            geomFactory.createPoint(new Coordinate(-179.99999996, 0)),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);
    partitions = partitioner.getCubeIdentifiers(feature);
    assertEquals(4, partitions.size());
    assertTrue(hasNPrimary(partitions, 1));

    // A small ring spanning cell boundaries is primary in four partitions.
    feature =
        AnalyticFeature.createGeometryFeature(
            centroidType,
            "b1",
            "123",
            "fred",
            "NA",
            20.30203,
            geomFactory.createLinearRing(
                new Coordinate[] {
                    new Coordinate(88, 0),
                    new Coordinate(88, 0.001),
                    new Coordinate(88.001, 0.001),
                    new Coordinate(88.001, 0),
                    new Coordinate(88, 0)}),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);
    partitions = partitioner.getCubeIdentifiers(feature);
    assertTrue(hasNPrimary(partitions, 4));
  }

  /** Returns true when exactly {@code expected} of the partitions are primary. */
  private boolean hasNPrimary(final List<PartitionData> data, final int expected) {
    int count = 0;
    for (final PartitionData item : data) {
      count += (item.isPrimary() ? 1 : 0);
    }
    return count == expected;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.partitioner;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.geotools.feature.type.BasicFeatureTypes;
import org.geotools.referencing.CRS;
import org.junit.Test;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor;
import org.locationtech.geowave.analytic.model.SpatialIndexModelBuilder;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.ExtractParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Verifies {@link OrthodromicDistancePartitioner} partition assignment for points at the origin,
 * near the anti-meridian, and at high latitude, plus Java serialization round-tripping of the
 * partitioner itself.
 */
public class OrthodromicDistancePartitionerTest {

  public static CoordinateReferenceSystem DEFAULT_CRS;

  static {
    try {
      DEFAULT_CRS = CRS.decode("EPSG:4326", true);
    } catch (final FactoryException e) {
      e.printStackTrace();
    }
  }

  @Test
  public void test() throws IOException, ClassNotFoundException {
    final SimpleFeatureType ftype =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroid",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
    final GeometryFactory factory = new GeometryFactory();
    SimpleFeature feature =
        AnalyticFeature.createGeometryFeature(
            ftype,
            "b1",
            "123",
            "fred",
            "NA",
            20.30203,
            factory.createPoint(new Coordinate(0, 0)),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);

    final PropertyManagement propertyManagement = new PropertyManagement();
    propertyManagement.store(PartitionParameters.Partition.DISTANCE_THRESHOLDS, "10000");
    propertyManagement.store(
        CommonParameters.Common.INDEX_MODEL_BUILDER_CLASS,
        SpatialIndexModelBuilder.class);
    propertyManagement.store(
        ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,
        SimpleFeatureGeometryExtractor.class);
    propertyManagement.store(GlobalParameters.Global.CRS_ID, "EPSG:4326");
    propertyManagement.store(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT, "m");

    final OrthodromicDistancePartitioner<SimpleFeature> partitioner =
        new OrthodromicDistancePartitioner<>();
    final Configuration configuration = new Configuration();
    final Class<?> scope = OrthodromicDistancePartitionerTest.class;
    propertyManagement.setJobConfiguration(configuration, scope);
    partitioner.initialize(Job.getInstance(configuration), scope);

    // Origin point: four partitions, one primary, all ranges containing (0, 0).
    List<PartitionData> partitions = partitioner.getCubeIdentifiers(feature);
    assertEquals(4, partitions.size());
    assertTrue(hasOnePrimary(partitions));
    for (final PartitionData partition : partitions) {
      final MultiDimensionalNumericData ranges = partitioner.getRangesForPartition(partition);
      assertTrue(ranges.getDataPerDimension()[0].getMin() < 0.0000000001);
      assertTrue(ranges.getDataPerDimension()[0].getMax() > -0.0000000001);
      assertTrue(ranges.getDataPerDimension()[1].getMin() < 0.00000000001);
      assertTrue(ranges.getDataPerDimension()[1].getMax() > -0.0000000001);
    }

    // Point just shy of the anti-meridian: still four partitions, one primary.
    feature =
        AnalyticFeature.createGeometryFeature(
            ftype,
            "b1",
            "123",
            "fred",
            "NA",
            20.30203,
            factory.createPoint(new Coordinate(-179.99999996, 0)),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);
    partitions = partitioner.getCubeIdentifiers(feature);
    assertEquals(4, partitions.size());
    assertTrue(hasOnePrimary(partitions));

    // High-latitude point: two partitions whose combined envelope spans (88, 0).
    feature =
        AnalyticFeature.createGeometryFeature(
            ftype,
            "b1",
            "123",
            "fred",
            "NA",
            20.30203,
            factory.createPoint(new Coordinate(88, 0)),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);
    partitions = partitioner.getCubeIdentifiers(feature);
    assertEquals(2, partitions.size());
    assertTrue(hasOnePrimary(partitions));
    double maxX = 0;
    double minX = 0;
    double maxY = 0;
    double minY = 0;
    for (final PartitionData partition : partitions) {
      final MultiDimensionalNumericData ranges = partitioner.getRangesForPartition(partition);
      // System.out.println(ranges.getDataPerDimension()[0] + "; "
      // +ranges.getDataPerDimension()[1] + " = " + partition.isPrimary);
      maxX = Math.max(maxX, ranges.getMaxValuesPerDimension()[1]);
      maxY = Math.max(maxY, ranges.getMaxValuesPerDimension()[0]);
      minX = Math.min(minX, ranges.getMinValuesPerDimension()[1]);
      minY = Math.min(minY, ranges.getMinValuesPerDimension()[0]);
    }
    assertTrue(maxY > 88.0);
    assertTrue(minY < 88.0);
    assertTrue(maxX > 0);
    assertTrue(minX < 0);

    // Serialization round trip: the deserialized partitioner must equal the original.
    // FIX: the ObjectOutputStream is now managed by try-with-resources; previously it
    // was never closed (only the byte stream and the ObjectInputStream were).
    try (final ByteArrayOutputStream bs = new ByteArrayOutputStream();
        final ObjectOutputStream os = new ObjectOutputStream(bs)) {
      os.writeObject(partitioner);
      os.flush();
      try (final ObjectInputStream is =
          new ObjectInputStream(new ByteArrayInputStream(bs.toByteArray()))) {
        @SuppressWarnings("unchecked")
        final OrthodromicDistancePartitioner<SimpleFeature> partitioner2 =
            (OrthodromicDistancePartitioner<SimpleFeature>) is.readObject();
        assertEquals(partitioner2, partitioner);
      }
    }
  }

  /** Returns true when exactly one of the partitions is flagged primary. */
  private boolean hasOnePrimary(final List<PartitionData> data) {
    int count = 0;
    for (final PartitionData dataitem : data) {
      count += (dataitem.isPrimary() ? 1 : 0);
    }
    return count == 1;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

/**
 * Used for (1) representation of collections (2) summation in a combiner (3) and finally, for
 * computation of averages.
 *
 * <p>Comparison and equality are based on the ratio {@code value / count} with an absolute
 * tolerance of 1e-7. NOTE: a tolerance-based equals cannot be fully consistent with
 * {@link #hashCode()} (which hashes the exact ratio bits), and {@code value / count} is NaN when
 * {@code count} is 0 — callers are expected to avoid zero counts.
 */
public class CountofDoubleWritable implements Writable, WritableComparable {

  private double value = 0.0;
  private double count = 0.0;

  public CountofDoubleWritable() {}

  public CountofDoubleWritable(final double value, final double count) {
    set(value, count);
  }

  @Override
  public void readFields(final DataInput in) throws IOException {
    value = in.readDouble();
    count = in.readDouble();
  }

  @Override
  public void write(final DataOutput out) throws IOException {
    out.writeDouble(value);
    out.writeDouble(count);
  }

  public void set(final double value, final double count) {
    this.value = value;
    this.count = count;
  }

  public double getValue() {
    return value;
  }

  public double getCount() {
    return count;
  }

  /** Returns true iff o is a CountofDoubleWritable whose ratio is within tolerance. */
  @Override
  public boolean equals(final Object o) {
    if (!(o instanceof CountofDoubleWritable)) {
      return false;
    }
    return compareTo(o) == 0;
  }

  @Override
  public int hashCode() {
    return (int) Double.doubleToLongBits(value / count);
  }

  @Override
  public int compareTo(final Object o) {
    final CountofDoubleWritable other = (CountofDoubleWritable) o;
    final double diff = (value / count) - (other.value / other.count);
    // FIX: the positive-difference branch previously returned 0 instead of 1, so
    // a > b compared as "equal" while b < a compared as -1 — an asymmetric
    // comparison that violates the compareTo contract and corrupts sorting.
    return (Math.abs(diff) < 0.0000001) ? 0 : (diff < 0 ? -1 : 1);
  }

  @Override
  public String toString() {
    return Double.toString(value) + "/" + Double.toString(count);
  }

  /** A Comparator optimized for DoubleWritable. */
  public static class Comparator extends WritableComparator implements Serializable {

    private static final long serialVersionUID = 1L;

    public Comparator() {
      super(CountofDoubleWritable.class);
    }

    @Override
    public int compare(
        final byte[] b1,
        final int s1,
        final int l1,
        final byte[] b2,
        final int s2,
        final int l2) {
      // NOTE(review): this compares only the serialized 'value' field (first double),
      // not value/count as compareTo does — presumably intentional for raw-byte
      // sorting speed, but the two orderings can disagree; confirm before relying
      // on both in the same job.
      final double thisValue = readDouble(b1, s1);
      final double thatValue = readDouble(b2, s2);
      return Double.compare(thisValue, thatValue);
    }
  }

  static {
    // register this comparator
    WritableComparator.define(CountofDoubleWritable.class, new Comparator());
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce;

import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * An output format that writes each {@link DoubleWritable} value as a raw 8-byte double to the
 * task's default work file; keys and null values are skipped entirely.
 */
public class DoubleOutputFormat<K, V> extends FileOutputFormat<K, V> {

  /** Writes only the double payload of each record; keys are ignored. */
  protected static class DoubleRecordWriter<K, V> extends RecordWriter<K, V> {

    protected DataOutputStream out;

    public DoubleRecordWriter(final DataOutputStream out) {
      super();
      this.out = out;
    }

    @Override
    public synchronized void write(final K key, final V value) throws IOException {
      // Skip absent values; everything else is assumed to be a DoubleWritable.
      if ((value != null) && !(value instanceof NullWritable)) {
        out.writeDouble(((DoubleWritable) value).get());
      }
    }

    @Override
    public synchronized void close(final TaskAttemptContext context) throws IOException {
      out.close();
    }
  }

  @Override
  public RecordWriter<K, V> getRecordWriter(final TaskAttemptContext job)
      throws IOException, InterruptedException {
    final Configuration configuration = job.getConfiguration();
    final Path workFile = getDefaultWorkFile(job, "");
    final FileSystem fileSystem = workFile.getFileSystem(configuration);
    // Do not overwrite an existing file (overwrite = false).
    final FSDataOutputStream stream = fileSystem.create(workFile, false);
    return new DoubleRecordWriter<>(stream);
  }
}
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.util.Tool; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.IndependentJobRunner; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.InputParameters; import org.locationtech.geowave.analytic.param.OutputParameters; import org.locationtech.geowave.analytic.param.OutputParameters.Output; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import 
org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.CustomNameIndex;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.mapreduce.JobContextAdapterStore;
import org.locationtech.geowave.mapreduce.JobContextIndexStore;
import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class manages the input and output formats for a map reduce job. It also controls job
 * submission, isolating some of the job management responsibilities. One key benefit is support of
 * unit testing for job runner instances (job submission is delegated to a pluggable
 * {@link MapReduceIntegration}).
 */
public abstract class GeoWaveAnalyticJobRunner extends Configured implements
    Tool,
    MapReduceJobRunner,
    IndependentJobRunner {
  protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveAnalyticJobRunner.class);
  // Input/output format configurations; may be injected directly or resolved
  // lazily from run-time properties in run(Configuration, PropertyManagement).
  private FormatConfiguration inputFormat = null;
  private FormatConfiguration outputFormat = null;
  // Default reducer count used when OutputParameters.Output.REDUCER_COUNT is not set.
  private int reducerCount = 1;
  // Abstraction over job creation/submission; replaceable for unit testing.
  private MapReduceIntegration mapReduceIntegrater = new ToolRunnerMapReduceIntegration();
  // Counters of the most recently completed job; null until a job completes successfully.
  private Counters lastCounterSet = null;

  public FormatConfiguration getInputFormatConfiguration() {
    return inputFormat;
  }

  public void setInputFormatConfiguration(final FormatConfiguration inputFormat) {
    this.inputFormat = inputFormat;
  }

  public FormatConfiguration getOutputFormatConfiguration() {
    return outputFormat;
  }

  public void setOutputFormatConfiguration(final FormatConfiguration outputFormat) {
    this.outputFormat = outputFormat;
  }

  public MapReduceIntegration getMapReduceIntegrater() {
    return mapReduceIntegrater;
  }

  public void setMapReduceIntegrater(final MapReduceIntegration mapReduceIntegrater) {
    this.mapReduceIntegrater = mapReduceIntegrater;
  }

  public int getReducerCount() {
    return reducerCount;
  }

  public void setReducerCount(final int reducerCount) {
    this.reducerCount = reducerCount;
  }

  public GeoWaveAnalyticJobRunner() {}

  protected static Logger getLogger() {
    return LOGGER;
  }

  /** Scope class used to namespace configuration properties for this runner. */
  public Class getScope() {
    return this.getClass();
  }

  /** Creates a {@link DataStore} from the configured input store options. */
  public DataStore getDataStore(final PropertyManagement runTimeProperties) throws Exception {
    final PersistableStore store =
        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
            runTimeProperties);
    return store.getDataStoreOptions().createDataStore();
  }

  /** Creates a {@link PersistentAdapterStore} from the configured input store options. */
  public PersistentAdapterStore getAdapterStore(final PropertyManagement runTimeProperties)
      throws Exception {
    final PersistableStore store =
        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
            runTimeProperties);
    return store.getDataStoreOptions().createAdapterStore();
  }

  /** Creates an {@link InternalAdapterStore} from the configured input store options. */
  public InternalAdapterStore getInternalAdapterStore(final PropertyManagement runTimeProperties)
      throws Exception {
    final PersistableStore store =
        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
            runTimeProperties);
    return store.getDataStoreOptions().createInternalAdapterStore();
  }

  /** Creates an {@link IndexStore} from the configured input store options. */
  public IndexStore getIndexStore(final PropertyManagement runTimeProperties) throws Exception {
    final PersistableStore store =
        (PersistableStore) StoreParameters.StoreParam.INPUT_STORE.getHelper().getValue(
            runTimeProperties);
    return store.getDataStoreOptions().createIndexStore();
  }

  /**
   * Resolves input/output format configurations (from fields or run-time properties), writes them
   * plus store and reducer-count settings into the Hadoop configuration, then submits the job via
   * the configured {@link MapReduceIntegration}.
   *
   * @return the exit status from job submission
   */
  @Override
  public int run(final Configuration configuration, final PropertyManagement runTimeProperties)
      throws Exception {
    // Lazily resolve the input format from properties if it was not injected.
    if ((inputFormat == null)
        && runTimeProperties.hasProperty(InputParameters.Input.INPUT_FORMAT)) {
      inputFormat = runTimeProperties.getClassInstance(
          InputParameters.Input.INPUT_FORMAT,
          FormatConfiguration.class,
          null);
    }
    if (inputFormat != null) {
      // Record the format class in the configuration and let it stage its own settings.
      InputParameters.Input.INPUT_FORMAT.getHelper().setValue(
          configuration,
          getScope(),
          inputFormat.getClass());
      inputFormat.setup(runTimeProperties, configuration);
    }
    // Same resolution for the output format.
    if ((outputFormat == null)
        && runTimeProperties.hasProperty(OutputParameters.Output.OUTPUT_FORMAT)) {
      outputFormat = runTimeProperties.getClassInstance(
          OutputParameters.Output.OUTPUT_FORMAT,
          FormatConfiguration.class,
          null);
    }
    if (outputFormat != null) {
      OutputParameters.Output.OUTPUT_FORMAT.getHelper().setValue(
          configuration,
          getScope(),
          outputFormat.getClass());
      outputFormat.setup(runTimeProperties, configuration);
    }
    // Propagate the input store parameter into the Hadoop configuration under this scope.
    runTimeProperties.setConfig(
        new ParameterEnum[] {StoreParam.INPUT_STORE},
        configuration,
        getScope());
    // Property value wins over the field default for reducer count.
    OutputParameters.Output.REDUCER_COUNT.getHelper().setValue(
        configuration,
        getScope(),
        runTimeProperties.getPropertyAsInt(OutputParameters.Output.REDUCER_COUNT, reducerCount));
    return mapReduceIntegrater.submit(configuration, runTimeProperties, this);
  }

  /** Registers a data adapter and its type-name/adapter-id mapping in the job context. */
  public static void addDataAdapter(final Configuration config, final InternalDataAdapter adapter) {
    JobContextAdapterStore.addDataAdapter(config, adapter.getAdapter());
    JobContextInternalAdapterStore.addTypeName(
        config,
        adapter.getTypeName(),
        adapter.getAdapterId());
  }

  /** Registers an index in the job context. */
  public static void addIndex(final Configuration config, final Index index) {
    JobContextIndexStore.addIndex(config, index);
  }

  /**
   * Tool entry point: builds the job from the scoped configuration (formats, reducer count, name,
   * jar) and waits for completion.
   *
   * @return 0 on success, 1 when the job failed (no counters returned)
   */
  @SuppressWarnings("rawtypes")
  @Override
  public int run(final String[] args) throws Exception {
    final Job job = mapReduceIntegrater.getJob(this);
    // Subclass hook to configure mapper/reducer/output key-value classes, etc.
    configure(job);
    final ScopedJobConfiguration configWrapper =
        new ScopedJobConfiguration(job.getConfiguration(), getScope());
    final FormatConfiguration inputFormat = configWrapper.getInstance(
        InputParameters.Input.INPUT_FORMAT,
        FormatConfiguration.class,
        null);
    if (inputFormat != null) {
      job.setInputFormatClass((Class) inputFormat.getFormatClass());
    }
    final FormatConfiguration outputFormat = configWrapper.getInstance(
        OutputParameters.Output.OUTPUT_FORMAT,
        FormatConfiguration.class,
        null);
    if (outputFormat != null) {
      job.setOutputFormatClass((Class) outputFormat.getFormatClass());
    }
    job.setNumReduceTasks(configWrapper.getInt(OutputParameters.Output.REDUCER_COUNT, 1));
    job.setJobName(getJobName());
    job.setJarByClass(this.getClass());
    final Counters counters = mapReduceIntegrater.waitForCompletion(job);
    // Retain counters so getCounterValue can be queried after the run.
    lastCounterSet = counters;
    return (counters == null) ? 1 : 0;
  }

  protected abstract String getJobName();

  /** Returns the named counter's value from the last completed job, or 0 if none has run. */
  public long getCounterValue(final Enum counterEnum) {
    return (lastCounterSet != null) ? (lastCounterSet.findCounter(counterEnum)).getValue() : 0;
  }

  public abstract void configure(final Job job) throws Exception;

  @Override
  public Collection> getParameters() {
    final List> params = new ArrayList<>();
    if (inputFormat != null) {
      params.addAll(inputFormat.getParameters());
    }
    if (outputFormat != null) {
      params.addAll(outputFormat.getParameters());
    }
    params.addAll(
        Arrays.asList(
            new ParameterEnum[] {
                StoreParam.INPUT_STORE,
                Output.REDUCER_COUNT,
                Output.OUTPUT_FORMAT}));
    return params;
  }

  @Override
  public int run(final PropertyManagement runTimeProperties) throws Exception {
    return this.run(mapReduceIntegrater.getConfiguration(runTimeProperties), runTimeProperties);
  }

  /**
   * Looks up the adapter for the configured data type id, creating and registering a geometry
   * feature adapter (default type id "convex_hull") on first use.
   */
  protected InternalDataAdapter getAdapter(
      final PropertyManagement runTimeProperties,
      final ParameterEnum dataTypeEnum,
      final ParameterEnum dataNameSpaceEnum) throws Exception {
    final String projectionDataTypeId =
        runTimeProperties.storeIfEmpty(dataTypeEnum, "convex_hull").toString();
    final PersistentAdapterStore adapterStore = getAdapterStore(runTimeProperties);
    final InternalAdapterStore internalAdapterStore = getInternalAdapterStore(runTimeProperties);
    final Short convexHullInternalAdapterId =
        internalAdapterStore.getAdapterId(projectionDataTypeId);
    if (convexHullInternalAdapterId == null) {
      // Not yet registered: build a new geometry feature adapter and persist it.
      final String namespaceURI = runTimeProperties.storeIfEmpty(
          dataNameSpaceEnum,
          BasicFeatureTypes.DEFAULT_NAMESPACE).toString();
      final FeatureDataAdapter adapter = AnalyticFeature.createGeometryFeatureAdapter(
          projectionDataTypeId,
          new String[0],
          namespaceURI,
          ClusteringUtils.CLUSTERING_CRS);
      final short internalAdapterId = internalAdapterStore.addTypeName(adapter.getTypeName());
      final InternalDataAdapter internalAdapter = adapter.asInternalAdapter(internalAdapterId);
      adapterStore.addAdapter(internalAdapter);
      return internalAdapter;
    }
    return adapterStore.getAdapter(convexHullInternalAdapterId);
  }

  /**
   * Ensures an index with the given (or default) name exists, creating a default spatial index
   * under that name if absent.
   *
   * @return the resolved index name
   */
  protected String checkIndex(
      final PropertyManagement runTimeProperties,
      final ParameterEnum indexIdEnum,
      final String defaultIdxName) throws Exception {
    final String indexName = runTimeProperties.getPropertyAsString(indexIdEnum, defaultIdxName);
    final IndexStore indexStore = getIndexStore(runTimeProperties);
    final DataStore dataStore = getDataStore(runTimeProperties);
    Index index = indexStore.getIndex(indexName);
    if (index == null) {
      // Wrap the default spatial index strategy/model under the requested name.
      final Index defaultSpatialIndex =
          SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
      index = new CustomNameIndex(
          defaultSpatialIndex.getIndexStrategy(),
          defaultSpatialIndex.getIndexModel(),
          indexName);
      dataStore.addIndex(index);
    }
    return indexName;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GeoWaveInputFormatConfiguration.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.param.ExtractParameters;
import org.locationtech.geowave.analytic.param.FormatConfiguration;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.Query;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;

/**
 * {@link FormatConfiguration} that stages a {@link GeoWaveInputFormat} into a Hadoop job
 * configuration: store options, optional query constraints/options, and split-count bounds.
 */
public class GeoWaveInputFormatConfiguration implements FormatConfiguration {
  protected boolean isDataWritable = false;
  // Registered adapters/indices; populated via addDataAdapter/addIndex.
  protected List> adapters = new ArrayList<>();
  protected List indices = new ArrayList<>();

  public GeoWaveInputFormatConfiguration() {}

  /**
   * Writes input-format settings into the Hadoop configuration. Each query option is only set when
   * present; split counts are only set when the property resolves to a positive value.
   */
  @Override
  public void setup(final PropertyManagement runTimeProperties, final Configuration configuration)
      throws Exception {
    final DataStorePluginOptions dataStoreOptions =
        ((PersistableStore) runTimeProperties.getProperty(
            StoreParam.INPUT_STORE)).getDataStoreOptions();
    GeoWaveInputFormat.setStoreOptions(configuration, dataStoreOptions);
    final Query query = runTimeProperties.getPropertyAsQuery(ExtractParameters.Extract.QUERY);
    if (query != null) {
      if (query.getQueryConstraints() != null) {
        GeoWaveInputFormat.setQueryConstraints(configuration, query.getQueryConstraints());
      }
      if (query.getCommonQueryOptions() != null) {
        GeoWaveInputFormat.setCommonQueryOptions(configuration, query.getCommonQueryOptions());
      }
      if (query.getDataTypeQueryOptions() != null) {
        GeoWaveInputFormat.setDataTypeQueryOptions(
            configuration,
            query.getDataTypeQueryOptions(),
            dataStoreOptions.createAdapterStore(),
            dataStoreOptions.createInternalAdapterStore());
      }
      if (query.getIndexQueryOptions() != null) {
        GeoWaveInputFormat.setIndexQueryOptions(
            configuration,
            query.getIndexQueryOptions(),
            dataStoreOptions.createIndexStore());
      }
    }
    // -1 sentinel means "unset"; only positive values are propagated.
    final int minInputSplits =
        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MIN_INPUT_SPLIT, -1);
    if (minInputSplits > 0) {
      GeoWaveInputFormat.setMinimumSplitCount(configuration, minInputSplits);
    }
    final int maxInputSplits =
        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MAX_INPUT_SPLIT, -1);
    if (maxInputSplits > 0) {
      GeoWaveInputFormat.setMaximumSplitCount(configuration, maxInputSplits);
    }
    GeoWaveInputFormat.setIsOutputWritable(configuration, isDataWritable);
  }

  public void addDataAdapter(final DataTypeAdapter adapter) {
    adapters.add(adapter);
  }

  public void addIndex(final Index index) {
    indices.add(index);
  }

  @Override
  public Class getFormatClass() {
    return GeoWaveInputFormat.class;
  }

  @Override
  public boolean isDataWritable() {
    return isDataWritable;
  }

  @Override
  public void setDataIsWritable(final boolean isWritable) {
    isDataWritable = isWritable;
  }

  @Override
  public List> getParameters() {
    return Arrays.asList(
        new ParameterEnum[] {
            ExtractParameters.Extract.QUERY,
            ExtractParameters.Extract.MAX_INPUT_SPLIT,
            ExtractParameters.Extract.MIN_INPUT_SPLIT,
            StoreParam.INPUT_STORE});
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GeoWaveOutputFormatConfiguration.java
================================================
/**
 * Copyright (c) 2013-2022
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import java.util.Arrays; import java.util.Collection; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; public class GeoWaveOutputFormatConfiguration implements FormatConfiguration { /** Captures the state, but the output format is flexible enough to deal with both. 
*/ protected boolean isDataWritable = false; @Override public void setup(final PropertyManagement runTimeProperties, final Configuration configuration) throws Exception { final DataStorePluginOptions dataStoreOptions = ((PersistableStore) runTimeProperties.getProperty( StoreParam.INPUT_STORE)).getDataStoreOptions(); GeoWaveOutputFormat.setStoreOptions(configuration, dataStoreOptions); } @Override public Class getFormatClass() { return GeoWaveOutputFormat.class; } @Override public boolean isDataWritable() { return isDataWritable; } @Override public void setDataIsWritable(final boolean isWritable) { isDataWritable = isWritable; } @Override public Collection> getParameters() { return Arrays.asList(new ParameterEnum[] {StoreParam.INPUT_STORE}); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/GroupIDText.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import org.apache.hadoop.io.Text; public class GroupIDText extends Text { public void set(final String groupID, final String id) { super.set((groupID == null ? "##" : groupID) + "," + id); } public String getGroupID() { final String t = toString(); final String groupID = t.substring(0, t.indexOf(',')); return ("##".equals(groupID)) ? null : groupID; } public String getID() { final String t = toString(); return t.substring(t.indexOf(',') + 1); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/HadoopOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce;

import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.param.MapReduceParameters;
import org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;
import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class encapsulates the command-line options and parsed values specific to staging
 * intermediate data to HDFS.
 */
public class HadoopOptions {
  private static final Logger LOGGER = LoggerFactory.getLogger(HadoopOptions.class);
  private final String hdfsHostPort;
  private final Path basePath;
  private final String jobTrackerHostPort;
  private final Configuration config = new Configuration();

  /**
   * Builds Hadoop connection settings from run-time properties: HDFS host/port, base path,
   * job-tracker (or YARN resource manager) host/port, and an optional extra configuration file.
   *
   * @throws IOException if the configuration file cannot be read or the filesystem is unreachable
   */
  public HadoopOptions(final PropertyManagement runTimeProperties) throws IOException {
    // Remote invocation is implied by either an explicit HDFS or job-tracker address.
    final boolean setRemoteInvocation =
        runTimeProperties.hasProperty(MRConfig.HDFS_HOST_PORT)
            || runTimeProperties.hasProperty(MRConfig.JOBTRACKER_HOST_PORT);
    final String hostport =
        runTimeProperties.getPropertyAsString(MRConfig.HDFS_HOST_PORT, "localhost:53000");
    hdfsHostPort = hostport;
    basePath = new Path(runTimeProperties.getPropertyAsString(MRConfig.HDFS_BASE_DIR), "/");
    // Job tracker address falls back to the YARN resource manager address.
    jobTrackerHostPort =
        runTimeProperties.getPropertyAsString(
            MRConfig.JOBTRACKER_HOST_PORT,
            runTimeProperties.getPropertyAsString(MRConfig.YARN_RESOURCE_MANAGER));
    // Optionally merge an external Hadoop configuration file into this config.
    final String name =
        runTimeProperties.getPropertyAsString(MapReduceParameters.MRConfig.CONFIG_FILE);
    if (name != null) {
      try (FileInputStream in = new FileInputStream(name)) {
        // HP Fortify "Path Manipulation" false positive
        // What fortify identifies as "user input" comes
        // only from users with OS-level access anyway
        config.addResource(in, name);
      } catch (final IOException ex) {
        LOGGER.error("Configuration file not found", ex);
        throw ex;
      }
    }
    if (setRemoteInvocation) {
      GeoWaveConfiguratorBase.setRemoteInvocationParams(hdfsHostPort, jobTrackerHostPort, config);
    } else {
      LOGGER.info("Assuming local job submission");
    }
    // Sanity check only: a missing base directory is logged but does not fail construction.
    final FileSystem fs = FileSystem.get(config);
    if (!fs.exists(basePath)) {
      LOGGER.error("HDFS base directory does not exist");
      return;
    }
  }

  /** Direct-value constructor; note the default {@link Configuration} is left untouched. */
  public HadoopOptions(
      final String hdfsHostPort,
      final Path basePath,
      final String jobTrackerHostport) {
    this.hdfsHostPort = hdfsHostPort;
    this.basePath = basePath;
    jobTrackerHostPort = jobTrackerHostport;
  }

  public String getHdfsHostPort() {
    return hdfsHostPort;
  }

  public Path getBasePath() {
    return basePath;
  }

  public String getJobTrackerOrResourceManagerHostPort() {
    return jobTrackerHostPort;
  }

  public Configuration getConfiguration() {
    return config;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/MapReduceIntegration.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.Tool; import org.locationtech.geowave.analytic.PropertyManagement; public interface MapReduceIntegration { public int submit( final Configuration configuration, final PropertyManagement runTimeProperties, final GeoWaveAnalyticJobRunner tool) throws Exception; public Counters waitForCompletion(Job job) throws InterruptedException, Exception; public Job getJob(Tool tool) throws IOException; public Configuration getConfiguration(final PropertyManagement runTimeProperties) throws IOException; } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/MapReduceJobController.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce;

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.locationtech.geowave.analytic.IndependentJobRunner;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.param.MapReduceParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Run a series of jobs in a sequence. Use the {@link PostOperationTask} to allow job definitions to
 * perform an action after running. The purpose of this added task is to support information from a
 * prior job in the sequence(such as temporary file names, job IDs, stats) to be provided to the
 * next job or set of jobs.
 */
public class MapReduceJobController implements MapReduceJobRunner, IndependentJobRunner {
  static final Logger LOGGER = LoggerFactory.getLogger(MapReduceJobController.class);
  // Parallel arrays: runSetUpTasks[i] runs after runners[i] succeeds.
  // NOTE(review): both are null until init(...) is called — subclasses are expected to call it.
  private MapReduceJobRunner[] runners;
  private PostOperationTask[] runSetUpTasks;

  public MapReduceJobController() {}

  /** Installs the job sequence and the per-job post-run tasks (parallel arrays). */
  protected void init(final MapReduceJobRunner[] runners, final PostOperationTask[] runSetUpTasks) {
    this.runners = runners;
    this.runSetUpTasks = runSetUpTasks;
  }

  public MapReduceJobRunner[] getRunners() {
    return runners;
  }

  /** Hook invoked after a runner completes successfully. */
  public static interface PostOperationTask {
    public void runTask(Configuration config, MapReduceJobRunner runner);
  }

  /** No-op task for runners that need no post-run action. */
  public static final PostOperationTask DoNothingTask = new PostOperationTask() {
    @Override
    public void runTask(final Configuration config, final MapReduceJobRunner runner) {}
  };

  /**
   * Runs each job in order, stopping at (and returning) the first non-zero status. After each
   * successful job its post-operation task is run.
   */
  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    for (int i = 0; i < runners.length; i++) {
      final MapReduceJobRunner runner = runners[i];
      LOGGER.info("Running " + runner.getClass().toString());
      // HP Fortify "Command Injection" false positive
      // What Fortify considers "externally-influenced input"
      // comes only from users with OS-level access anyway
      final int status = runner.run(config, runTimeProperties);
      if (status != 0) {
        return status;
      }
      runSetUpTasks[i].runTask(config, runner);
    }
    return 0;
  }

  /** Union of the common map-reduce parameters and those of each independent runner. */
  @Override
  public Collection> getParameters() {
    final Set> params = new HashSet<>();
    params.addAll(MapReduceParameters.getParameters());
    for (int i = 0; i < runners.length; i++) {
      final MapReduceJobRunner runner = runners[i];
      if (runner instanceof IndependentJobRunner) {
        params.addAll(((IndependentJobRunner) runner).getParameters());
      }
    }
    return params;
  }

  @Override
  public int run(final PropertyManagement runTimeProperties) throws Exception {
    return this.run(getConfiguration(runTimeProperties), runTimeProperties);
  }

  /** Builds a Hadoop configuration from the properties via {@link HadoopOptions}. */
  public static Configuration getConfiguration(final PropertyManagement pm) throws IOException {
    return new HadoopOptions(pm).getConfiguration();
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/MapReduceJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.PropertyManagement; public interface MapReduceJobRunner { public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception; } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/SequenceFileInputFormatConfiguration.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import java.util.Arrays; import java.util.Collection; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.InputParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; public class SequenceFileInputFormatConfiguration implements FormatConfiguration { final Path inputPath; public SequenceFileInputFormatConfiguration() { inputPath = null; } public SequenceFileInputFormatConfiguration(final Path inputPath) { this.inputPath = inputPath; } @Override public void setup(final PropertyManagement runTimeProperties, final Configuration configuration) throws Exception { final Path localInputPath = inputPath == null ? 
runTimeProperties.getPropertyAsPath(InputParameters.Input.HDFS_INPUT_PATH) : inputPath; if (localInputPath != null) { configuration.set("mapred.input.dir", localInputPath.toString()); } } @Override public Class getFormatClass() { return SequenceFileInputFormat.class; } @Override public boolean isDataWritable() { return true; } @Override public void setDataIsWritable(final boolean isWritable) {} @Override public Collection> getParameters() { return Arrays.asList(new ParameterEnum[] {InputParameters.Input.HDFS_INPUT_PATH}); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/SequenceFileOutputFormatConfiguration.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import java.util.Arrays; import java.util.Collection; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.OutputParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; public class SequenceFileOutputFormatConfiguration implements FormatConfiguration { final Path outputPath; public SequenceFileOutputFormatConfiguration() { outputPath = null; } public SequenceFileOutputFormatConfiguration(final Path outputPath) { this.outputPath = outputPath; } @Override public void setup(final PropertyManagement runTimeProperties, final Configuration configuration) throws Exception { final Path localOutputPath = outputPath == null ? 
runTimeProperties.getPropertyAsPath(OutputParameters.Output.HDFS_OUTPUT_PATH) : outputPath; if (localOutputPath != null) { configuration.set("mapred.output.dir", localOutputPath.toString()); } } @Override public Class getFormatClass() { return SequenceFileOutputFormat.class; } @Override public boolean isDataWritable() { return true; } @Override public void setDataIsWritable(final boolean isWritable) {} @Override public Collection> getParameters() { return Arrays.asList(new ParameterEnum[] {OutputParameters.Output.HDFS_OUTPUT_PATH}); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/ToolRunnerMapReduceIntegration.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.locationtech.geowave.analytic.PropertyManagement; public class ToolRunnerMapReduceIntegration implements MapReduceIntegration { @Override public Job getJob(final Tool tool) throws IOException { return new Job(tool.getConf()); } @Override public int submit( final Configuration configuration, final PropertyManagement runTimeProperties, final GeoWaveAnalyticJobRunner tool) throws Exception { return ToolRunner.run(configuration, tool, new String[] {}); } @Override public Counters waitForCompletion(final Job job) throws ClassNotFoundException, InterruptedException, Exception { final boolean status = job.waitForCompletion(true); return status ? job.getCounters() : null; } @Override public Configuration getConfiguration(final PropertyManagement runTimeProperties) throws IOException { return MapReduceJobController.getConfiguration(runTimeProperties); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/ConvexHullMapReduce.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.Projection; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.SimpleFeatureProjection; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.param.HullParameters; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import 
org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.locationtech.jts.algorithm.ConvexHull; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Compute the convex hull over all points associated with each centroid. Each hull is sent to * output as a simple features. * *

Properties: * * *

"ConvexHullMapReduce.Hull.DataTypeId" - Id of the data type to store the the polygons as * simple features - defaults to "convex_hull" *

"ConvexHullMapReduce.Hull.ProjectionClass" - instance of {@link * org.locationtech.geowave.analytic.Projection} *

"ConvexHullMapReduce.Hull.IndexId" - The Index ID used for output simple features. *

"ConvexHullMapReduce.Hull.WrapperFactoryClass" -> {@link AnalyticItemWrapperFactory} to * group and level associated with each entry * @see org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment * */ public class ConvexHullMapReduce { protected static final Logger LOGGER = LoggerFactory.getLogger(ConvexHullMapReduce.class); public static class ConvexHullMap extends GeoWaveWritableInputMapper { protected GeoWaveInputKey outputKey = new GeoWaveInputKey(); private ObjectWritable currentValue; private AnalyticItemWrapperFactory itemWrapperFactory; private NestedGroupCentroidAssignment nestedGroupCentroidAssigner; // Override parent since there is not need to decode the value. @Override protected void mapWritableValue( final GeoWaveInputKey key, final ObjectWritable value, final Mapper.Context context) throws IOException, InterruptedException { // cached for efficiency since the output is the input object // the de-serialized input object is only used for sampling. // For simplicity, allow the de-serialization to occur in all cases, // even though some sampling // functions do not inspect the input object. 
currentValue = value; super.mapWritableValue(key, value, context); } @Override protected void mapNativeValue( final GeoWaveInputKey key, final Object value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { @SuppressWarnings("unchecked") final AnalyticItemWrapper wrapper = itemWrapperFactory.create((T) value); outputKey.setInternalAdapterId(key.getInternalAdapterId()); outputKey.setDataId( new ByteArray( StringUtils.stringToBinary(nestedGroupCentroidAssigner.getGroupForLevel(wrapper)))); outputKey.setGeoWaveKey(key.getGeoWaveKey()); context.write(outputKey, currentValue); } @SuppressWarnings("unchecked") @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), ConvexHullMapReduce.class, ConvexHullMapReduce.LOGGER); try { itemWrapperFactory = config.getInstance( HullParameters.Hull.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); itemWrapperFactory.initialize( context, ConvexHullMapReduce.class, ConvexHullMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } try { nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<>( context, ConvexHullMapReduce.class, ConvexHullMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } } } public static class ConvexHullReducer extends GeoWaveWritableInputReducer { private CentroidManager centroidManager; private String[] indexNames; private FeatureDataAdapter outputAdapter; private Projection projectionFunction; /* * Logic inspired by SpatialHadoop convexHullStream method */ // absolute point cloud limit private final int pointCloudThreshold = 50000000; private final List batchCoords = new ArrayList<>(10000); @Override protected void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final 
Reducer.Context context) throws IOException, InterruptedException { // limit on new points per convex hull run (batch) int batchThreshold = 10000; batchCoords.clear(); Geometry currentHull = null; final String groupID = StringUtils.stringFromBinary(key.getDataId().getBytes()); final AnalyticItemWrapper centroid = centroidManager.getCentroid(groupID); for (final Object value : values) { currentHull = null; @SuppressWarnings("unchecked") final Geometry geo = projectionFunction.getProjection((T) value); final Coordinate[] coords = geo.getCoordinates(); if ((coords.length + batchCoords.size()) > pointCloudThreshold) { break; } for (final Coordinate coordinate : coords) { batchCoords.add(coordinate); } if (coords.length > batchThreshold) { batchThreshold = coords.length; } if (batchCoords.size() > batchThreshold) { currentHull = compress(key, batchCoords); } } currentHull = (currentHull == null) ? compress(key, batchCoords) : currentHull; if (ConvexHullMapReduce.LOGGER.isTraceEnabled()) { ConvexHullMapReduce.LOGGER.trace(centroid.getGroupID() + " contains " + groupID); } final SimpleFeature newPolygonFeature = AnalyticFeature.createGeometryFeature( outputAdapter.getFeatureType(), centroid.getBatchID(), UUID.randomUUID().toString(), centroid.getName(), centroid.getGroupID(), centroid.getCost(), currentHull, new String[0], new double[0], centroid.getZoomLevel(), centroid.getIterationID(), centroid.getAssociationCount()); // new center context.write( new GeoWaveOutputKey(outputAdapter.getTypeName(), indexNames), newPolygonFeature); } private static Geometry compress( final GeoWaveInputKey key, final List batchCoords) { final Coordinate[] actualCoords = batchCoords.toArray(new Coordinate[batchCoords.size()]); // generate convex hull for current batch of points final ConvexHull convexHull = new ConvexHull(actualCoords, new GeometryFactory()); final Geometry hullGeometry = convexHull.getConvexHull(); final Coordinate[] hullCoords = hullGeometry.getCoordinates(); 
batchCoords.clear(); for (final Coordinate hullCoord : hullCoords) { batchCoords.add(hullCoord); } return hullGeometry; } @SuppressWarnings("unchecked") @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), ConvexHullMapReduce.class, ConvexHullMapReduce.LOGGER); super.setup(context); try { centroidManager = new CentroidManagerGeoWave<>( context, ConvexHullMapReduce.class, ConvexHullMapReduce.LOGGER); } catch (final Exception e) { ConvexHullMapReduce.LOGGER.warn("Unable to initialize centroid manager", e); throw new IOException("Unable to initialize centroid manager"); } try { projectionFunction = config.getInstance( HullParameters.Hull.PROJECTION_CLASS, Projection.class, SimpleFeatureProjection.class); projectionFunction.initialize(context, ConvexHullMapReduce.class); } catch (final Exception e1) { throw new IOException(e1); } final String polygonDataTypeId = config.getString(HullParameters.Hull.DATA_TYPE_ID, "convex_hull"); outputAdapter = AnalyticFeature.createGeometryFeatureAdapter( polygonDataTypeId, new String[0], config.getString( HullParameters.Hull.DATA_NAMESPACE_URI, BasicFeatureTypes.DEFAULT_NAMESPACE), ClusteringUtils.CLUSTERING_CRS); indexNames = new String[] { config.getString( HullParameters.Hull.INDEX_NAME, new SpatialIndexBuilder().createIndex().getName())}; } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/GroupAssignmentMapReduce.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.extract.CentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.mapreduce.GroupIDText; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Adjust input items so that so that the assigned centroid becomes the group ID. If the item has an * assigned group ID, the resulting item's group ID is replaced in the output. * *

From a multi-level clustering algorithm, an item has a different grouping in each level. Items * are clustered within their respective groups. * * *

Context configuration parameters include: *

"GroupAssignmentMapReduce.Common.DistanceFunctionClass" -> Used to determine distance to * centroid *

"GroupAssignmentMapReduce.Centroid.ExtractorClass" -> {@link * org.locationtech.geowave.analytic.extract.CentroidExtractor} *

"GroupAssignmentMapReduce.Centroid.WrapperFactoryClass" -> {@link * AnalyticItemWrapperFactory} to extract wrap spatial objects with Centroid management * functions *

"GroupAssignmentMapReduce.Centroid.ZoomLevel" -> The current zoom level * @see CentroidManagerGeoWave * */ public class GroupAssignmentMapReduce { protected static final Logger LOGGER = LoggerFactory.getLogger(GroupAssignmentMapReduce.class); public static class GroupAssignmentMapper extends GeoWaveWritableInputMapper { private NestedGroupCentroidAssignment nestedGroupCentroidAssigner; protected GroupIDText outputKeyWritable = new GroupIDText(); protected ObjectWritable outputValWritable = new ObjectWritable(); protected CentroidExtractor centroidExtractor; protected AnalyticItemWrapperFactory itemWrapperFactory; private final Map logCounts = new HashMap<>(); @Override protected void mapNativeValue( final GeoWaveInputKey key, final Object value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { final AssociationNotification centroidAssociationFn = new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { pairing.getPairedItem().setGroupID(pairing.getCentroid().getID()); pairing.getPairedItem().setZoomLevel(pairing.getCentroid().getZoomLevel() + 1); // just get the contents of the returned ObjectWritable to // avoid // having to assign outputValWritable rather than update its // contents. 
// the 'toWritabeValue' method is efficient, not creating an // extra instance of // ObjectWritable each time, so this is just a simple // exchange of a reference outputValWritable.set( toWritableValue(key, pairing.getPairedItem().getWrappedItem()).get()); AtomicInteger ii = logCounts.get(pairing.getCentroid().getID()); if (ii == null) { ii = new AtomicInteger(0); logCounts.put(pairing.getCentroid().getID(), ii); } ii.incrementAndGet(); } }; nestedGroupCentroidAssigner.findCentroidForLevel( itemWrapperFactory.create(value), centroidAssociationFn); context.write(key, outputValWritable); } @Override protected void cleanup(final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { for (final Entry e : logCounts.entrySet()) { GroupAssignmentMapReduce.LOGGER.info(e.getKey() + " = " + e.getValue()); } super.cleanup(context); } @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), GroupAssignmentMapReduce.class, GroupAssignmentMapReduce.LOGGER); try { nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<>( context, GroupAssignmentMapReduce.class, GroupAssignmentMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } try { centroidExtractor = config.getInstance( CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidExtractor.class, SimpleFeatureCentroidExtractor.class); } catch (final Exception e1) { throw new IOException(e1); } try { itemWrapperFactory = config.getInstance( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); itemWrapperFactory.initialize( context, GroupAssignmentMapReduce.class, GroupAssignmentMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } } } } ================================================ FILE: 
analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/InputToOutputKeyReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.param.OutputParameters; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Copy data from an GeoWave Input to a index using the same adapter. 
*/ public class InputToOutputKeyReducer extends GeoWaveWritableInputReducer { protected static final Logger LOGGER = LoggerFactory.getLogger(InputToOutputKeyReducer.class); private GeoWaveOutputKey outputKey; private InternalAdapterStore internalAdapterStore; @Override protected void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { outputKey.setTypeName(internalAdapterStore.getTypeName(key.getInternalAdapterId())); for (final Object value : values) { context.write(outputKey, value); } } @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { super.setup(context); internalAdapterStore = GeoWaveOutputFormat.getJobContextInternalAdapterStore(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), InputToOutputKeyReducer.class, LOGGER); outputKey = new GeoWaveOutputKey( "na", new String[] {config.getString(OutputParameters.Output.INDEX_ID, "na")}); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/SimpleFeatureOutputReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering; import java.io.IOException; import java.util.Iterator; import java.util.UUID; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.ReduceContext; import org.apache.hadoop.mapreduce.Reducer; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.geowave.mapreduce.GeoWaveReducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Remove duplicate input objects and write out as a simple feature with geometry projected onto CRS * EPSG:4326. The output feature contains the ID of the originating object. The intent is to create * a light weight uniform object that reuses GeoWave data formats to feed analytic processes. * *

If the input object does not require adjustment after de-duplication, use {@link * org.locationtech.geowave.mapreduce.dedupe.GeoWaveDedupeReducer} * *

OutputFeature Attributes, see {@link * org.locationtech.geowave.analytic.AnalyticFeature.ClusterFeatureAttribute} * *

Context configuration parameters include: * * *

"SimpleFeatureOutputReducer.Extract.DimensionExtractClass" -> {@link DimensionExtractor} * to extract non-geometric dimensions *

"SimpleFeatureOutputReducer.Extract.OutputDataTypeId" -> the name of the output * SimpleFeature data type *

"SimpleFeatureOutputReducer.Global.BatchId" ->the id of the batch; defaults to current * time in millis (for range comparisons) * */ public class SimpleFeatureOutputReducer extends GeoWaveReducer { protected DimensionExtractor dimExtractor; protected String outputDataTypeID; protected String batchID; protected String groupID; protected FeatureDataAdapter outputAdapter; protected static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureOutputReducer.class); @Override protected void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final ReduceContext context) throws IOException, InterruptedException { final Iterator valIt = values.iterator(); if (valIt.hasNext()) { key.setInternalAdapterId( // TODO this is a bit of a hack, but the // adapter is seemingly completely // transient and never actually // persisted - it seems unlikely that // the value for internal adapter ID // even matters, but if it does this is // the best effort InternalAdapterStoreImpl.getLazyInitialAdapterId(outputAdapter.getTypeName())); final SimpleFeature feature = getSimpleFeature(key, valIt.next()); context.write(key, feature); } } private SimpleFeature getSimpleFeature(final GeoWaveInputKey key, final Object entry) { final Geometry geometry = dimExtractor.getGeometry(entry); final double[] extraDims = dimExtractor.getDimensions(entry); final String inputID = StringUtils.stringFromBinary(key.getDataId().getBytes()); final SimpleFeature pointFeature = AnalyticFeature.createGeometryFeature( outputAdapter.getFeatureType(), batchID, inputID, inputID, groupID, 0.0, geometry, dimExtractor.getDimensionNames(), extraDims, 1, 1, 0); return pointFeature; } @SuppressWarnings("unchecked") @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration(context.getConfiguration(), SimpleFeatureOutputReducer.class); outputDataTypeID = 
config.getString(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID, "reduced_features"); batchID = config.getString(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); groupID = config.getString(ExtractParameters.Extract.GROUP_ID, UUID.randomUUID().toString()); try { dimExtractor = config.getInstance( ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS, DimensionExtractor.class, EmptyDimensionExtractor.class); } catch (final Exception e1) { LOGGER.warn( "Failed to instantiate " + GeoWaveConfiguratorBase.enumToConfKey( SimpleFeatureOutputReducer.class, ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS), e1); throw new IOException( "Invalid configuration for " + GeoWaveConfiguratorBase.enumToConfKey( SimpleFeatureOutputReducer.class, ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS)); } outputAdapter = AnalyticFeature.createGeometryFeatureAdapter( outputDataTypeID, dimExtractor.getDimensionNames(), config.getString( ExtractParameters.Extract.DATA_NAMESPACE_URI, BasicFeatureTypes.DEFAULT_NAMESPACE), ClusteringUtils.CLUSTERING_CRS); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/AnalyticJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering.runner; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.Tool; public abstract class AnalyticJobRunner extends Configured implements Tool { @SuppressWarnings("deprecation") public int runJob() throws IOException, InterruptedException, ClassNotFoundException { final Configuration conf = super.getConf(); final Job job = Job.getInstance(conf); job.setJarByClass(this.getClass()); final boolean jobSuccess = job.waitForCompletion(true); return (jobSuccess) ? 0 : 1; } protected abstract void configure(Job job) throws Exception; @Override public int run(final String[] args) throws Exception { return runJob(); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/ClusteringRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering.runner; import org.locationtech.geowave.analytic.IndependentJobRunner; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.param.FormatConfiguration; public interface ClusteringRunner extends MapReduceJobRunner, IndependentJobRunner { public void setInputFormatConfiguration(FormatConfiguration formatConfiguration); public void setZoomLevel(int zoomLevel); } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/ConvexHullJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Job; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureProjection; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner; import org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.clustering.ConvexHullMapReduce; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.HullParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; /** */ public class ConvexHullJobRunner extends GeoWaveAnalyticJobRunner { private int zoomLevel = 1; public ConvexHullJobRunner() { 
super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration()); } public void setZoomLevel(final int zoomLevel) { this.zoomLevel = zoomLevel; } @Override public void configure(final Job job) throws Exception { job.setMapperClass(ConvexHullMapReduce.ConvexHullMap.class); job.setMapOutputKeyClass(GeoWaveInputKey.class); job.setMapOutputValueClass(ObjectWritable.class); job.setReducerClass(ConvexHullMapReduce.ConvexHullReducer.class); job.setReduceSpeculativeExecution(false); job.setOutputKeyClass(GeoWaveOutputKey.class); job.setOutputValueClass(Object.class); } @Override public Class getScope() { return ConvexHullMapReduce.class; } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { runTimeProperties.storeIfEmpty( HullParameters.Hull.PROJECTION_CLASS, SimpleFeatureProjection.class); runTimeProperties.setConfig( new ParameterEnum[] { HullParameters.Hull.WRAPPER_FACTORY_CLASS, HullParameters.Hull.PROJECTION_CLASS, HullParameters.Hull.DATA_TYPE_ID, HullParameters.Hull.INDEX_NAME}, config, getScope()); setReducerCount(runTimeProperties.getPropertyAsInt(HullParameters.Hull.REDUCER_COUNT, 4)); CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties); NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties); final int localZoomLevel = runTimeProperties.getPropertyAsInt(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel); // getting group from next level, now that the prior level is complete NestedGroupCentroidAssignment.setZoomLevel(config, getScope(), localZoomLevel + 1); addDataAdapter( config, getAdapter( runTimeProperties, HullParameters.Hull.DATA_TYPE_ID, HullParameters.Hull.DATA_NAMESPACE_URI)); checkIndex( runTimeProperties, HullParameters.Hull.INDEX_NAME, SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName()); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" 
// comes only from users with OS-level access anyway return super.run(config, runTimeProperties); } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(super.getParameters()); params.addAll( Arrays.asList( new ParameterEnum[] { StoreParameters.StoreParam.INPUT_STORE, StoreParameters.StoreParam.OUTPUT_STORE, GlobalParameters.Global.BATCH_ID})); params.addAll(MapReduceParameters.getParameters()); params.addAll(NestedGroupCentroidAssignment.getParameters()); params.addAll( Arrays.asList( new ParameterEnum[] { HullParameters.Hull.WRAPPER_FACTORY_CLASS, HullParameters.Hull.PROJECTION_CLASS, HullParameters.Hull.REDUCER_COUNT, HullParameters.Hull.DATA_TYPE_ID, HullParameters.Hull.DATA_NAMESPACE_URI, HullParameters.Hull.INDEX_NAME})); return params; } @Override protected String getJobName() { return "Convex Hull"; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GeoWaveAnalyticExtractJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.util.ToolRunner; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.IndependentJobRunner; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.clustering.SimpleFeatureOutputReducer; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam; import 
org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.store.api.Query;
import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;
import org.locationtech.geowave.mapreduce.dedupe.GeoWaveDedupeJobRunner;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;

/**
 * Run a map reduce job to extract a population of data from GeoWave (Accumulo), remove duplicates,
 * and output a SimpleFeature with the ID and the extracted geometry from each of the GeoWave data
 * item.
 */
public class GeoWaveAnalyticExtractJobRunner extends GeoWaveDedupeJobRunner implements
    MapReduceJobRunner,
    IndependentJobRunner {

  // HDFS base directory for job output; overwritten from MRConfig.HDFS_BASE_DIR in configure()
  private String outputBaseDir = "/tmp";
  // Number of reduce tasks; overwritten from Extract.REDUCER_COUNT in configure(), floor of 1
  private int reducerCount = 1;

  public GeoWaveAnalyticExtractJobRunner() {
    super(null); // Datastore options are set in configure()
  }

  /** @return the reducer count resolved from the job configuration (at least 1) */
  @Override
  protected int getNumReduceTasks() {
    return reducerCount;
  }

  /** @return the HDFS base directory resolved from the job configuration */
  @Override
  protected String getHdfsOutputBase() {
    return outputBaseDir;
  }

  /**
   * Resolves reducer count and output directory from the scoped configuration, registers the
   * output feature adapter built from the configured dimension extractor, and names the job after
   * the GeoWave namespace. Reduce-side speculative execution is disabled.
   */
  @Override
  protected void configure(final Job job) throws Exception {
    final ScopedJobConfiguration configWrapper =
        new ScopedJobConfiguration(job.getConfiguration(), SimpleFeatureOutputReducer.class);
    reducerCount = Math.max(configWrapper.getInt(ExtractParameters.Extract.REDUCER_COUNT, 8), 1);
    outputBaseDir = configWrapper.getString(MapReduceParameters.MRConfig.HDFS_BASE_DIR, "/tmp");
    LOGGER.info("Output base directory " + outputBaseDir);
    super.configure(job);
    @SuppressWarnings("rawtypes")
    final Class dimensionExtractorClass =
        job.getConfiguration().getClass(
            GeoWaveConfiguratorBase.enumToConfKey(
                SimpleFeatureOutputReducer.class,
                ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS),
            SimpleFeatureGeometryExtractor.class,
            DimensionExtractor.class);
    GeoWaveOutputFormat.addDataAdapter(
        job.getConfiguration(),
        createAdapter(
            job.getConfiguration().get(
                GeoWaveConfiguratorBase.enumToConfKey(
                    SimpleFeatureOutputReducer.class,
                    ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID)),
            job.getConfiguration().get(
                GeoWaveConfiguratorBase.enumToConfKey(
                    SimpleFeatureOutputReducer.class,
                    ExtractParameters.Extract.DATA_NAMESPACE_URI)),
            dimensionExtractorClass));
    job.setJobName("GeoWave Extract (" + dataStoreOptions.getGeoWaveNamespace() + ")");
    job.setReduceSpeculativeExecution(false);
  }

  /**
   * Builds the output FeatureDataAdapter whose attribute set comes from the configured dimension
   * extractor's dimension names.
   *
   * @param outputDataTypeID name of the output feature type
   * @param namespaceURI namespace for the output feature type
   * @param dimensionExtractorClass extractor providing the extra dimension attribute names
   */
  private FeatureDataAdapter createAdapter(
      final String outputDataTypeID,
      final String namespaceURI,
      @SuppressWarnings("rawtypes") final Class dimensionExtractorClass)
      throws InstantiationException, IllegalAccessException {
    // NOTE(review): Class.newInstance() is deprecated; switching to
    // getDeclaredConstructor().newInstance() would change the thrown exception types
    // (part of this method's signature), so it is left as-is.
    final DimensionExtractor extractor = dimensionExtractorClass.newInstance();
    return AnalyticFeature.createGeometryFeatureAdapter(
        outputDataTypeID,
        extractor.getDimensionNames(),
        namespaceURI,
        ClusteringUtils.CLUSTERING_CRS);
  }

  /** @return the dedupe output path, namespaced under the configured HDFS base directory */
  @Override
  public Path getHdfsOutputPath() {
    return new Path(getHdfsOutputBase() + "/" + dataStoreOptions.getGeoWaveNamespace() + "_dedupe");
  }

  @Override
  @SuppressWarnings("rawtypes")
  protected Class getReducer() {
    return SimpleFeatureOutputReducer.class;
  }

  /**
   * Transfers extract parameters, query constraints, split counts, and store options from the
   * property management into the Hadoop configuration, clears any pre-existing dedupe output
   * path, then launches the job via ToolRunner.
   */
  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    runTimeProperties.storeIfEmpty(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID, "centroid");
    runTimeProperties.setConfig(
        new ParameterEnum[] {
            MapReduceParameters.MRConfig.HDFS_BASE_DIR,
            ExtractParameters.Extract.REDUCER_COUNT,
            ExtractParameters.Extract.DATA_NAMESPACE_URI,
            ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID},
        config,
        SimpleFeatureOutputReducer.class);
    // group and batch IDs default to random UUIDs when not supplied
    config.set(
        GeoWaveConfiguratorBase.enumToConfKey(
            SimpleFeatureOutputReducer.class,
            ExtractParameters.Extract.GROUP_ID),
        runTimeProperties.getPropertyAsString(
            ExtractParameters.Extract.GROUP_ID,
            UUID.randomUUID().toString()));
    config.set(
        GeoWaveConfiguratorBase.enumToConfKey(
            SimpleFeatureOutputReducer.class,
            GlobalParameters.Global.BATCH_ID),
        runTimeProperties.getPropertyAsString(
            GlobalParameters.Global.BATCH_ID,
            UUID.randomUUID().toString()));
    final Query query = runTimeProperties.getPropertyAsQuery(ExtractParameters.Extract.QUERY);
    setMinInputSplits(
        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MIN_INPUT_SPLIT, 1));
    setMaxInputSplits(
        runTimeProperties.getPropertyAsInt(ExtractParameters.Extract.MAX_INPUT_SPLIT, 10000));
    // each query option group is pushed both into the config (for the job) and onto this
    // runner instance (for the parent GeoWaveDedupeJobRunner)
    if (query != null) {
      if (query.getQueryConstraints() != null) {
        GeoWaveInputFormat.setQueryConstraints(config, query.getQueryConstraints());
        setQueryConstraints(query.getQueryConstraints());
      }
      if (query.getCommonQueryOptions() != null) {
        GeoWaveInputFormat.setCommonQueryOptions(config, query.getCommonQueryOptions());
        setCommonQueryOptions(query.getCommonQueryOptions());
      }
      if (query.getDataTypeQueryOptions() != null) {
        GeoWaveInputFormat.setDataTypeQueryOptions(
            config,
            query.getDataTypeQueryOptions(),
            dataStoreOptions.createAdapterStore(),
            dataStoreOptions.createInternalAdapterStore());
        setDataTypeQueryOptions(query.getDataTypeQueryOptions());
      }
      if (query.getIndexQueryOptions() != null) {
        GeoWaveInputFormat.setIndexQueryOptions(
            config,
            query.getIndexQueryOptions(),
            dataStoreOptions.createIndexStore());
        setIndexQueryOptions(query.getIndexQueryOptions());
      }
    }
    if (minInputSplits != null) {
      GeoWaveInputFormat.setMinimumSplitCount(config, minInputSplits);
    }
    if (maxInputSplits != null) {
      GeoWaveInputFormat.setMaximumSplitCount(config, maxInputSplits);
    }
    setConf(config);
    config.setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            SimpleFeatureOutputReducer.class,
            ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS),
        runTimeProperties.getPropertyAsClass(
            ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,
            DimensionExtractor.class,
            SimpleFeatureGeometryExtractor.class),
        DimensionExtractor.class);
    final PersistableStore store =
        ((PersistableStore) runTimeProperties.getProperty(StoreParam.INPUT_STORE));
    dataStoreOptions = store.getDataStoreOptions();
    GeoWaveInputFormat.setStoreOptions(config, dataStoreOptions);
    GeoWaveOutputFormat.setStoreOptions(config, dataStoreOptions);
    try (final FileSystem fs = FileSystem.get(config)) {
      // remove any stale output from a prior run so the job can write fresh results
      if (fs.exists(getHdfsOutputPath())) {
        fs.delete(
            // HPFortify "Path Manipulation"
            // False positive - path is internally managed
            getHdfsOutputPath(),
            true);
      }
      return ToolRunner.run(config, this, new String[] {});
    }
  }

  /** @return the full set of parameters this runner understands */
  @Override
  public Collection> getParameters() {
    final Set> params = new HashSet<>();
    params.addAll(
        Arrays.asList(
            new ParameterEnum[] {
                ExtractParameters.Extract.REDUCER_COUNT,
                ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,
                ExtractParameters.Extract.DATA_NAMESPACE_URI,
                ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS,
                ExtractParameters.Extract.MIN_INPUT_SPLIT,
                ExtractParameters.Extract.MAX_INPUT_SPLIT,
                ExtractParameters.Extract.QUERY,
                StoreParam.INPUT_STORE,
                GlobalParameters.Global.BATCH_ID}));
    params.addAll(MapReduceParameters.getParameters());
    return params;
  }

  /** IndependentJobRunner entry point: derives the configuration from the properties. */
  @Override
  public int run(final PropertyManagement runTimeProperties) throws Exception {
    return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties);
  }

  /**
   * CLI-style entry point: builds and submits the job, converting any failure into a
   * ParseException.
   */
  @Override
  public boolean runOperation(final String[] args) throws ParseException {
    try {
      // NOTE(review): new Job(Configuration) is deprecated in favor of Job.getInstance(...);
      // left as-is to avoid behavior drift in this pass.
      final Job job = new Job(super.getConf());
      job.setJarByClass(this.getClass());
      configure(job);
      return job.waitForCompletion(true);
    } catch (final Exception e) {
      LOGGER.error("Unable to run job", e);
      throw new ParseException(e.getMessage());
    }
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GeoWaveInputLoadJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.clustering.runner;

import java.util.Arrays;
import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.locationtech.geowave.analytic.IndependentJobRunner;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration;
import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;
import org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration;
import org.locationtech.geowave.analytic.mapreduce.clustering.InputToOutputKeyReducer;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.MapReduceParameters;
import org.locationtech.geowave.analytic.param.OutputParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;

/**
 * Run a map reduce job that loads previously extracted items into a GeoWave output index,
 * translating each GeoWave input key to an output key ("GeoWave Input to Output").
*/
public class GeoWaveInputLoadJobRunner extends GeoWaveAnalyticJobRunner implements
    MapReduceJobRunner,
    IndependentJobRunner {

  public GeoWaveInputLoadJobRunner() {
    // defaults
    super.setInputFormatConfiguration(new GeoWaveInputFormatConfiguration());
    super.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration());
  }

  /**
   * Wires an identity Mapper with the InputToOutputKeyReducer; speculative execution is disabled
   * for both phases.
   */
  @Override
  public void configure(final Job job) throws Exception {
    job.setMapperClass(Mapper.class);
    job.setReducerClass(InputToOutputKeyReducer.class);
    job.setMapOutputKeyClass(GeoWaveInputKey.class);
    job.setMapOutputValueClass(ObjectWritable.class);
    job.setOutputKeyClass(GeoWaveOutputKey.class);
    job.setOutputValueClass(Object.class);
    job.setSpeculativeExecution(false);
    job.setJobName("GeoWave Input to Output");
    job.setReduceSpeculativeExecution(false);
  }

  /** @return the configuration scope class used to namespace job parameters */
  @Override
  public Class getScope() {
    return InputToOutputKeyReducer.class;
  }

  /**
   * Resolves the output index (falling back to the centroid index name, then a default spatial
   * index), registers the output adapter, and pushes output parameters into the configuration
   * before delegating to the parent runner.
   */
  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    final String indexId =
        checkIndex(
            runTimeProperties,
            OutputParameters.Output.INDEX_ID,
            runTimeProperties.getPropertyAsString(
                CentroidParameters.Centroid.INDEX_NAME,
                SpatialDimensionalityTypeProvider.createIndexFromOptions(
                    new SpatialOptions()).getName()));
    OutputParameters.Output.INDEX_ID.getHelper().setValue(config, getScope(), indexId);
    addDataAdapter(
        config,
        getAdapter(
            runTimeProperties,
            OutputParameters.Output.DATA_TYPE_ID,
            OutputParameters.Output.DATA_NAMESPACE_URI));
    runTimeProperties.setConfig(
        new ParameterEnum[] {
            OutputParameters.Output.DATA_TYPE_ID,
            OutputParameters.Output.DATA_NAMESPACE_URI,
            OutputParameters.Output.INDEX_ID},
        config,
        getScope());
    // HP Fortify "Command Injection" false positive
    // What Fortify considers "externally-influenced input"
    // comes only from users with OS-level access anyway
    return super.run(config, runTimeProperties);
  }

  /** @return the parent runner's parameters plus the output and map-reduce parameters */
  @Override
  public Collection> getParameters() {
    final Collection> params = super.getParameters();
    params.addAll(
        Arrays.asList(
            new OutputParameters.Output[] {
                OutputParameters.Output.INDEX_ID,
                OutputParameters.Output.DATA_TYPE_ID,
                OutputParameters.Output.DATA_NAMESPACE_URI}));
    params.addAll(MapReduceParameters.getParameters());
    return params;
  }

  @Override
  protected String getJobName() {
    return "Input Load";
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GroupAssigmentJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner; import org.locationtech.geowave.analytic.mapreduce.clustering.GroupAssignmentMapReduce; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; /** Assign group IDs to input items based on centroids. 
*/
// NOTE(review): the class name misspells "Assignment"; renaming would break external callers,
// so it is left as-is.
public class GroupAssigmentJobRunner extends GeoWaveAnalyticJobRunner {
  // zoom level pushed into NestedGroupCentroidAssignment at run time; defaults to the top level
  private int zoomLevel = 1;

  public GroupAssigmentJobRunner() {
    super.setReducerCount(8);
  }

  /** Set the zoom level to assign groups for. */
  public void setZoomLevel(final int zoomLevel) {
    this.zoomLevel = zoomLevel;
  }

  /** Wires the group-assignment mapper with a pass-through Reducer. */
  @Override
  public void configure(final Job job) throws Exception {
    job.setMapperClass(GroupAssignmentMapReduce.GroupAssignmentMapper.class);
    job.setMapOutputKeyClass(GeoWaveInputKey.class);
    job.setMapOutputValueClass(ObjectWritable.class);
    job.setReducerClass(Reducer.class);
    job.setOutputKeyClass(GeoWaveInputKey.class);
    job.setOutputValueClass(ObjectWritable.class);
  }

  /** @return the configuration scope class used to namespace job parameters */
  @Override
  public Class getScope() {
    return GroupAssignmentMapReduce.class;
  }

  /**
   * Pushes the input store options, centroid parameters, and zoom level into the configuration,
   * then delegates to the parent runner.
   */
  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    // Required since the Mapper uses the input format parameters to lookup
    // the adapter
    final DataStorePluginOptions dataStoreOptions =
        ((PersistableStore) runTimeProperties.getProperty(
            StoreParam.INPUT_STORE)).getDataStoreOptions();
    GeoWaveInputFormat.setStoreOptions(config, dataStoreOptions);
    runTimeProperties.setConfig(
        new ParameterEnum[] {
            CentroidParameters.Centroid.EXTRACTOR_CLASS,
            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,},
        config,
        GroupAssignmentMapReduce.class);
    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);
    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);
    NestedGroupCentroidAssignment.setZoomLevel(config, getScope(), zoomLevel);
    // HP Fortify "Command Injection" false positive
    // What Fortify considers "externally-influenced input"
    // comes only from users with OS-level access anyway
    return super.run(config, runTimeProperties);
  }

  /** @return the parent runner's parameters plus store, batch, centroid, and MR parameters */
  @Override
  public Collection> getParameters() {
    final Set> params = new HashSet<>();
    params.addAll(super.getParameters());
    params.addAll(
        Arrays.asList(
            new ParameterEnum[] {
                StoreParameters.StoreParam.INPUT_STORE,
                GlobalParameters.Global.BATCH_ID}));
    params.addAll(CentroidManagerGeoWave.getParameters());
    params.addAll(MapReduceParameters.getParameters());
    params.addAll(NestedGroupCentroidAssignment.getParameters());
    return params;
  }

  @Override
  protected String getJobName() {
    return "Group Assignment";
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/MultiLevelClusteringJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.clustering.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.GlobalParameters.Global; import org.locationtech.geowave.analytic.param.HullParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; /** * Runs a clustering at multiple levels. 
Lower levels cluster within each cluster of the higher * level. * *

Steps: * * *

(1) Extract and deduplicate items from GeoWave. *

(2) Cluster items within their assigned groups. Initially, items are all part of the same group. *

(3) Assign to each point the cluster (group id). *

(4) Repeat steps 2 to 3 for each lower level. *
 */
public abstract class MultiLevelClusteringJobRunner extends MapReduceJobController implements
    MapReduceJobRunner {

  // sub-runners for the per-level pipeline: extract -> cluster -> group-assign -> hull
  final GroupAssigmentJobRunner groupAssignmentRunner = new GroupAssigmentJobRunner();
  final GeoWaveAnalyticExtractJobRunner jobExtractRunner = new GeoWaveAnalyticExtractJobRunner();
  final ConvexHullJobRunner hullRunner = new ConvexHullJobRunner();

  public MultiLevelClusteringJobRunner() {
    init(new MapReduceJobRunner[] {}, new PostOperationTask[] {});
  }

  /** @return the level-specific clustering runner supplied by the concrete subclass */
  protected abstract ClusteringRunner getClusteringRunner();

  /**
   * @return the union of the parameters of all sub-runners, minus the centroid data type/namespace
   *         (managed internally for centroids)
   */
  @Override
  public Collection> getParameters() {
    final Set> params = new HashSet<>();
    params.addAll(jobExtractRunner.getParameters());
    params.addAll(hullRunner.getParameters());
    params.addAll(getClusteringRunner().getParameters());
    params.addAll(Arrays.asList(new ParameterEnum[] {Clustering.ZOOM_LEVELS, Global.BATCH_ID}));
    params.addAll(MapReduceParameters.getParameters());
    // the output data type is used for centroid management
    params.remove(CentroidParameters.Centroid.DATA_TYPE_ID);
    params.remove(CentroidParameters.Centroid.DATA_NAMESPACE_URI);
    return params;
  }

  @Override
  public int run(final Configuration configuration, final PropertyManagement propertyManagement)
      throws Exception {
    return runJob(configuration, propertyManagement);
  }

  /**
   * Seeds defaults into the property management, runs the extract job once, then for each zoom
   * level runs clustering, optional group assignment, and convex hull generation. Stops at the
   * first non-zero status and returns it.
   */
  private int runJob(final Configuration config, final PropertyManagement propertyManagement)
      throws Exception {
    final ClusteringRunner clusteringRunner = getClusteringRunner();
    final Integer zoomLevels = propertyManagement.getPropertyAsInt(Clustering.ZOOM_LEVELS, 1);
    jobExtractRunner.setConf(config);
    final String dataTypeId =
        propertyManagement.getPropertyAsString(
            ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID,
            "centroid");
    final String namespaceURI =
        propertyManagement.getPropertyAsString(
            ExtractParameters.Extract.DATA_NAMESPACE_URI,
            BasicFeatureTypes.DEFAULT_NAMESPACE);
    propertyManagement.storeIfEmpty(ExtractParameters.Extract.DATA_NAMESPACE_URI, namespaceURI);
    propertyManagement.storeIfEmpty(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID, dataTypeId);
    propertyManagement.storeIfEmpty(
        CentroidParameters.Centroid.EXTRACTOR_CLASS,
        SimpleFeatureCentroidExtractor.class);
    propertyManagement.storeIfEmpty(
        CommonParameters.Common.DIMENSION_EXTRACT_CLASS,
        SimpleFeatureGeometryExtractor.class);
    // the extract output type/namespace doubles as the centroid type/namespace
    propertyManagement.store(CentroidParameters.Centroid.DATA_TYPE_ID, dataTypeId);
    propertyManagement.store(CentroidParameters.Centroid.DATA_NAMESPACE_URI, namespaceURI);
    // TODO: set out index type for extracts?
    propertyManagement.storeIfEmpty(
        CentroidParameters.Centroid.INDEX_NAME,
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());
    propertyManagement.storeIfEmpty(
        HullParameters.Hull.INDEX_NAME,
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());
    // first: extract data
    int status = jobExtractRunner.run(config, propertyManagement);
    final Path extractPath = jobExtractRunner.getHdfsOutputPath();
    // all downstream runners initially read the extract output
    groupAssignmentRunner.setInputFormatConfiguration(
        new SequenceFileInputFormatConfiguration(extractPath));
    clusteringRunner.setInputFormatConfiguration(
        new SequenceFileInputFormatConfiguration(extractPath));
    hullRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration(extractPath));
    final boolean retainGroupAssigments =
        propertyManagement.getPropertyAsBoolean(Clustering.RETAIN_GROUP_ASSIGNMENTS, false);
    // run clustering for each level
    final String outputBaseDir =
        propertyManagement.getPropertyAsString(MapReduceParameters.MRConfig.HDFS_BASE_DIR, "/tmp");
    FileSystem fs = null;
    try {
      fs = FileSystem.get(config);
      for (int i = 0; (status == 0) && (i < zoomLevels); i++) {
        final int zoomLevel = i + 1;
        clusteringRunner.setZoomLevel(zoomLevel);
        hullRunner.setZoomLevel(zoomLevel);
        // need to get this removed at some point.
        propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel);
        status = clusteringRunner.run(config, propertyManagement);
        if (status == 0) {
          final Path nextPath = new Path(outputBaseDir + "/" + "level_" + zoomLevel);
          if (fs.exists(nextPath)) {
            // HPFortify "Path Manipulation"
            // False positive - path is internally managed
            fs.delete(nextPath, true);
          }
          groupAssignmentRunner.setOutputFormatConfiguration(
              new SequenceFileOutputFormatConfiguration(nextPath));
          groupAssignmentRunner.setZoomLevel(zoomLevel);
          // HP Fortify "Command Injection" false positive
          // What Fortify considers "externally-influenced input"
          // comes only from users with OS-level access anyway
          status = retainGroupAssigments ? groupAssignmentRunner.run(config, propertyManagement) : 0;
          if (status == 0) {
            // HP Fortify "Command Injection" false positive
            // What Fortify considers "externally-influenced input"
            // comes only from users with OS-level access anyway
            status = hullRunner.run(config, propertyManagement);
          }
          // when group assignments are retained, the next level reads this level's output
          if (retainGroupAssigments) {
            clusteringRunner.setInputFormatConfiguration(
                new SequenceFileInputFormatConfiguration(nextPath));
            hullRunner.setInputFormatConfiguration(
                new SequenceFileInputFormatConfiguration(nextPath));
            groupAssignmentRunner.setInputFormatConfiguration(
                new SequenceFileInputFormatConfiguration(nextPath));
          }
        }
      }
      return status;
    } finally {
      // NOTE(review): FileSystem.get(config) may return a cached instance shared by other
      // callers; closing it here could affect them — confirm against Hadoop's fs.cache behavior.
      if (fs != null) {
        fs.close();
      }
    }
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/MultiLevelJumpKMeansClusteringJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.clustering.runner;

import org.locationtech.geowave.analytic.mapreduce.kmeans.runner.KMeansJumpJobRunner;

/**
 * Multi-level clustering that uses the k-means "jump" method at each level.
 *
 * @see KMeansJumpJobRunner
 */
public class MultiLevelJumpKMeansClusteringJobRunner extends MultiLevelClusteringJobRunner {
  /** @return a fresh KMeansJumpJobRunner for each level */
  @Override
  protected ClusteringRunner getClusteringRunner() {
    return new KMeansJumpJobRunner();
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/MultiLevelKMeansClusteringJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.clustering.runner;

import org.locationtech.geowave.analytic.mapreduce.kmeans.runner.KMeansParallelJobRunner;

/**
 * Multi-level clustering that uses parallel k-means at each level.
 *
 * @see KMeansParallelJobRunner
 */
public class MultiLevelKMeansClusteringJobRunner extends MultiLevelClusteringJobRunner {
  /** @return a fresh KMeansParallelJobRunner for each level */
  @Override
  protected ClusteringRunner getClusteringRunner() {
    return new KMeansParallelJobRunner();
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/Cluster.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.dbscan;

import java.util.Set;
import org.locationtech.geowave.analytic.nn.NeighborList;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.jts.geom.Geometry;

/**
 * A DBSCAN cluster: a neighbor list with an identity, a geometry, links to other clusters, and a
 * lifecycle (finish/invalidate).
 */
public interface Cluster extends NeighborList {
  /** Absorb the contents of the given cluster into this one. */
  public void merge(Cluster cluster);

  /** @return the unique identifier of this cluster */
  public ByteArray getId();

  /*
   * Return the cluster to which this cluster is linked
   */
  public Set getLinkedClusters();

  /** @return the current number of linked clusters */
  public int currentLinkSetSize();

  /** Mark this cluster as invalid. */
  public void invalidate();

  /** Complete the cluster once no more members will be added. */
  public void finish();

  /** @return whether this cluster's contents have been compressed */
  public boolean isCompressed();

  /** @return the geometry representing this cluster */
  public Geometry getGeometry();
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterItem.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.dbscan;

import org.locationtech.jts.geom.Geometry;

/**
 * A DBSCAN work item: an identified geometry with an associated item count and a flag indicating
 * whether the geometry is a compressed representation. Identity (equals/hashCode) is based solely
 * on the item's {@code id}.
 */
public class ClusterItem {
  private final String id;
  private Geometry geometry;
  private long count;
  private boolean compressed = false;

  public ClusterItem(
      final String id,
      final Geometry geometry,
      final long count,
      final boolean compressed) {
    super();
    this.id = id;
    this.geometry = geometry;
    this.count = count;
    this.compressed = compressed;
  }

  /** Permanently flag this item as compressed; there is no way to clear the flag. */
  public void setCompressed() {
    compressed = true;
  }

  protected boolean isCompressed() {
    return compressed;
  }

  protected String getId() {
    return id;
  }

  protected Geometry getGeometry() {
    return geometry;
  }

  protected long getCount() {
    return count;
  }

  /** Hash derives only from {@code id} (31 * 1 + id hash, 0 for a null id). */
  @Override
  public int hashCode() {
    return 31 + ((id == null) ? 0 : id.hashCode());
  }

  /** Two items are equal when they are the same class and share the same {@code id}. */
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if ((obj == null) || (getClass() != obj.getClass())) {
      return false;
    }
    final ClusterItem that = (ClusterItem) obj;
    return (id == null) ? (that.id == null) : id.equals(that.id);
  }

  @Override
  public String toString() {
    final StringBuilder text = new StringBuilder("ClusterItem [id=");
    text.append(id);
    text.append(", geometry=");
    text.append(geometry);
    text.append(", count=");
    text.append(count);
    text.append("]");
    return text.toString();
  }

  public void setGeometry(final Geometry geometry) {
    this.geometry = geometry;
  }

  public void setCount(final long count) {
    this.count = count;
  }
}
================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterItemDistanceFn.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterItemDistanceFn.ClusterProfileContext; import org.locationtech.geowave.analytic.nn.DistanceProfile; import org.locationtech.geowave.analytic.nn.DistanceProfileGenerateFn; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.locationtech.jts.operation.distance.DistanceOp; /** Calculate distance between two cluster items. 
*/ public class ClusterItemDistanceFn implements DistanceFn, DistanceProfileGenerateFn { /** */ private static final long serialVersionUID = 3824608959408031752L; private DistanceFn coordinateDistanceFunction = new CoordinateCircleDistanceFn(); /** Used to reduce memory GC */ private static final ThreadLocal> profile = new ThreadLocal>() { @Override protected DistanceProfile initialValue() { return new DistanceProfile<>(0.0, new ClusterProfileContext()); } }; public ClusterItemDistanceFn() {} public ClusterItemDistanceFn(final DistanceFn coordinateDistanceFunction) { super(); this.coordinateDistanceFunction = coordinateDistanceFunction; } public DistanceFn getCoordinateDistanceFunction() { return coordinateDistanceFunction; } public void setCoordinateDistanceFunction( final DistanceFn coordinateDistanceFunction) { this.coordinateDistanceFunction = coordinateDistanceFunction; } @Override public double measure(final ClusterItem x, final ClusterItem y) { final Geometry gx = x.getGeometry(); final Geometry gy = y.getGeometry(); if ((gx instanceof Point) && (gy instanceof Point)) { return coordinateDistanceFunction.measure(gx.getCoordinate(), gy.getCoordinate()); } final DistanceOp op = new DistanceOp(gx, gy); final Coordinate[] points = op.nearestPoints(); return coordinateDistanceFunction.measure(points[0], points[1]); } @Override public DistanceProfile computeProfile( final ClusterItem item1, final ClusterItem item2) { final DistanceProfile localProfile = profile.get(); final ClusterProfileContext context = localProfile.getContext(); final Geometry gx = item1.getGeometry(); final Geometry gy = item2.getGeometry(); context.setItem1(item1); context.setItem2(item2); if ((gx instanceof Point) && (gy instanceof Point)) { context.setPoint1(gx.getCoordinate()); context.setPoint2(gy.getCoordinate()); } else { final DistanceOp op = new DistanceOp(gx, gy); final Coordinate[] points = op.nearestPoints(); context.setPoint1(points[0]); context.setPoint2(points[1]); } 
localProfile.setDistance( coordinateDistanceFunction.measure(context.getPoint1(), context.getPoint2())); return localProfile; } public static class ClusterProfileContext { private Coordinate point1; private ClusterItem item1; private Coordinate point2; private ClusterItem item2; public Coordinate getPoint1() { return point1; } public void setPoint1(final Coordinate point1) { this.point1 = point1; } public ClusterItem getItem1() { return item1; } public void setItem1(final ClusterItem item1) { this.item1 = item1; } public Coordinate getPoint2() { return point2; } public void setPoint2(final Coordinate point2) { this.point2 = point2; } public ClusterItem getItem2() { return item2; } public void setItem2(final ClusterItem item2) { this.item2 = item2; } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterNeighborList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.analytic.nn.DistanceProfile; import org.locationtech.geowave.analytic.nn.NeighborList; import org.locationtech.geowave.analytic.nn.NeighborListFactory; import org.locationtech.geowave.core.index.ByteArray; public class ClusterNeighborList implements NeighborList { private final ByteArray id; final Map index; final NeighborListFactory factory; public ClusterNeighborList( final ByteArray centerId, final ClusterItem center, final NeighborListFactory factory, final Map index) { super(); this.index = index; id = centerId; this.factory = factory; Cluster cluster = getCluster(); if (cluster == null) { cluster = (Cluster) factory.buildNeighborList(id, center); index.put(id, cluster); } } public Cluster getCluster() { return index.get(id); } @Override public Iterator> iterator() { return getCluster().iterator(); } @Override public boolean add( final DistanceProfile distanceProfile, final ByteArray id, final ClusterItem value) { Cluster cluster = index.get(id); if (cluster == null) { cluster = (Cluster) factory.buildNeighborList(id, value); index.put(id, cluster); } return getCluster().add(distanceProfile, id, value); } @Override public InferType infer(final ByteArray id, final ClusterItem value) { return getCluster().infer(id, value); } @Override public void clear() { getCluster().clear(); } @Override public int size() { return getCluster().size(); } @Override public boolean isEmpty() { return getCluster().isEmpty(); } public static class 
ClusterNeighborListFactory implements NeighborListFactory { final Map index; final NeighborListFactory factory; public ClusterNeighborListFactory( final NeighborListFactory factory, final Map index) { super(); this.index = index; this.factory = factory; } public Map getIndex() { return index; } @Override public NeighborList buildNeighborList( final ByteArray centerId, final ClusterItem center) { return new ClusterNeighborList(centerId, center, factory, index); } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/ClusterUnionList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.dbscan;

import java.util.Map;
import org.locationtech.geowave.analytic.nn.DistanceProfile;
import org.locationtech.geowave.analytic.nn.NeighborList;
import org.locationtech.geowave.analytic.nn.NeighborListFactory;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A cluster represented by a hull.
 *
 * <p> Intended to run in a single thread. Not Thread Safe.
 *
 * <p> TODO: connectGeometryTool.connect(
 */
public class ClusterUnionList extends DBScanClusterList implements Cluster {
  protected static final Logger LOGGER = LoggerFactory.getLogger(ClusterUnionList.class);

  public ClusterUnionList(
      final ByteArray centerId,
      final ClusterItem center,
      // factory is unused here; parameter kept for signature parity with sibling cluster types
      final NeighborListFactory<ClusterItem> factory,
      final Map<ByteArray, Cluster> index) {
    super(center.getGeometry(), (int) center.getCount(), centerId, index);
  }

  /**
   * Members are not counted individually for union lists; merging is purely geometric, so no
   * additional count is contributed per added neighbor.
   */
  @Override
  protected long addAndFetchCount(
      final ByteArray id,
      final ClusterItem newInstance,
      final DistanceProfile<?> distanceProfile) {
    return 0;
  }

  @Override
  public void merge(final Cluster cluster) {
    super.merge(cluster);
    if (cluster != this) {
      // Fold the merged cluster's geometry into this cluster's hull.
      union(((DBScanClusterList) cluster).clusterGeo);
    }
  }

  /** Union lists always hold a compressed (hull) representation. */
  @Override
  public boolean isCompressed() {
    return true;
  }

  @Override
  protected Geometry compress() {
    return clusterGeo;
  }

  /**
   * Factory producing {@link ClusterUnionList} instances, reusing an existing cluster from the
   * shared index when one is already registered for the center id.
   */
  public static class ClusterUnionListFactory implements NeighborListFactory<ClusterItem> {
    private final Map<ByteArray, Cluster> index;

    public ClusterUnionListFactory(final Map<ByteArray, Cluster> index) {
      super();
      this.index = index;
    }

    @Override
    public NeighborList<ClusterItem> buildNeighborList(
        final ByteArray centerId,
        final ClusterItem center) {
      Cluster list = index.get(centerId);
      if (list == null) {
        list = new ClusterUnionList(centerId, center, this, index);
      }
      return list;
    }
  }
}



================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanClusterList.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.analytic.GeometryHullTool; import org.locationtech.geowave.analytic.nn.DistanceProfile; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.TopologyException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Represents a cluster. Maintains links to other clusters through shared components Maintains * counts contributed by components of this cluster. Supports merging with other clusters, * incrementing the count by only those components different from the other cluster. * *

Intended to run in a single thread. Not Thread Safe. */ public abstract class DBScanClusterList implements Cluster { protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanClusterList.class); // internal state protected Geometry clusterGeo = null; protected int itemCount = 1; private Set linkedClusters = null; private List ids = null; private final ByteArray id; // global configuration...to save memory...passing this stuff around. private static GeometryHullTool connectGeometryTool = new GeometryHullTool(); private static int mergeSize = 0; // global state // ID to cluster. protected final Map index; public static GeometryHullTool getHullTool() { return connectGeometryTool; } public static void setMergeSize(final int size) { mergeSize = size; } public DBScanClusterList( final Geometry clusterGeo, final int itemCount, final ByteArray centerId, final Map index) { super(); this.clusterGeo = clusterGeo; this.itemCount = itemCount; this.index = index; id = centerId; } protected abstract long addAndFetchCount( final ByteArray newId, final ClusterItem newInstance, final DistanceProfile distanceProfile); @Override public final boolean add( final DistanceProfile distanceProfile, final ByteArray newId, final ClusterItem newInstance) { LOGGER.trace("link {} to {}", newId, id); if (!getLinkedClusters(true).add(newId)) { return false; } final Cluster cluster = index.get(newId); if (cluster == this) { return false; } incrementItemCount(addAndFetchCount(newId, newInstance, distanceProfile)); return true; } protected List getIds(final boolean allowUpdates) { if ((ids == null) || (ids == Collections.emptyList())) { ids = allowUpdates ? new ArrayList<>(4) : Collections.emptyList(); } return ids; } protected Set getLinkedClusters(final boolean allowUpdates) { if ((linkedClusters == null) || (linkedClusters == Collections.emptySet())) { linkedClusters = allowUpdates ? 
new HashSet<>() : Collections.emptySet(); } return linkedClusters; } protected void incrementItemCount(final long amount) { final int c = itemCount; itemCount += amount; assert (c <= itemCount); } /** * Clear the contents. Invoked when the contents of a cluster are merged with another cluster. * This method is supportive for GC, not serving any algorithm logic. */ @Override public void clear() { linkedClusters = null; clusterGeo = null; } @Override public void invalidate() { for (final ByteArray linkedId : getLinkedClusters(true)) { final Cluster linkedCluster = index.get(linkedId); if ((linkedCluster != null) && (linkedCluster != this) && (linkedCluster instanceof DBScanClusterList)) { ((DBScanClusterList) linkedCluster).getLinkedClusters(false).remove(id); } } LOGGER.trace("Invalidate " + id); index.remove(id); linkedClusters = null; clusterGeo = null; itemCount = -1; } @Override public InferType infer(final ByteArray id, final ClusterItem value) { final Cluster cluster = index.get(id); if ((cluster == this) || getLinkedClusters(false).contains(id)) { return InferType.SKIP; } return InferType.NONE; } @Override public Iterator> iterator() { return Collections.>emptyList().iterator(); } @Override public int currentLinkSetSize() { return getLinkedClusters(false).size(); } @Override public void finish() { mergeLinks(true); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((id == null) ? 
0 : id.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final DBScanClusterList other = (DBScanClusterList) obj; if (id == null) { if (other.id != null) { return false; } } else if (!id.equals(other.id)) { return false; } return true; } @Override public int size() { return (itemCount); } @Override public boolean isEmpty() { return size() <= 0; } @Override public Geometry getGeometry() { return compress(); } @Override public abstract boolean isCompressed(); @Override public void merge(final Cluster cluster) { final boolean removedLinked = getLinkedClusters(true).remove(cluster.getId()); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Merging {} into {}", cluster.getId(), id); } if (cluster != this) { getIds(true).add(cluster.getId()); index.put(cluster.getId(), this); if (cluster instanceof DBScanClusterList) { for (final ByteArray id : ((DBScanClusterList) cluster).getIds(false)) { index.put(id, this); ids.add(id); } getLinkedClusters(true).addAll(((DBScanClusterList) cluster).getLinkedClusters(false)); } if (isCompressed() && ((DBScanClusterList) cluster).isCompressed()) { incrementItemCount( (long) (interpolateFactor(((DBScanClusterList) cluster).clusterGeo) * ((DBScanClusterList) cluster).itemCount)); } else if (!removedLinked) { incrementItemCount(1); } } } protected double interpolateFactor(final Geometry areaBeingMerged) { try { if (clusterGeo == null) { return 1.0; } final Geometry intersection = areaBeingMerged.intersection(clusterGeo); final double geo2Area = areaBeingMerged.getArea(); if (intersection != null) { if ((intersection instanceof Point) && (areaBeingMerged instanceof Point)) { return 0.0; } else if (intersection.isEmpty()) { return 1.0; } else if (geo2Area > 0) { return 1.0 - (intersection.getArea() / geo2Area); } else { return 0.0; } } return 1.0; } catch (final Exception ex) { 
LOGGER.warn("Cannot calculate difference of geometries to interpolate size ", ex); } return 0.0; } @Override public ByteArray getId() { return id; } protected abstract Geometry compress(); @Override public Set getLinkedClusters() { return getLinkedClusters(false); } protected void union(final Geometry otherGeo) { if (otherGeo == null) { return; } try { if (clusterGeo == null) { clusterGeo = otherGeo; } else if (clusterGeo instanceof Point) { clusterGeo = connectGeometryTool.connect(otherGeo, clusterGeo); } else { clusterGeo = connectGeometryTool.connect(clusterGeo, otherGeo); } } catch (final TopologyException ex) { LOGGER.error("Union failed due to non-simple geometries", ex); clusterGeo = connectGeometryTool.createHullFromGeometry( clusterGeo, Arrays.asList(otherGeo.getCoordinates()), false); } } protected void mergeLinks(final boolean deleteNonLinks) { if (getLinkedClusters(false).size() == 0) { return; } final Set readyClusters = new HashSet<>(); readyClusters.add(this); buildClusterLists(readyClusters, this, deleteNonLinks); readyClusters.remove(this); final Iterator finishedIt = readyClusters.iterator(); final Cluster top = this; while (finishedIt.hasNext()) { top.merge(finishedIt.next()); } } private void buildClusterLists( final Set readyClusters, final DBScanClusterList cluster, final boolean deleteNonLinks) { for (final ByteArray linkedClusterId : cluster.getLinkedClusters()) { final Cluster linkedCluster = index.get(linkedClusterId); if (readyClusters.add(linkedCluster) && (linkedCluster.size() >= mergeSize)) { buildClusterLists(readyClusters, (DBScanClusterList) linkedCluster, false); } } } @Override public String toString() { return "DBScanClusterList [clusterGeo=" + (clusterGeo == null ? 
"null" : clusterGeo.toString()) + ", id=" + id + "]"; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanIterationsJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.io.IOException; import java.io.Serializable; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.TaskCounter; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.analytic.IndependentJobRunner; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.clustering.runner.GeoWaveInputLoadJobRunner; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PassthruPartitioner; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.HullParameters; import 
org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.OutputParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.PartitionParameters; import org.locationtech.geowave.analytic.param.PartitionParameters.Partition; import org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner; import org.locationtech.geowave.analytic.partitioner.Partitioner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * DBScan involves multiple iterations. The first iteration conceivably takes a set of points and * produces small clusters (nearest neighbors). Each subsequent iteration merges clusters within a * given distance from each other. This process can continue no new clusters are created (merges do * not occur). * *

The first iteration places a constraint on the minimum number of neighbors. Subsequent * iterations do not have a minimum, since each of the clusters is already vetted out by the first * iteration. */ public class DBScanIterationsJobRunner implements MapReduceJobRunner, IndependentJobRunner { protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanIterationsJobRunner.class); DBScanJobRunner jobRunner = new DBScanJobRunner(); GeoWaveInputLoadJobRunner inputLoadRunner = new GeoWaveInputLoadJobRunner(); protected FormatConfiguration inputFormatConfiguration; protected int zoomLevel = 1; public DBScanIterationsJobRunner() { super(); inputFormatConfiguration = new GeoWaveInputFormatConfiguration(); jobRunner.setInputFormatConfiguration(inputFormatConfiguration); inputLoadRunner.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration()); } public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { this.inputFormatConfiguration = inputFormatConfiguration; } public void setReducerCount(final int reducerCount) { jobRunner.setReducerCount(reducerCount); } protected void setZoomLevel(final int zoomLevel) { this.zoomLevel = zoomLevel; } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { runTimeProperties.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); FileSystem fs = null; try { fs = FileSystem.get(config); final String outputBaseDir = runTimeProperties.getPropertyAsString(MapReduceParameters.MRConfig.HDFS_BASE_DIR, "/tmp"); Path startPath = new Path(outputBaseDir + "/level_0"); if (fs.exists(startPath)) { // HPFortify "Path Manipulation" // False positive - path is internally managed fs.delete(startPath, true); } runTimeProperties.storeIfEmpty( Partition.PARTITIONER_CLASS, OrthodromicDistancePartitioner.class); final double maxDistance = runTimeProperties.getPropertyAsDouble(Partition.MAX_DISTANCE, 10); final double 
precisionDecreaseRate = runTimeProperties.getPropertyAsDouble(Partition.PARTITION_DECREASE_RATE, 0.15); double precisionFactor = runTimeProperties.getPropertyAsDouble(Partition.PARTITION_PRECISION, 1.0); runTimeProperties.storeIfEmpty(Partition.DISTANCE_THRESHOLDS, Double.toString(maxDistance)); final boolean overrideSecondary = runTimeProperties.hasProperty(Partition.SECONDARY_PARTITIONER_CLASS); if (!overrideSecondary) { final Serializable distances = runTimeProperties.get(Partition.DISTANCE_THRESHOLDS); String dstStr; if (distances == null) { dstStr = "0.000001"; } else { dstStr = distances.toString(); } final String distancesArray[] = dstStr.split(","); final double[] distancePerDimension = new double[distancesArray.length]; { int i = 0; for (final String eachDistance : distancesArray) { distancePerDimension[i++] = Double.valueOf(eachDistance); } } boolean secondary = precisionFactor < 1.0; double total = 1.0; for (final double dist : distancePerDimension) { total *= dist; } secondary |= (total >= (Math.pow(maxDistance, distancePerDimension.length) * 2.0)); if (secondary) { runTimeProperties.copy( Partition.PARTITIONER_CLASS, Partition.SECONDARY_PARTITIONER_CLASS); } } jobRunner.setInputFormatConfiguration(inputFormatConfiguration); jobRunner.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration(startPath)); LOGGER.info("Running with partition distance {}", maxDistance); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway final int initialStatus = jobRunner.run(config, runTimeProperties); if (initialStatus != 0) { return initialStatus; } precisionFactor = precisionFactor - precisionDecreaseRate; int maxIterationCount = runTimeProperties.getPropertyAsInt(ClusteringParameters.Clustering.MAX_ITERATIONS, 15); int iteration = 2; long lastRecordCount = 0; while ((maxIterationCount > 0) && (precisionFactor > 0)) { // context does not mater in this 
case try { final Partitioner partitioner = runTimeProperties.getClassInstance( PartitionParameters.Partition.PARTITIONER_CLASS, Partitioner.class, OrthodromicDistancePartitioner.class); partitioner.initialize(Job.getInstance(config), partitioner.getClass()); } catch (final IllegalArgumentException argEx) { // this occurs if the partitioner decides that the distance // is // invalid (e.g. bigger than the map space). // In this case, we just exist out of the loop. // startPath has the final data LOGGER.info("Distance is invalid", argEx); break; } catch (final Exception e1) { throw new IOException(e1); } final PropertyManagement localScopeProperties = new PropertyManagement(runTimeProperties); /** * Re-partitioning the fat geometries can force a large number of partitions. The geometries * end up being represented in multiple partitions. Better to skip secondary partitioning. * 0.9 is a bit of a magic number. Ideally, it is based on the area of the max distance cube * divided by the area as defined by threshold distances. However, looking up the partition * dimension space or assuming only two dimensions were both undesirable. 
*/ if ((precisionFactor <= 0.9) && !overrideSecondary) { localScopeProperties.store( Partition.SECONDARY_PARTITIONER_CLASS, PassthruPartitioner.class); } localScopeProperties.store(Partition.PARTITION_PRECISION, precisionFactor); jobRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration(startPath)); jobRunner.setFirstIteration(false); localScopeProperties.store(HullParameters.Hull.ZOOM_LEVEL, zoomLevel); localScopeProperties.store(HullParameters.Hull.ITERATION, iteration); localScopeProperties.storeIfEmpty( OutputParameters.Output.DATA_TYPE_ID, localScopeProperties.getPropertyAsString( HullParameters.Hull.DATA_TYPE_ID, "concave_hull")); // Set to zero to force each cluster to be moved into the next // iteration // even if no merge occurs localScopeProperties.store(ClusteringParameters.Clustering.MINIMUM_SIZE, 0); final Path nextPath = new Path(outputBaseDir + "/level_" + iteration); if (fs.exists(nextPath)) { // HPFortify "Path Manipulation" // False positive - path is internally managed fs.delete(nextPath, true); } jobRunner.setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration(nextPath)); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway final int status = jobRunner.run(config, localScopeProperties); if (status != 0) { return status; } final long currentOutputCount = jobRunner.getCounterValue(TaskCounter.REDUCE_OUTPUT_RECORDS); if (currentOutputCount == lastRecordCount) { maxIterationCount = 0; } lastRecordCount = currentOutputCount; startPath = nextPath; maxIterationCount--; precisionFactor -= precisionDecreaseRate; iteration++; } final PropertyManagement localScopeProperties = new PropertyManagement(runTimeProperties); localScopeProperties.storeIfEmpty( OutputParameters.Output.DATA_TYPE_ID, localScopeProperties.getPropertyAsString( HullParameters.Hull.DATA_TYPE_ID, "concave_hull")); localScopeProperties.storeIfEmpty( 
OutputParameters.Output.DATA_NAMESPACE_URI, localScopeProperties.getPropertyAsString( HullParameters.Hull.DATA_NAMESPACE_URI, BasicFeatureTypes.DEFAULT_NAMESPACE)); localScopeProperties.storeIfEmpty( OutputParameters.Output.INDEX_ID, localScopeProperties.get(HullParameters.Hull.INDEX_NAME)); inputLoadRunner.setInputFormatConfiguration( new SequenceFileInputFormatConfiguration(startPath)); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway inputLoadRunner.run(config, runTimeProperties); } finally { if (fs != null) { fs.close(); } } return 0; } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(jobRunner.getParameters()); params.addAll(inputLoadRunner.getParameters()); params.add(Clustering.MAX_ITERATIONS); params.add(Partition.PARTITION_DECREASE_RATE); params.add(Partition.PARTITION_PRECISION); return params; } @Override public int run(final PropertyManagement runTimeProperties) throws Exception { return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.util.Arrays; import java.util.Collection; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.mapreduce.Job; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.analytic.AdapterWithObjectWritable; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.Projection; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureProjection; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.mapreduce.nn.NNJobRunner; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters.Clustering; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.GlobalParameters.Global; import org.locationtech.geowave.analytic.param.HullParameters; import org.locationtech.geowave.analytic.param.HullParameters.Hull; import org.locationtech.geowave.analytic.param.ParameterEnum; import 
org.locationtech.geowave.analytic.param.PartitionParameters.Partition; import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl; import org.locationtech.geowave.mapreduce.JobContextAdapterStore; import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; /** Run a single DBScan job producing micro clusters over a set of neighbors */ public class DBScanJobRunner extends NNJobRunner { private static final String[] CodecsRank = new String[] { "BZip2", // "Gzip", "Lz4", "Snappy", "Lzo",}; private boolean firstIteration = true; private long memInMB = 4096; @Override public void configure(final Job job) throws Exception { super.configure(job); job.setMapperClass(NNMapReduce.NNMapper.class); job.setReducerClass(DBScanMapReduce.DBScanMapHullReducer.class); job.setMapOutputKeyClass(PartitionDataWritable.class); job.setMapOutputValueClass(AdapterWithObjectWritable.class); job.setOutputKeyClass(GeoWaveInputKey.class); job.setOutputValueClass(ObjectWritable.class); job.setSpeculativeExecution(false); final Configuration conf = job.getConfiguration(); conf.set("mapreduce.map.java.opts", "-Xmx" + memInMB + "m"); conf.set("mapreduce.reduce.java.opts", "-Xmx" + memInMB + "m"); conf.setLong("mapred.task.timeout", 2000000); conf.setInt("mapreduce.task.io.sort.mb", 250); job.getConfiguration().setBoolean("mapreduce.reduce.speculative", false); Class bestCodecClass = org.apache.hadoop.io.compress.DefaultCodec.class; int rank = 0; for (final Class codecClass : CompressionCodecFactory.getCodecClasses( conf)) { int r = 1; for (final String codecs : CodecsRank) { if (codecClass.getName().contains(codecs)) { break; } r++; } if ((rank < r) && (r <= CodecsRank.length)) { try { final CompressionCodec codec = codecClass.newInstance(); if (Configurable.class.isAssignableFrom(codecClass)) { ((Configurable) codec).setConf(conf); } // throws an exception if not configurable in this context 
CodecPool.getCompressor(codec); bestCodecClass = codecClass; rank = r; } catch (final Throwable ex) { // occurs when codec is not installed. LOGGER.info("Not configuable in this context", ex); } } } LOGGER.warn("Compression with " + bestCodecClass.toString()); conf.setClass("mapreduce.map.output.compress.codec", bestCodecClass, CompressionCodec.class); conf.setBoolean("mapreduce.map.output.compress", true); conf.setBooleanIfUnset("first.iteration", firstIteration); } public void setMemoryInMB(final long memInMB) { this.memInMB = memInMB; } protected void setFirstIteration(final boolean firstIteration) { this.firstIteration = firstIteration; } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { runTimeProperties.storeIfEmpty(HullParameters.Hull.DATA_TYPE_ID, "concave_hull"); final String adapterID = runTimeProperties.getPropertyAsString(HullParameters.Hull.DATA_TYPE_ID, "concave_hull"); final String namespaceURI = runTimeProperties.storeIfEmpty( HullParameters.Hull.DATA_NAMESPACE_URI, BasicFeatureTypes.DEFAULT_NAMESPACE).toString(); JobContextAdapterStore.addDataAdapter( config, AnalyticFeature.createGeometryFeatureAdapter( adapterID, new String[0], namespaceURI, ClusteringUtils.CLUSTERING_CRS)); JobContextInternalAdapterStore.addTypeName( config, adapterID, InternalAdapterStoreImpl.getLazyInitialAdapterId(adapterID)); final Projection projectionFunction = runTimeProperties.getClassInstance( HullParameters.Hull.PROJECTION_CLASS, Projection.class, SimpleFeatureProjection.class); projectionFunction.setup(runTimeProperties, getScope(), config); runTimeProperties.setConfig( new ParameterEnum[] { HullParameters.Hull.PROJECTION_CLASS, GlobalParameters.Global.BATCH_ID, HullParameters.Hull.ZOOM_LEVEL, HullParameters.Hull.ITERATION, HullParameters.Hull.DATA_TYPE_ID, HullParameters.Hull.DATA_NAMESPACE_URI, ClusteringParameters.Clustering.MINIMUM_SIZE, Partition.GEOMETRIC_DISTANCE_UNIT, 
Partition.DISTANCE_THRESHOLDS, Partition.MAX_MEMBER_SELECTION}, config, getScope()); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway return super.run(config, runTimeProperties); } @Override public Collection> getParameters() { final Collection> params = super.getParameters(); params.addAll( Arrays.asList( new ParameterEnum[] { Partition.PARTITIONER_CLASS, Partition.MAX_DISTANCE, Partition.MAX_MEMBER_SELECTION, Global.BATCH_ID, Hull.DATA_TYPE_ID, Hull.PROJECTION_CLASS, Clustering.MINIMUM_SIZE, Partition.GEOMETRIC_DISTANCE_UNIT, Partition.DISTANCE_THRESHOLDS})); return params; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanMapReduce.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.UUID; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Reducer; import org.geotools.feature.type.BasicFeatureTypes; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.FeatureWritable; import org.locationtech.geowave.analytic.AdapterWithObjectWritable; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.Projection; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureProjection; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.distance.CoordinateCircleDistanceFn; import org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterNeighborList.ClusterNeighborListFactory; import org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterUnionList.ClusterUnionListFactory; import org.locationtech.geowave.analytic.mapreduce.dbscan.PreProcessSingleItemClusterList.PreProcessSingleItemClusterListFactory; import org.locationtech.geowave.analytic.mapreduce.dbscan.SingleItemClusterList.SingleItemClusterListFactory; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.NNReducer; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable; import 
org.locationtech.geowave.analytic.nn.NNProcessor; import org.locationtech.geowave.analytic.nn.NNProcessor.CompleteNotifier; import org.locationtech.geowave.analytic.nn.NeighborList; import org.locationtech.geowave.analytic.nn.NeighborListFactory; import org.locationtech.geowave.analytic.nn.TypeConverter; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.HullParameters; import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl; import org.locationtech.geowave.mapreduce.HadoopWritableSerializer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The approach differs from the approach commonly documented (e.g. * https://en.wikipedia.org/wiki/DBSCAN). This approach does not maintain a queue of viable * neighbors to navigate. * *

Clusters are merged if they share neighbors in common and both clusters meet the minimum size * constraints. * *

Clusters may be made up of points or geometries. When processing geometries, the closest two * points are included in the cluster, not the entire geometry. The reason for this is that * geometries may span large areas. This technique has a disadvantage of mis-representing dense * segments as a dense set of points. * *

The design uses two level partitioning, working within the confines of @{link NNProcessor}. * Performance gains and memory constraints are accomplished through a pre-processing step. * *

Pre-processing first finds dense clusters, replacing each dense cluster with a concave * polygon. Although not very scientific, in the condensing process the minimum condensed cluster size * is between 50 and 200, depending on the setting of the minimum owners. The choice is somewhat * arbitrary. Retaining individual points for clusters larger than 200 often creates memory * concerns. However, there is little value in condensing below 50 as that indicates a fairly small * cluster, which does not contribute to a performance concern. Override 'calculateCondensingMinimum * ()' to come up with a different approach. *

Pre-processing also finds cluster centers that have less than the minimum and tosses those
* centers. There is a caution here. Clusters of this type can fall on the 'edge' of dense clusters,
* thus 'tightening' the dense regions. It does effectively remove outliers. Alter the approach by
* over-riding 'calculateTossMinimum()' (e.g. make it a smaller number like 0 or 1).
*/
public class DBScanMapReduce {
  protected static final Logger LOGGER = LoggerFactory.getLogger(DBScanMapReduce.class);

  /**
   * Base DBScan reducer: for each neighborhood produced by the NN framework it extracts the
   * associated cluster, invalidates clusters smaller than the configured minimum membership,
   * and finalizes the rest.
   */
  public abstract static class DBScanMapReducer extends NNReducer> {
    // Minimum cluster membership; clusters below this size are invalidated in
    // processNeighbors(). Configured from Clustering.MINIMUM_SIZE in setup().
    protected int minOwners = 0;

    // Fresh per-partition summary map (cluster id -> cluster).
    @Override
    protected Map createSummary() {
      return new HashMap<>();
    }

    /**
     * Finalizes the cluster accumulated for {@code primaryId}: drops it when it has fewer
     * than {@code minOwners} members, otherwise calls {@code finish()} on it.
     */
    @Override
    protected void processNeighbors(
        final PartitionData partitionData,
        final ByteArray primaryId,
        final ClusterItem primary,
        final NeighborList neighbors,
        final Reducer.Context context,
        final Map index) throws IOException, InterruptedException {
      if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Finish {} ", primaryId);
      }
      if (neighbors == null) {
        return;
      }
      final Cluster cluster = ((ClusterNeighborList) neighbors).getCluster();
      if (cluster == null) {
        return;
      }
      if (cluster.size() < minOwners) {
        LOGGER.trace("Invalidate {} ", primaryId);
        cluster.invalidate();
        return;
      }
      cluster.finish();
    }

    /** Reads the minimum cluster size from the scoped job configuration (default 2). */
    @Override
    protected void setup(
        final Reducer.Context context) throws IOException, InterruptedException {
      super.setup(context);
      final ScopedJobConfiguration config =
          new ScopedJobConfiguration(context.getConfiguration(), NNMapReduce.class);
      // first run must at least form a triangle
      minOwners = config.getInt(ClusteringParameters.Clustering.MINIMUM_SIZE, 2);
      LOGGER.info("Minumum owners = {}", minOwners);
    }
  }

  /**
   * Converts an incoming SimpleFeature into a ClusterItem, projecting its geometry and
   * carrying over its COUNT attribute (defaulting to 1 when absent).
   */
  public static class SimpleFeatureToClusterItemConverter implements TypeConverter {
    final Projection projection;

    public SimpleFeatureToClusterItemConverter(final Projection projection) {
      super();
      this.projection = projection;
    }

    @Override
    public ClusterItem convert(final ByteArray id, final Object o) {
      final SimpleFeature feature = (SimpleFeature) o;
      final Long count =
          (Long) feature.getAttribute(AnalyticFeature.ClusterFeatureAttribute.COUNT.attrName());
      return new ClusterItem(
          feature.getID(),
          projection.getProjection(feature),
          count == null ? 1 : count,
          false);
    }
  }

  /**
   * Concrete DBScan reducer that condenses micro clusters into concave-hull polygon features
   * and emits them as GeoWave input records.
   */
  public static class DBScanMapHullReducer extends DBScanMapReducer {
    private String batchID;
    private int zoomLevel = 1;
    private int iteration = 1;
    private FeatureDataAdapter outputAdapter;
    private final ObjectWritable output = new ObjectWritable();
    // Set from the "first.iteration" configuration key; gates pre-processing and the
    // choice of neighbor-list factory.
    private boolean firstIteration = true;

    // Threshold above which a cluster is condensed to its hull during pre-processing:
    // min(max(minOwners, 200), minOwners * 10) — roughly ten times the minimum membership,
    // capped at 200 (see the class-level Javadoc for the rationale).
    protected int calculateCondensingMinimum() {
      return Math.min(Math.max(minOwners, 200), minOwners * 10);
    }

    // Threshold below which a center is tossed as unreachable/outlier during pre-processing.
    protected int calculateTossMinimum() {
      return (minOwners - 2);
    }

    /**
     * Find the large clusters and condense them down. Find the points that are not reachable to
     * viable clusters and remove them. Only runs on the first iteration, and only when the
     * partition is large enough for condensing to pay off.
     *
     * @throws InterruptedException
     * @throws IOException
     */
    @Override
    protected void preprocess(
        final Reducer.Context context,
        final NNProcessor processor,
        final Map index) throws IOException, InterruptedException {
      if (!firstIteration) {
        return;
      }
      processor.trimSmallPartitions(calculateTossMinimum());
      // 2.0 times minimum compression size.
      // if compression is not likely to increase
      // performance, then pre-processing does not buy much performance
      if (processor.size() < (calculateCondensingMinimum() * 2.0)) {
        return;
      }
      processor.process(
          new ClusterNeighborListFactory(new PreProcessSingleItemClusterListFactory(index), index),
          new CompleteNotifier() {

            final int condenseSize = calculateCondensingMinimum();
            final int tossSize = calculateTossMinimum();

            @Override
            public void complete(
                final ByteArray id, final ClusterItem value, final NeighborList list) {
              final Cluster cluster = ((ClusterNeighborList) list).getCluster();
              // this basically excludes points that cannot
              // contribute to extending the network.
              // may be a BAD idea.
              if (cluster.size() < tossSize) {
                processor.remove(id);
              }
              // this is a condensing component: replace the dense cluster with its hull
              // geometry and drop the linked members from further processing
              else if (cluster.size() > condenseSize) {
                cluster.finish();
                value.setGeometry(cluster.getGeometry());
                value.setCount(list.size());
                value.setCompressed();
                final Iterator it = cluster.getLinkedClusters().iterator();
                while (it.hasNext()) {
                  final ByteArray idToRemove = it.next();
                  processor.remove(idToRemove);
                  it.remove();
                }
              } else {
                cluster.clear();
              }
              context.progress();
            }
          });
      index.clear();
    }

    /**
     * Emits one polygon feature per compressed cluster in the partition summary, keyed by a
     * lazily-derived internal adapter id. Each cluster is emitted at most once (tracked via
     * {@code processed}) and removed from the summary as it is consumed.
     */
    @Override
    protected void processSummary(
        final PartitionData partitionData,
        final Map summary,
        final Reducer.Context context) throws IOException, InterruptedException {
      final HadoopWritableSerializer serializer = outputAdapter.createWritableSerializer();
      final Set processed = new HashSet<>();
      final Iterator> clusterIt = summary.entrySet().iterator();
      while (clusterIt.hasNext()) {
        final Cluster cluster = clusterIt.next().getValue();
        clusterIt.remove();
        if (cluster.isCompressed() && !processed.contains(cluster)) {
          processed.add(cluster);
          final SimpleFeature newPolygonFeature =
              AnalyticFeature.createGeometryFeature(
                  outputAdapter.getFeatureType(),
                  batchID,
                  UUID.randomUUID().toString(),
                  cluster.getId().getString(), // name
                  partitionData.getGroupId() != null ? partitionData.getGroupId().toString()
                      : cluster.getId().getString(), // group
                  0.0,
                  cluster.getGeometry(),
                  new String[0],
                  new double[0],
                  zoomLevel,
                  iteration,
                  cluster.size());
          output.set(serializer.toWritable(newPolygonFeature));
          if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Generating {}", newPolygonFeature.toString());
          }
          context.write(
              new GeoWaveInputKey(
                  // TODO this is a bit of a hack, but the
                  // adapter is seemingly completely transient
                  // and never actually persisted - it seems
                  // unlikely that the value for internal
                  // adapter ID even matters, but if it does
                  // this is the best effort
                  InternalAdapterStoreImpl.getLazyInitialAdapterId(outputAdapter.getTypeName()),
                  new ByteArray(newPolygonFeature.getID())),
              output);
        }
      }
    }

    // First iteration builds single-item cluster lists; later iterations union existing
    // clusters instead.
    @Override
    public NeighborListFactory createNeighborsListFactory(
        final Map summary) {
      return new ClusterNeighborListFactory(
          (firstIteration) ? new SingleItemClusterListFactory(summary)
              : new ClusterUnionListFactory(summary),
          summary);
    }

    /**
     * Reads hull/batch configuration, wires the hull tool's distance function, builds the
     * transient output adapter, and installs the projection-based type converter and the
     * cluster distance functions.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected void setup(
        final Reducer.Context context) throws IOException, InterruptedException {
      final ScopedJobConfiguration config =
          new ScopedJobConfiguration(context.getConfiguration(), NNMapReduce.class);
      super.setup(context);
      DBScanClusterList.getHullTool().setDistanceFnForCoordinate(new CoordinateCircleDistanceFn());
      DBScanClusterList.setMergeSize(minOwners);
      batchID = config.getString(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString());
      zoomLevel = config.getInt(HullParameters.Hull.ZOOM_LEVEL, 1);
      iteration = config.getInt(HullParameters.Hull.ITERATION, 1);
      firstIteration = context.getConfiguration().getBoolean("first.iteration", true);
      final String polygonDataTypeId =
          config.getString(HullParameters.Hull.DATA_TYPE_ID, "concave_hull");
      outputAdapter =
          AnalyticFeature.createGeometryFeatureAdapter(
              polygonDataTypeId,
              new String[0],
              config.getString(
                  HullParameters.Hull.DATA_NAMESPACE_URI,
                  BasicFeatureTypes.DEFAULT_NAMESPACE),
              ClusteringUtils.CLUSTERING_CRS);
      Projection projectionFunction;
      try {
        projectionFunction =
            config.getInstance(
                HullParameters.Hull.PROJECTION_CLASS,
                Projection.class,
                SimpleFeatureProjection.class);
      } catch (InstantiationException | IllegalAccessException e) {
        throw new IOException(e);
      }
      super.typeConverter = new SimpleFeatureToClusterItemConverter(projectionFunction);
      distanceProfileFn = new ClusterItemDistanceFn();
      super.distanceFn = new ClusterItemDistanceFn();
    }
  }
}

================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/PreProcessSingleItemClusterList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.util.Arrays; import java.util.Map; import org.locationtech.geowave.analytic.nn.NeighborList; import org.locationtech.geowave.analytic.nn.NeighborListFactory; import org.locationtech.geowave.core.index.ByteArray; /** * Maintains a single hull around a set of points. * *

Intended to run in a single thread. Not Thread Safe. */ public class PreProcessSingleItemClusterList extends SingleItemClusterList implements Cluster { public PreProcessSingleItemClusterList( final ByteArray centerId, final ClusterItem center, final NeighborListFactory factory, final Map index) { super(centerId, center, factory, index); } @Override protected void mergeLinks(final boolean deleteNonLinks) { for (final ByteArray id : this.getLinkedClusters()) { final PreProcessSingleItemClusterList other = (PreProcessSingleItemClusterList) index.get(id); final long snapShot = getClusterPoints(false).size(); if (other.clusterGeo != null) { getClusterPoints(true).addAll(Arrays.asList(other.clusterGeo.getCoordinates())); } getClusterPoints(true).addAll(other.getClusterPoints(false)); incrementItemCount(getClusterPoints(true).size() - snapShot); } } public static class PreProcessSingleItemClusterListFactory implements NeighborListFactory { private final Map index; public PreProcessSingleItemClusterListFactory(final Map index) { super(); this.index = index; } @Override public NeighborList buildNeighborList( final ByteArray centerId, final ClusterItem center) { Cluster list = index.get(centerId); if (list == null) { list = new PreProcessSingleItemClusterList(centerId, center, this, index); } return list; } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/dbscan/SingleItemClusterList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.dbscan; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.locationtech.geowave.analytic.mapreduce.dbscan.ClusterItemDistanceFn.ClusterProfileContext; import org.locationtech.geowave.analytic.nn.DistanceProfile; import org.locationtech.geowave.analytic.nn.NeighborList; import org.locationtech.geowave.analytic.nn.NeighborListFactory; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; /** * Maintains a single hull around a set of points. * *

Intended to run in a single thread. Not Thread Safe.
*/
public class SingleItemClusterList extends DBScanClusterList implements Cluster {
  // True once the point set has been replaced by a condensed hull geometry.
  private boolean compressed = false;
  // Lazily-created set of coordinates contributing to the hull; null when empty or cleared.
  private Set clusterPoints = null;

  public SingleItemClusterList(
      final ByteArray centerId,
      final ClusterItem center,
      final NeighborListFactory factory,
      final Map index) {
    // Seed the superclass geometry only when the center is a simple point or already
    // compressed; complex uncompressed geometries contribute coordinates instead.
    super(
        (center.getGeometry() instanceof Point) || center.isCompressed() ? center.getGeometry()
            : null,
        (int) center.getCount(),
        centerId,
        index);
    final Geometry clusterGeo = center.getGeometry();
    compressed = center.isCompressed();
    if (compressed) {
      // retain the centroid of the compressed geometry as a representative point
      getClusterPoints(true).add(clusterGeo.getCentroid().getCoordinate());
    }
  }

  // Returns the working point set; materializes a mutable set only when updates are
  // requested, otherwise hands back the shared immutable empty set.
  protected Set getClusterPoints(final boolean allowUpdates) {
    if ((clusterPoints == null) || (clusterPoints == Collections.emptySet())) {
      clusterPoints = allowUpdates ? new HashSet<>() : Collections.emptySet();
    }
    return clusterPoints;
  }

  @Override
  public void clear() {
    super.clear();
    clusterPoints = null;
  }

  /**
   * Adds the closest coordinate pair from the distance profile to the point set (center-side
   * and, for complex geometries, neighbor-side), compressing the cluster when the set grows
   * large. Always reports a contribution count of 1.
   */
  @Override
  protected long addAndFetchCount(
      final ByteArray id, final ClusterItem newInstance, final DistanceProfile distanceProfile) {
    final ClusterProfileContext context = (ClusterProfileContext) distanceProfile.getContext();
    boolean checkForCompress = false;
    // pick whichever profile point belongs to THIS cluster's side of the pair
    final Coordinate centerCoordinate =
        context.getItem1() == newInstance ? context.getPoint2() : context.getPoint1();
    final Geometry thisGeo = getGeometry();
    // only need to cluster this new point if it is likely to be an
    // inter-segment point
    if ((thisGeo == null) || !(thisGeo instanceof Point)) {
      checkForCompress = getClusterPoints(true).add(centerCoordinate);
    }
    // Closest distance points are only added if they are on a segment of a
    // complex geometry.
    if (!(newInstance.getGeometry() instanceof Point)) {
      final Coordinate newInstanceCoordinate =
          context.getItem2() == newInstance ? context.getPoint2() : context.getPoint1();
      checkForCompress = getClusterPoints(true).add(newInstanceCoordinate);
    }
    if (checkForCompress) {
      checkForCompression();
    }
    return 1;
  }

  /**
   * Folds another cluster's geometry coordinates and remaining points into this cluster,
   * then re-checks the compression threshold. Merging with itself is a no-op.
   */
  @Override
  public void merge(final Cluster cluster) {
    if (this == cluster) {
      return;
    }
    final SingleItemClusterList singleItemCluster = ((SingleItemClusterList) cluster);
    super.merge(cluster);
    if (singleItemCluster.clusterGeo != null) {
      getClusterPoints(true).addAll(Arrays.asList(singleItemCluster.clusterGeo.getCoordinates()));
    }
    final Set otherPoints = singleItemCluster.getClusterPoints(false);
    if (otherPoints.size() > 0) {
      // handle any remaining points
      getClusterPoints(true).addAll(otherPoints);
    }
    checkForCompression();
  }

  @Override
  public boolean isCompressed() {
    return compressed;
  }

  @Override
  public void finish() {
    super.finish();
    compressAndUpdate();
  }

  // Compress eagerly once more than 50 loose points accumulate, bounding memory use.
  private void checkForCompression() {
    if (getClusterPoints(false).size() > 50) {
      compressAndUpdate();
    }
  }

  // Replaces the point set with its hull geometry and marks the cluster compressed.
  private void compressAndUpdate() {
    clusterGeo = compress();
    clusterPoints = null;
    compressed = true;
  }

  @Override
  protected Geometry compress() {
    if (getClusterPoints(false).size() > 0) {
      return DBScanClusterList.getHullTool().createHullFromGeometry(
          clusterGeo,
          clusterPoints,
          true);
    }
    return clusterGeo;
  }

  /** Factory that reuses the cluster already indexed for a center, creating one if absent. */
  public static class SingleItemClusterListFactory implements NeighborListFactory {
    private final Map index;

    public SingleItemClusterListFactory(final Map index) {
      super();
      this.index = index;
    }

    @Override
    public NeighborList buildNeighborList(
        final ByteArray centerId, final ClusterItem center) {
      Cluster list = index.get(centerId);
      if (list == null) {
        list = new SingleItemClusterList(centerId, center, this, index);
      }
      return list;
    }
  }
}

================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/CellCounter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; public interface CellCounter { public void increment(long cellId, double weight); } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/CellSummationCombiner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import java.io.IOException; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Reducer; public class CellSummationCombiner extends Reducer { @Override public void reduce( final LongWritable key, final Iterable values, final Context context) throws IOException, InterruptedException { double s = 0.0; for (final DoubleWritable value : values) { s += value.get(); } context.write(key, new DoubleWritable(s)); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/CellSummationReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.kde;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Sums all weight contributions per cell and emits (sum, cellId) pairs — note the key/value
 * inversion relative to the input, presumably so downstream output is ordered by summed
 * weight (TODO confirm against the consuming job). Also tracks the maximum sum per zoom
 * level and emits it at cleanup as a record with a NEGATED value, which appears to act as a
 * per-level sentinel distinguishable from real sums — NOTE(review): verify with the
 * downstream consumer.
 */
public class CellSummationReducer extends Reducer {
  // Running maximum summed weight observed for each level.
  private final Map maxPerLevel = new HashMap<>();
  protected int minLevel;
  protected int maxLevel;
  protected int numLevels;

  @Override
  protected void setup(final Context context) throws IOException, InterruptedException {
    minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);
    maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);
    numLevels = (maxLevel - minLevel) + 1;
    super.setup(context);
  }

  @Override
  public void reduce(
      final LongWritable key, final Iterable values, final Context context)
      throws IOException, InterruptedException {
    double sum = 0.0;
    for (final DoubleWritable value : values) {
      sum += value.get();
    }
    // emit inverted: summed weight as key, cell id as value
    context.write(new DoubleWritable(sum), key);
    collectStats(key, sum, context);
  }

  /**
   * Updates the per-level maximum and a diagnostic counter. The level is recovered from the
   * cell id modulo the level count — assumes cell ids interleave levels this way (see
   * MapContextCellCounter) — TODO confirm.
   */
  protected void collectStats(final LongWritable key, final double sum, final Context context) {
    final long level = (key.get() % numLevels) + minLevel;
    Double max = maxPerLevel.get(level);
    if ((max == null) || (sum > max)) {
      max = sum;
      maxPerLevel.put(level, max);
    }
    context.getCounter("Entries per level", "level " + Long.toString(level)).increment(1);
  }

  @Override
  protected void cleanup(final org.apache.hadoop.mapreduce.Reducer.Context context)
      throws IOException, InterruptedException {
    // Emit each level's maximum as a negative-valued record keyed by the zero-based level.
    for (final Entry e : maxPerLevel.entrySet()) {
      context.write(new DoubleWritable(-e.getValue()), new LongWritable(e.getKey() - minLevel));
    }
    super.cleanup(context);
  }
}

================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/DoubleLevelPartitioner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import org.apache.hadoop.io.DoubleWritable; public class DoubleLevelPartitioner extends LevelPartitioner { } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/GaussianCellMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.mapreduce.kde;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Mapper;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.filter.text.ecql.ECQL;
import org.geotools.geometry.jts.JTS;
import org.geotools.referencing.CRS;
import org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter.ValueRange;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.filter.Filter;
import org.opengis.geometry.MismatchedDimensionException;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * KDE mapper: for each input feature, takes the centroid of its (optionally CRS-transformed)
 * geometry and spreads a Gaussian-weighted contribution into the cell grid of every zoom
 * level between minLevel and maxLevel via {@link GaussianFilter#incrementPt}.
 */
public class GaussianCellMapper extends Mapper {
  private static final Logger LOGGER = LoggerFactory.getLogger(GaussianCellMapper.class);
  protected static final String CQL_FILTER_KEY = "CQL_FILTER";
  protected int minLevel;
  protected int maxLevel;
  // Optional CQL filter; features failing it are skipped. Null when unset or unparsable.
  protected Filter filter;
  // Per-level grid dimensions and cell counters, populated in setup().
  protected Map levelStoreMap;
  protected ValueRange[] valueRangePerDimension;
  protected String inputCrsCode;
  protected String outputCrsCode;
  // Lazily-created transform from input CRS to output CRS; only built when the codes differ.
  protected MathTransform transform;

  @Override
  protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);
    minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);
    maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);
    // Default extent is whole-world lon/lat; overridable via the X/Y min/max keys.
    valueRangePerDimension =
        new ValueRange[] {
            new ValueRange(
                context.getConfiguration().getDouble(KDEJobRunner.X_MIN_KEY, -180),
                context.getConfiguration().getDouble(KDEJobRunner.X_MAX_KEY, 180)),
            new ValueRange(
                context.getConfiguration().getDouble(KDEJobRunner.Y_MIN_KEY, -90),
                context.getConfiguration().getDouble(KDEJobRunner.Y_MAX_KEY, 90))};
    inputCrsCode = context.getConfiguration().get(KDEJobRunner.INPUT_CRSCODE_KEY);
    outputCrsCode = context.getConfiguration().get(KDEJobRunner.OUTPUT_CRSCODE_KEY);
    final String cql = context.getConfiguration().get(CQL_FILTER_KEY);
    if ((cql != null) && !cql.isEmpty()) {
      try {
        filter = ECQL.toFilter(cql);
      } catch (final CQLException e) {
        // NOTE: an unparsable filter is logged and ignored, so ALL features pass through
        LOGGER.warn("Unable to parse CQL filter", e);
      }
    }
    levelStoreMap = new HashMap<>();
    for (int level = maxLevel; level >= minLevel; level--) {
      // 2:1 X-to-Y post ratio per level — consistent with a 2x1-tile world grid
      // (presumably EPSG:4326-style tiling; confirm against KDEJobRunner)
      final int numXPosts = (int) Math.pow(2, level + 1) * KDEJobRunner.TILE_SIZE;
      final int numYPosts = (int) Math.pow(2, level) * KDEJobRunner.TILE_SIZE;
      populateLevelStore(context, numXPosts, numYPosts, level);
    }
  }

  // Registers the grid dimensions and the context-backed cell counter for one level.
  protected void populateLevelStore(
      final Context context, final int numXPosts, final int numYPosts, final int level) {
    levelStoreMap.put(
        level,
        new LevelStore(
            numXPosts,
            numYPosts,
            new MapContextCellCounter(context, level, minLevel, maxLevel)));
  }

  /**
   * Filters the feature, derives a centroid point (transforming to the output CRS when the
   * codes differ), and increments every level's cell store with that point.
   */
  @Override
  protected void map(final GeoWaveInputKey key, final SimpleFeature value, final Context context)
      throws IOException, InterruptedException {
    Point pt = null;
    if (value != null) {
      if ((filter != null) && !filter.evaluate(value)) {
        return;
      }
      final Object geomObj = value.getDefaultGeometry();
      if ((geomObj != null) && (geomObj instanceof Geometry)) {
        if (inputCrsCode.equals(outputCrsCode)) {
          pt = ((Geometry) geomObj).getCentroid();
        } else {
          if (transform == null) {
            try {
              transform =
                  CRS.findMathTransform(
                      CRS.decode(inputCrsCode, true),
                      CRS.decode(outputCrsCode, true),
                      true);
            } catch (final FactoryException e) {
              // NOTE(review): message names inputCrsCode even though decoding
              // outputCrsCode can also throw here
              LOGGER.error("Unable to decode " + inputCrsCode + " CRS", e);
              throw new RuntimeException("Unable to initialize " + inputCrsCode + " object", e);
            }
          }
          try {
            final Geometry transformedGeometry = JTS.transform((Geometry) geomObj, transform);
            pt = transformedGeometry.getCentroid();
          } catch (MismatchedDimensionException | TransformException e) {
            LOGGER.warn(
                "Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS",
                e);
          }
        }
      }
    }
    if ((pt == null) || pt.isEmpty()) {
      return;
    }
    for (int level = maxLevel; level >= minLevel; level--) {
      incrementLevelStore(level, pt, value, valueRangePerDimension);
    }
  }

  // Spreads the point's Gaussian contribution into the given level's grid.
  protected void incrementLevelStore(
      final int level,
      final Point pt,
      final SimpleFeature feature,
      final ValueRange[] valueRangePerDimension) {
    final LevelStore levelStore = levelStoreMap.get(level);
    GaussianFilter.incrementPt(
        pt.getY(),
        pt.getX(),
        levelStore.counter,
        levelStore.numXPosts,
        levelStore.numYPosts,
        valueRangePerDimension);
  }

  /** Immutable per-level bundle of grid dimensions and the counter receiving contributions. */
  public static class LevelStore {
    public final int numXPosts;
    public final int numYPosts;
    public final CellCounter counter;

    public LevelStore(final int numXPosts, final int numYPosts, final CellCounter counter) {
      this.numXPosts = numXPosts;
      this.numYPosts = numYPosts;
      this.counter = counter;
    }
  }
}

================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/GaussianFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.IntStream; public class GaussianFilter { private static final double SQRT_2_PI = Math.sqrt(2 * Math.PI); // private static double[] majorSmoothingGaussianKernel = // new double[] { // 0.008812229292562285, // 0.02714357714347937, // 0.06511405659938267, // 0.12164907301380959, // 0.17699835683135567, // 0.20056541423882082, // 0.17699835683135567, // 0.12164907301380959, // 0.06511405659938267, // 0.02714357714347937, // 0.008812229292562285}; /** This kernel was computed with sigma = 1 for x=(-3,-2,-1,0,1,2,3) */ private static double[] intermediateSmoothingGaussianKernel = new double[] {0.006, 0.061, 0.242, 0.383, 0.242, 0.061, 0.006}; private static Map> offsetsCache = new HashMap<>(); private static List TYPICAL_2D_OFFSET; private static double[] TYPICAL_2D_OFFSET_BLURS; // private static double[] minorSmoothingGaussianKernel = new double[] { // 0.2186801, // 0.531923041, // 0.2186801 // }; public static class ValueRange { private final double min; private final double max; public ValueRange(final double min, final double max) { this.min = min; this.max = max; } public double getMin() { return min; } public double getMax() { return max; } } private static final ValueRange[] valueRangePerDimension = new ValueRange[] {new ValueRange(-180, 180), new ValueRange(-90, 90)}; public static void incrementPt( final double lat, final double lon, final CellCounter results, final int numXPosts, final int numYPosts, final ValueRange[] 
valueRangePerDimension) { incrementBBox(lon, lon, lat, lat, results, numXPosts, numYPosts, 1, valueRangePerDimension); } public static void incrementPt( final double lat, final double lon, final CellCounter results, final int numXPosts, final int numYPosts, final double contributionScaleFactor, final ValueRange[] valueRangePerDimension) { incrementBBox( lon, lon, lat, lat, results, numXPosts, numYPosts, contributionScaleFactor, valueRangePerDimension); } public static void incrementPtFast( final double[] binLocationPerDimension, final int[] binsPerDimension, final CellCounter results) { final int numDimensions = 2; final double[] gaussianKernel = getGaussianKernel(1, 3); final int maxOffset = gaussianKernel.length / 2; final List offsets = getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset); for (int i = 0; i < offsets.size(); i++) { final int[] offset = offsets.get(i); final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset); final List positionsAndContributions = getPositionsAndContributionPt( numDimensions, 0, binLocationPerDimension, blur, new int[numDimensions], binsPerDimension, offset); for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) { results.increment(positionAndContribution.position, positionAndContribution.contribution); } } } public static void incrementPtFast( final double lat, final double lon, final CellCounter results, final int numXPosts, final int numYPosts) { final int numDimensions = 2; final double[] binLocationPerDimension = new double[numDimensions]; final int[] binsPerDimension = new int[] {numXPosts, numYPosts}; final double[] valsPerDimension = new double[] {lon, lat}; for (int d = 0; d < numDimensions; d++) { final ValueRange valueRange = valueRangePerDimension[d]; final double span = (valueRange.getMax() - valueRange.getMin()); binLocationPerDimension[d] = (((valsPerDimension[d] - valueRange.getMin()) / span) * binsPerDimension[d]); } 
final double[] gaussianKernel = getGaussianKernel(1, 3); final int maxOffset = gaussianKernel.length / 2; final List offsets = getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset); for (int i = 0; i < offsets.size(); i++) { final int[] offset = offsets.get(i); final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset); final List positionsAndContributions = getPositionsAndContributionPt( numDimensions, 0, binLocationPerDimension, blur, new int[numDimensions], binsPerDimension, offset); for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) { results.increment(positionAndContribution.position, positionAndContribution.contribution); } } } public static void incrementPtFast( final double x, final double y, final double minX, final double maxX, final double minY, final double maxY, final CellCounter results, final int numXPosts, final int numYPosts) { final int numDimensions = 2; final double[] binLocationPerDimension = new double[numDimensions]; final int[] binsPerDimension = new int[] {numXPosts, numYPosts}; final double spanX = (maxX - minX); final double spanY = (maxY - minY); binLocationPerDimension[0] = (((x - minX) / spanX) * binsPerDimension[0]); binLocationPerDimension[1] = (((y - minY) / spanY) * binsPerDimension[1]); final double[] gaussianKernel = getGaussianKernel(1, 3); final int maxOffset = gaussianKernel.length / 2; final List offsets = getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset); for (int i = 0; i < offsets.size(); i++) { final int[] offset = offsets.get(i); final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset); final List positionsAndContributions = getPositionsAndContributionPt( numDimensions, 0, binLocationPerDimension, blur, new int[numDimensions], binsPerDimension, offset); for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) { 
results.increment(positionAndContribution.position, positionAndContribution.contribution); } } } public static void incrementBBox( final double minX, final double maxX, final double minY, final double maxY, final CellCounter results, final int numXPosts, final int numYPosts, final double contributionScaleFactor, final ValueRange[] valueRangePerDimension) { final int numDimensions = 2; final double[] minBinLocationPerDimension = new double[numDimensions]; final double[] maxBinLocationPerDimension = new double[numDimensions]; final int[] binsPerDimension = new int[] {numXPosts, numYPosts}; final double[] minsPerDimension = new double[] {minX, minY}; final double[] maxesPerDimension = new double[] {maxX, maxY}; for (int d = 0; d < numDimensions; d++) { final ValueRange valueRange = valueRangePerDimension[d]; final double span = (valueRange.getMax() - valueRange.getMin()); minBinLocationPerDimension[d] = (((minsPerDimension[d] - valueRange.getMin()) / span) * binsPerDimension[d]); maxBinLocationPerDimension[d] = (((maxesPerDimension[d] - valueRange.getMin()) / span) * binsPerDimension[d]); // give it a buffer of 1 for being counted within this bounds // because we perform smoothing on the values anyway if ((maxBinLocationPerDimension[d] < -1) || (minBinLocationPerDimension[d] > binsPerDimension[d])) { // not in bounds return; } else { minBinLocationPerDimension[d] = Math.max(minBinLocationPerDimension[d], -1); maxBinLocationPerDimension[d] = Math.min(maxBinLocationPerDimension[d], binsPerDimension[d]); } } final double[] gaussianKernel = getGaussianKernel(1, 3); final int maxOffset = gaussianKernel.length / 2; final List offsets = getOffsets(numDimensions, 0, new int[numDimensions], gaussianKernel, maxOffset); for (int i = 0; i < offsets.size(); i++) { final int[] offset = offsets.get(i); final double blur = getBlurFromOffset(i, numDimensions, offset, gaussianKernel, maxOffset); final List positionsAndContributions = getPositionsAndContribution( numDimensions, 0, 
minBinLocationPerDimension, maxBinLocationPerDimension, blur, new int[numDimensions], binsPerDimension, offset); for (final BinPositionAndContribution positionAndContribution : positionsAndContributions) { results.increment( positionAndContribution.position, positionAndContribution.contribution * contributionScaleFactor); } } } protected static double getSigma(final int radius, final int order) { return ((radius * 2.0) + 1.0) / (5.0 + (0.8 * order)); } protected static double[] getGaussianKernel(final double sigma, final int radius) { return intermediateSmoothingGaussianKernel; } protected static double[] calculateGaussianKernel(final double sigma, final int radius) { // return majorSmoothingGaussianKernel; final double[] kernel = new double[(radius * 2) + 1]; int index = 0; for (int i = radius; i >= -radius; i--) { kernel[index++] = computePDF(0, sigma, i); } return normalizeSumToOne(kernel); } protected static double computePDF(final double mean, final double sigma, final double sample) { final double delta = sample - mean; return Math.exp((-delta * delta) / (2.0 * sigma * sigma)) / (sigma * SQRT_2_PI); } protected static double[] normalizeSumToOne(final double[] kernel) { final double[] retVal = new double[kernel.length]; double total = 0; for (final double element : kernel) { total += element; } for (int i = 0; i < kernel.length; i++) { retVal[i] = kernel[i] / total; } return retVal; } private static List getOffsets( final int numDimensions, final int currentDimension, final int[] currentOffsetsPerDimension, final double[] gaussianKernel, final int maxOffset) { if ((numDimensions == 2) && (TYPICAL_2D_OFFSET != null)) { return TYPICAL_2D_OFFSET; } List offsets = offsetsCache.get(numDimensions); if (offsets == null) { synchronized (offsetsCache) { offsets = calculateOffsets( numDimensions, currentDimension, currentOffsetsPerDimension, gaussianKernel, maxOffset); offsetsCache.put(numDimensions, offsets); if (numDimensions == 2) { TYPICAL_2D_OFFSET = offsets; 
TYPICAL_2D_OFFSET_BLURS = IntStream.range(0, TYPICAL_2D_OFFSET.size()).mapToDouble( i -> calculateBlurFromOffset( TYPICAL_2D_OFFSET.get(i), gaussianKernel, maxOffset)).toArray(); } } } return offsets; } private static List calculateOffsets( final int numDimensions, final int currentDimension, final int[] currentOffsetsPerDimension, final double[] gaussianKernel, final int maxOffset) { final List offsets = new ArrayList<>(); if (currentDimension == numDimensions) { offsets.add(currentOffsetsPerDimension.clone()); } else { for (int i = -maxOffset; i < (gaussianKernel.length - maxOffset); i++) { currentOffsetsPerDimension[currentDimension] = i; offsets.addAll( calculateOffsets( numDimensions, currentDimension + 1, currentOffsetsPerDimension, gaussianKernel, maxOffset)); } } return offsets; } private static double getBlurFromOffset( final int index, final int numDimensions, final int[] indexIntoGaussianPerDimension, final double[] gaussianKernel, final int maxOffset) { if (numDimensions == 2) { return TYPICAL_2D_OFFSET_BLURS[index]; } return calculateBlurFromOffset(indexIntoGaussianPerDimension, gaussianKernel, maxOffset); } private static double calculateBlurFromOffset( final int[] indexIntoGaussianPerDimension, final double[] gaussianKernel, final int maxOffset) { double blurFactor = 1; for (final int index : indexIntoGaussianPerDimension) { blurFactor *= gaussianKernel[index + maxOffset]; } return blurFactor; } private static List getPositionsAndContributionPt( final int numDimensions, final int currentDimension, final double[] locationPerDimension, final double currentContribution, final int[] finalIndexPerDimension, final int[] binsPerDimension, final int[] offset) { final List positions = new ArrayList<>(); if (currentDimension == numDimensions) { positions.add( new BinPositionAndContribution( getPosition(finalIndexPerDimension, binsPerDimension), currentContribution)); } else { final int floorOfLocation = (int) (locationPerDimension[currentDimension]); final 
int[] floorLocation = finalIndexPerDimension; floorLocation[currentDimension] = floorOfLocation + offset[currentDimension]; if ((floorLocation[currentDimension] >= 0) && (floorLocation[currentDimension] < binsPerDimension[currentDimension])) { positions.addAll( getPositionsAndContributionPt( numDimensions, currentDimension + 1, locationPerDimension, currentContribution, floorLocation, binsPerDimension, offset)); } } return positions; } private static List getPositionsAndContribution( final int numDimensions, final int currentDimension, final double[] minLocationPerDimension, final double[] maxLocationPerDimension, final double currentContribution, final int[] finalIndexPerDimension, final int[] binsPerDimension, final int[] offset) { final List positions = new ArrayList<>(); if (currentDimension == numDimensions) { positions.add( new BinPositionAndContribution( getPosition(finalIndexPerDimension, binsPerDimension), currentContribution)); } else { final int floorOfLocation = (int) (minLocationPerDimension[currentDimension]); final int[] floorLocation = finalIndexPerDimension.clone(); floorLocation[currentDimension] = floorOfLocation + offset[currentDimension]; if ((floorLocation[currentDimension] >= 0) && (floorLocation[currentDimension] < binsPerDimension[currentDimension])) { positions.addAll( getPositionsAndContribution( numDimensions, currentDimension + 1, minLocationPerDimension, maxLocationPerDimension, currentContribution, floorLocation, binsPerDimension, offset)); } final int ceilOfLocation = (int) Math.ceil(maxLocationPerDimension[currentDimension]); /** * the exterior cells are covered above by the floor of the min and ceil of the max, * everything in between is covered below */ final int startLocation = Math.max(floorOfLocation + offset[currentDimension] + 1, 0); final int stopLocation = Math.min(ceilOfLocation + offset[currentDimension], binsPerDimension[currentDimension]); if (startLocation < stopLocation) { for (int location = startLocation; location < 
stopLocation; location++) { final int[] middleLocation = finalIndexPerDimension.clone(); middleLocation[currentDimension] = location; positions.addAll( getPositionsAndContribution( numDimensions, currentDimension + 1, minLocationPerDimension, maxLocationPerDimension, currentContribution, middleLocation, binsPerDimension, offset)); } } } return positions; } private static long getPosition(final int[] positionPerDimension, final int[] binsPerDimension) { long retVal = 0; double multiplier = 1; for (int d = positionPerDimension.length - 1; d >= 0; d--) { retVal += (positionPerDimension[d] * multiplier); multiplier *= binsPerDimension[d]; } return retVal; } private static class BinPositionAndContribution { private final long position; private final double contribution; private BinPositionAndContribution(final long position, final double contribution) { this.position = position; this.contribution = contribution; } } /* * protected void incrementCount( final double minx, final double maxx, final double miny, final * double maxy, final int count ) { final double[] minsPerDimension = new double[]{minx, miny}; * final double[] maxesPerDimension = new double[]{maxx,maxy}; * * for (final BoundsAndCounts counts : statistics.boundsWithCounts) { boolean inBounds = true; * final double[] minBinLocationPerDimension = new double[2]; final double[] * maxBinLocationPerDimension = new double[2]; for (int d = 0; d < 2; d++) { final ValueRange * valueRange = counts.valueRangePerDimension[d]; final double span = (valueRange.getMax() - * valueRange.getMin()); minBinLocationPerDimension[d] = (((minsPerDimension[d] - * valueRange.getMin()) / span) * counts.binsPerDimension[d]); maxBinLocationPerDimension[d] = * (((maxesPerDimension[d] - valueRange.getMin()) / span) * counts.binsPerDimension[d]); // give * it a buffer of 1 for being counted within this bounds // because we perform smoothing on the * values anyway if ((maxBinLocationPerDimension[d] < -1) || (minBinLocationPerDimension[d] > * 
counts.binsPerDimension[d])) { inBounds = false; break; } else { minBinLocationPerDimension[d] * = Math.max( minBinLocationPerDimension[d], -1); maxBinLocationPerDimension[d] = Math.min( * maxBinLocationPerDimension[d], counts.binsPerDimension[d]); } * * } if (inBounds) { final double[] gaussianKernel =majorSmoothingGaussianKernel; final int * maxOffset = gaussianKernel.length / 2; final List offsets = getOffsets( 2, 0, new * int[2], gaussianKernel, maxOffset); for (final int[] offset : offsets) { final double blur = * getBlurFromOffset( offset, gaussianKernel, maxOffset); final List * positionsAndContributions = getPositionsAndContribution( 2, 0, minBinLocationPerDimension, * maxBinLocationPerDimension, blur, new int[2], counts.binsPerDimension, offset); for (final * BinPositionAndContribution positionAndContribution : positionsAndContributions) { * counts.incrementCount( positionAndContribution.position, positionAndContribution.contribution * * count); } } } } } * * static private List getOffsets( final int numDimensions, final int currentDimension, * final int[] currentOffsetsPerDimension, final double[] gaussianKernel, final int maxOffset ) { * final List offsets = new ArrayList(); if (currentDimension == numDimensions) { * offsets.add(currentOffsetsPerDimension.clone()); } else { for (int i = -maxOffset; i < * (gaussianKernel.length - maxOffset); i++) { currentOffsetsPerDimension[currentDimension] = i; * offsets.addAll(getOffsets( numDimensions, currentDimension + 1, currentOffsetsPerDimension, * gaussianKernel, maxOffset)); } } return offsets; } * * static private double getBlurFromOffset( final int[] indexIntoGaussianPerDimension, final * double[] gaussianKernel, final int maxOffset ) { double blurFactor = 1; * * for (final int index : indexIntoGaussianPerDimension) { blurFactor *= gaussianKernel[index + * maxOffset]; } return blurFactor; } * * private List getPositionsAndContribution( final int numDimensions, * final int currentDimension, final double[] 
minLocationPerDimension, final double[] * maxLocationPerDimension, final double currentContribution, final int[] finalIndexPerDimension, * final int[] binsPerDimension, final int[] offset ) { final List * positions = new ArrayList(); if (currentDimension == numDimensions) * { positions.add(new BinPositionAndContribution( getPosition( finalIndexPerDimension, * binsPerDimension), currentContribution)); } else { final int floorOfLocation = (int) * (minLocationPerDimension[currentDimension]); final int[] floorLocation = * finalIndexPerDimension.clone(); floorLocation[currentDimension] = floorOfLocation + * offset[currentDimension]; if ((floorLocation[currentDimension] >= 0) && * (floorLocation[currentDimension] < binsPerDimension[currentDimension])) { * positions.addAll(getPositionsAndContribution( numDimensions, currentDimension + 1, * minLocationPerDimension, maxLocationPerDimension, currentContribution, floorLocation, * binsPerDimension, offset)); } final int ceilOfLocation = (int) * Math.ceil(maxLocationPerDimension[currentDimension]); */ /** * the exterior cells are covered above by the floor of the min and ceil of the max, everything in * between is covered below */ /* * final int startLocation = Math.max( floorOfLocation + offset[currentDimension] + 1, 0); final * int stopLocation = Math.min( ceilOfLocation + offset[currentDimension], * binsPerDimension[currentDimension]); if (startLocation < stopLocation) { for (int location = * startLocation; location < stopLocation; location++) { final int[] middleLocation = * finalIndexPerDimension.clone(); middleLocation[currentDimension] = location; * positions.addAll(getPositionsAndContribution( numDimensions, currentDimension + 1, * minLocationPerDimension, maxLocationPerDimension, currentContribution, middleLocation, * binsPerDimension, offset)); } } } return positions; } * * private static int getPosition( final int[] positionPerDimension, final int[] binsPerDimension * ) { int retVal = 0; double multiplier = 1; for 
(int d = 0; d < positionPerDimension.length; * d++) { retVal += (positionPerDimension[d] * multiplier); multiplier *= binsPerDimension[d]; } * return retVal; } * * protected static int[] getPositionPerDimension( final int position, final int[] * binsPerDimension ) { int multiplier = 1; * * final int[] positionPerDimension = new int[binsPerDimension.length]; for (int d = 0; d < * positionPerDimension.length; d++) { positionPerDimension[d] = (position / multiplier) % * binsPerDimension[d]; multiplier *= binsPerDimension[d]; } return positionPerDimension; } * * private static class BinPositionAndContribution { final private int position; final private * double contribution; * * private BinPositionAndContribution( final int position, final double contribution ) { * this.position = position; this.contribution = contribution; } } * * protected static class BoundsAndCounts { public final double minx; public final double maxx; * public final double miny; public final double maxy; public final Double[] counts; public final * int[] binsPerDimension; * * public BoundsAndCounts( final ValueRange[] valueRangePerDimension, final Double[] counts, final * int[] binsPerDimension ) { this.valueRangePerDimension = valueRangePerDimension; this.counts = * counts; this.binsPerDimension = binsPerDimension; } * * private void incrementCount( final int position, final double increment ) { if (counts.length > * position) { synchronized (counts) { if (counts[position] == null) { counts[position] = new * Double( 0); } } counts[position] += increment; } else { * logger.warn("position of count summary outside of bounds"); } } } * * protected static class SummaryStatistics { final public List boundsWithCounts; * * public SummaryStatistics() { boundsWithCounts = Collections.synchronizedList(new * ArrayList()); } * * public SummaryStatistics( final List boundsWithCounts) { this.boundsWithCounts * = boundsWithCounts; } * * public ValueRange getCountMinMax() { double min = Double.MAX_VALUE; double 
max = * -Double.MAX_VALUE; for (final BoundsAndCounts boundsAndCount : boundsWithCounts) { * for (final Double count : boundsAndCount.counts) { if (count != null) { min = Math.min( min, * count); max = Math.max( max, count); } } } return new ValueRange( min, max); } } */ } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/IdentityMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import java.io.IOException; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Mapper; public class IdentityMapper extends Mapper { @Override protected void map( final DoubleWritable key, final LongWritable value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { context.write(key, value); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/KDECommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kde;

import org.locationtech.geowave.mapreduce.operations.HdfsHostPortConverter;
import com.beust.jcommander.Parameter;

/**
 * Command-line options bean for the KDE job, bound by JCommander via the {@code @Parameter}
 * annotations. Mutable by design: JCommander injects field values and callers may override them
 * through the setters.
 */
public class KDECommandLineOptions {
  @Parameter(
      names = "--featureType",
      required = true,
      description = "The name of the feature type to run a KDE on")
  private String featureType;

  @Parameter(names = "--indexName", description = "An optional index name to filter the input data")
  private String indexName;

  @Parameter(names = "--minLevel", required = true, description = "The min level to run a KDE at")
  private Integer minLevel;

  @Parameter(names = "--maxLevel", required = true, description = "The max level to run a KDE at")
  private Integer maxLevel;

  @Parameter(names = "--minSplits", description = "The min partitions for the input data")
  private Integer minSplits;

  @Parameter(names = "--maxSplits", description = "The max partitions for the input data")
  private Integer maxSplits;

  @Parameter(names = "--coverageName", required = true, description = "The coverage name")
  private String coverageName;

  // converter turns the raw string into a validated host:port form
  @Parameter(
      names = "--hdfsHostPort",
      description = "The hdfs host port",
      converter = HdfsHostPortConverter.class)
  private String hdfsHostPort;

  @Parameter(
      names = "--jobSubmissionHostPort",
      required = true,
      description = "The job submission tracker")
  private String jobTrackerOrResourceManHostPort;

  // defaults to 1 so the KDE output needs no no-data merge strategy (see KDEJobRunner)
  @Parameter(names = "--tileSize", description = "The tile size")
  private Integer tileSize = 1;

  @Parameter(
      names = "--cqlFilter",
      description = "An optional CQL filter applied to the input data")
  private String cqlFilter;

  @Parameter(
      names = "--outputIndex",
      description =
"An optional index for output datastore. Only spatial index type is supported")
  private String outputIndex;

  public String getOutputIndex() {
    return outputIndex;
  }

  public void setOutputIndex(final String outputIndex) {
    this.outputIndex = outputIndex;
  }

  public KDECommandLineOptions() {}

  public String getIndexName() {
    return indexName;
  }

  // note: parameter is named inputIndex but sets the (input) indexName field
  public void setIndexName(final String inputIndex) {
    this.indexName = inputIndex;
  }

  public String getFeatureType() {
    return featureType;
  }

  public Integer getMinLevel() {
    return minLevel;
  }

  public Integer getMaxLevel() {
    return maxLevel;
  }

  public Integer getMinSplits() {
    return minSplits;
  }

  public Integer getMaxSplits() {
    return maxSplits;
  }

  public String getCoverageName() {
    return coverageName;
  }

  public String getHdfsHostPort() {
    return hdfsHostPort;
  }

  public String getJobTrackerOrResourceManHostPort() {
    return jobTrackerOrResourceManHostPort;
  }

  public Integer getTileSize() {
    return tileSize;
  }

  public String getCqlFilter() {
    return cqlFilter;
  }

  public void setFeatureType(final String featureType) {
    this.featureType = featureType;
  }

  public void setMinLevel(final Integer minLevel) {
    this.minLevel = minLevel;
  }

  public void setMaxLevel(final Integer maxLevel) {
    this.maxLevel = maxLevel;
  }

  public void setMinSplits(final Integer minSplits) {
    this.minSplits = minSplits;
  }

  public void setMaxSplits(final Integer maxSplits) {
    this.maxSplits = maxSplits;
  }

  public void setCoverageName(final String coverageName) {
    this.coverageName = coverageName;
  }

  public void setHdfsHostPort(final String hdfsHostPort) {
    this.hdfsHostPort = hdfsHostPort;
  }

  public void setJobTrackerOrResourceManHostPort(final String jobTrackerOrResourceManHostPort) {
    this.jobTrackerOrResourceManHostPort = jobTrackerOrResourceManHostPort;
  }

  public void setTileSize(final Integer tileSize) {
    this.tileSize = tileSize;
  }

  public void setCqlFilter(final String cqlFilter) {
    this.cqlFilter = cqlFilter;
  }
}

================================================
FILE:
analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/KDEJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; import java.util.Properties; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.Partitioner; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.geotools.filter.text.ecql.ECQL; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.adapter.raster.operations.ResizeMRCommand; import org.locationtech.geowave.analytic.mapreduce.operations.KdeCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.ManualOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import 
org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor; import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.store.AddStoreCommand; import org.locationtech.geowave.core.store.cli.store.ClearStoreCommand; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.config.ConfigUtils; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.locationtech.jts.geom.Geometry; import org.opengis.coverage.grid.GridCoverage; import org.opengis.filter.Filter; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; public class KDEJobRunner extends Configured implements Tool { private static final Logger LOGGER = LoggerFactory.getLogger(KDEJobRunner.class); public static final String GEOWAVE_CLASSPATH_JARS = "geowave.classpath.jars"; private static final String TMP_COVERAGE_SUFFIX = "_tMp_CoVeRaGe"; protected static int TILE_SIZE = 1; public static final String MAX_LEVEL_KEY = "MAX_LEVEL"; public static final String MIN_LEVEL_KEY = "MIN_LEVEL"; public static final String COVERAGE_NAME_KEY = "COVERAGE_NAME"; protected KDECommandLineOptions kdeCommandLineOptions; protected DataStorePluginOptions inputDataStoreOptions; protected DataStorePluginOptions outputDataStoreOptions; protected File configFile; protected Index outputIndex; public static final String X_MIN_KEY = "X_MIN"; public static final String X_MAX_KEY = "X_MAX"; public static final String Y_MIN_KEY = "Y_MIN"; public static final String Y_MAX_KEY = "Y_MAX"; public static final String INPUT_CRSCODE_KEY = "INPUT_CRS"; public static final String OUTPUT_CRSCODE_KEY = "OUTPUT_CRS"; public KDEJobRunner( final KDECommandLineOptions kdeCommandLineOptions, final DataStorePluginOptions inputDataStoreOptions, final DataStorePluginOptions outputDataStoreOptions, final File configFile, final Index outputIndex) { this.kdeCommandLineOptions = kdeCommandLineOptions; this.inputDataStoreOptions = inputDataStoreOptions; this.outputDataStoreOptions = outputDataStoreOptions; this.configFile = configFile; this.outputIndex = outputIndex; } /** Main method to execute the MapReduce analytic. 
*/
  @SuppressWarnings("deprecation")
  public int runJob() throws Exception {
    Configuration conf = super.getConf();
    if (conf == null) {
      conf = new Configuration();
      setConf(conf);
    }
    // Pick the input index: the one matching --indexName, or the first non-null index
    Index inputPrimaryIndex = null;
    final Index[] idxArray = inputDataStoreOptions.createDataStore().getIndices();
    for (final Index idx : idxArray) {
      if ((idx != null)
          && ((kdeCommandLineOptions.getIndexName() == null)
              || kdeCommandLineOptions.getIndexName().equals(idx.getName()))) {
        inputPrimaryIndex = idx;
        break;
      }
    }
    final CoordinateReferenceSystem inputIndexCrs = GeometryUtils.getIndexCrs(inputPrimaryIndex);
    final String inputCrsCode = GeometryUtils.getCrsCode(inputIndexCrs);
    // If no explicit output index was supplied, build a spatial index in the input CRS
    Index outputPrimaryIndex = outputIndex;
    CoordinateReferenceSystem outputIndexCrs = null;
    String outputCrsCode = null;
    if (outputPrimaryIndex != null) {
      outputIndexCrs = GeometryUtils.getIndexCrs(outputPrimaryIndex);
      outputCrsCode = GeometryUtils.getCrsCode(outputIndexCrs);
    } else {
      final SpatialDimensionalityTypeProvider sdp = new SpatialDimensionalityTypeProvider();
      final SpatialOptions so = sdp.createOptions();
      so.setCrs(inputCrsCode);
      outputPrimaryIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(so);
      outputIndexCrs = inputIndexCrs;
      outputCrsCode = inputCrsCode;
    }
    // The CRS extent becomes the raster bounds, so it must be finite on both axes
    final CoordinateSystem cs = outputIndexCrs.getCoordinateSystem();
    final CoordinateSystemAxis csx = cs.getAxis(0);
    final CoordinateSystemAxis csy = cs.getAxis(1);
    final double xMax = csx.getMaximumValue();
    final double xMin = csx.getMinimumValue();
    final double yMax = csy.getMaximumValue();
    final double yMin = csy.getMinimumValue();
    if ((xMax == Double.POSITIVE_INFINITY)
        || (xMin == Double.NEGATIVE_INFINITY)
        || (yMax == Double.POSITIVE_INFINITY)
        || (yMin == Double.NEGATIVE_INFINITY)) {
      LOGGER.error(
          "Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported");
      throw new RuntimeException(
          "Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported");
    }
    DataStorePluginOptions rasterResizeOutputDataStoreOptions;
    String kdeCoverageName;
    // so we don't need a no data merge strategy, use 1 for the tile size of
    // the KDE output and then run a resize operation
    if ((kdeCommandLineOptions.getTileSize() > 1)) {
      // this is the ending data store options after resize, the KDE will
      // need to output to a temporary namespace, a resize operation
      // will use the outputDataStoreOptions
      rasterResizeOutputDataStoreOptions = outputDataStoreOptions;
      // first clone the outputDataStoreOptions, then set it to a tmp
      // namespace
      final Map configOptions = outputDataStoreOptions.getOptionsAsMap();
      final StoreFactoryOptions options =
          ConfigUtils.populateOptionsFromList(
              outputDataStoreOptions.getFactoryFamily().getDataStoreFactory().createOptionsInstance(),
              configOptions);
      options.setGeoWaveNamespace(outputDataStoreOptions.getGeoWaveNamespace() + "_tmp");
      outputDataStoreOptions = new DataStorePluginOptions(options);
      kdeCoverageName = kdeCommandLineOptions.getCoverageName() + TMP_COVERAGE_SUFFIX;
    } else {
      rasterResizeOutputDataStoreOptions = null;
      kdeCoverageName = kdeCommandLineOptions.getCoverageName();
    }
    // Resolve the HDFS URL from the GeoWave config file when not given on the command line
    if (kdeCommandLineOptions.getHdfsHostPort() == null) {
      final Properties configProperties = ConfigOptions.loadProperties(configFile);
      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);
      kdeCommandLineOptions.setHdfsHostPort(hdfsFSUrl);
    }
    GeoWaveConfiguratorBase.setRemoteInvocationParams(
        kdeCommandLineOptions.getHdfsHostPort(),
        kdeCommandLineOptions.getJobTrackerOrResourceManHostPort(),
        conf);
    // Publish the parameters the mappers/reducers read back from the Configuration
    conf.setInt(MAX_LEVEL_KEY, kdeCommandLineOptions.getMaxLevel());
    conf.setInt(MIN_LEVEL_KEY, kdeCommandLineOptions.getMinLevel());
    conf.set(COVERAGE_NAME_KEY, kdeCoverageName);
    if (kdeCommandLineOptions.getCqlFilter() != null) {
      conf.set(GaussianCellMapper.CQL_FILTER_KEY, kdeCommandLineOptions.getCqlFilter());
    }
    conf.setDouble(X_MIN_KEY, xMin);
    conf.setDouble(X_MAX_KEY, xMax);
    conf.setDouble(Y_MIN_KEY, yMin);
    conf.setDouble(Y_MAX_KEY, yMax);
    conf.set(INPUT_CRSCODE_KEY, inputCrsCode);
    conf.set(OUTPUT_CRSCODE_KEY, outputCrsCode);
    preJob1Setup(conf);
    // Job 1: gaussian cell weight summation
    final Job job = new Job(conf);
    job.setJarByClass(this.getClass());
    addJobClasspathDependencies(job, conf);
    job.setJobName(getJob1Name());
    job.setMapperClass(getJob1Mapper());
    job.setCombinerClass(CellSummationCombiner.class);
    job.setReducerClass(getJob1Reducer());
    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(DoubleWritable.class);
    job.setOutputKeyClass(DoubleWritable.class);
    job.setOutputValueClass(LongWritable.class);
    job.setInputFormatClass(GeoWaveInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setNumReduceTasks(8);
    // deterministic counters ("Entries per level") require no speculative re-execution
    job.setSpeculativeExecution(false);
    final PersistentAdapterStore adapterStore = inputDataStoreOptions.createAdapterStore();
    final IndexStore indexStore = inputDataStoreOptions.createIndexStore();
    final InternalAdapterStore internalAdapterStore =
        inputDataStoreOptions.createInternalAdapterStore();
    final short internalAdapterId =
        internalAdapterStore.getAdapterId(kdeCommandLineOptions.getFeatureType());
    final DataTypeAdapter adapter = adapterStore.getAdapter(internalAdapterId).getAdapter();
    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder().addTypeName(adapter.getTypeName());
    if (kdeCommandLineOptions.getIndexName() != null) {
      bldr = bldr.indexName(kdeCommandLineOptions.getIndexName());
    }
    GeoWaveInputFormat.setMinimumSplitCount(
        job.getConfiguration(),
        kdeCommandLineOptions.getMinSplits());
    GeoWaveInputFormat.setMaximumSplitCount(
        job.getConfiguration(),
        kdeCommandLineOptions.getMaxSplits());
    GeoWaveInputFormat.setStoreOptions(job.getConfiguration(), inputDataStoreOptions);
    // When a CQL filter is given, push its bounding box down into the GeoWave query constraints
    if (kdeCommandLineOptions.getCqlFilter() != null) {
      Geometry bbox = null;
      if (adapter instanceof GeotoolsFeatureDataAdapter) {
        final String geometryAttribute =
            ((GeotoolsFeatureDataAdapter) adapter).getFeatureType().getGeometryDescriptor().getLocalName();
        final Filter filter = ECQL.toFilter(kdeCommandLineOptions.getCqlFilter());
        final ExtractGeometryFilterVisitorResult geoAndCompareOpData =
            (ExtractGeometryFilterVisitorResult) filter.accept(
                new ExtractGeometryFilterVisitor(GeometryUtils.getDefaultCRS(), geometryAttribute),
                null);
        bbox = geoAndCompareOpData.getGeometry();
      }
      if ((bbox != null) && !bbox.equals(GeometryUtils.infinity())) {
        bldr =
            bldr.constraints(
                bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(
                    bbox).build());
      }
    }
    // NOTE(review): this writes the query into 'conf', but 'job' was already constructed from
    // 'conf' above (Job copies the Configuration) while the split counts/store options above use
    // job.getConfiguration() — verify the query actually reaches job 1's input format.
    GeoWaveInputFormat.setQuery(conf, bldr.build(), adapterStore, internalAdapterStore, indexStore);
    FileSystem fs = null;
    try {
      fs = FileSystem.get(conf);
      // Clear any stale intermediate output from a previous run of the same KDE
      fs.delete(
          new Path(
              "/tmp/"
                  + inputDataStoreOptions.getGeoWaveNamespace()
                  + "_stats_"
                  + kdeCommandLineOptions.getMinLevel()
                  + "_"
                  + kdeCommandLineOptions.getMaxLevel()
                  + "_"
                  + kdeCommandLineOptions.getCoverageName()),
          true);
      FileOutputFormat.setOutputPath(
          job,
          new Path(
              "/tmp/"
                  + inputDataStoreOptions.getGeoWaveNamespace()
                  + "_stats_"
                  + kdeCommandLineOptions.getMinLevel()
                  + "_"
                  + kdeCommandLineOptions.getMaxLevel()
                  + "_"
                  + kdeCommandLineOptions.getCoverageName()
                  + "/basic"));
      final boolean job1Success = job.waitForCompletion(true);
      boolean job2Success = false;
      boolean postJob2Success = false;
      // Linear MapReduce job chaining
      if (job1Success) {
        // Propagate job 1's per-level counters into the configuration for job 2's reducers
        setupEntriesPerLevel(job, conf);
        // Stats Reducer Job configuration parameters
        final Job statsReducer = new Job(conf);
        statsReducer.setJarByClass(this.getClass());
        addJobClasspathDependencies(statsReducer, conf);
        statsReducer.setJobName(getJob2Name());
        statsReducer.setMapperClass(IdentityMapper.class);
        statsReducer.setPartitionerClass(getJob2Partitioner());
        statsReducer.setReducerClass(getJob2Reducer());
        // one reducer per zoom level
        statsReducer.setNumReduceTasks(
            getJob2NumReducers(
                (kdeCommandLineOptions.getMaxLevel() - kdeCommandLineOptions.getMinLevel()) + 1));
        statsReducer.setMapOutputKeyClass(DoubleWritable.class);
        statsReducer.setMapOutputValueClass(LongWritable.class);
        statsReducer.setOutputKeyClass(getJob2OutputKeyClass());
        statsReducer.setOutputValueClass(getJob2OutputValueClass());
        statsReducer.setInputFormatClass(SequenceFileInputFormat.class);
        statsReducer.setOutputFormatClass(getJob2OutputFormatClass());
        FileInputFormat.setInputPaths(
            statsReducer,
            new Path(
                "/tmp/"
                    + inputDataStoreOptions.getGeoWaveNamespace()
                    + "_stats_"
                    + kdeCommandLineOptions.getMinLevel()
                    + "_"
                    + kdeCommandLineOptions.getMaxLevel()
                    + "_"
                    + kdeCommandLineOptions.getCoverageName()
                    + "/basic"));
        setupJob2Output(
            conf,
            statsReducer,
            outputDataStoreOptions.getGeoWaveNamespace(),
            kdeCoverageName,
            outputPrimaryIndex);
        job2Success = statsReducer.waitForCompletion(true);
        if (job2Success) {
          postJob2Success =
              postJob2Actions(conf, outputDataStoreOptions.getGeoWaveNamespace(), kdeCoverageName);
        }
      } else {
        job2Success = false;
      }
      if (rasterResizeOutputDataStoreOptions != null) {
        // delegate to resize command to wrap it up with the correctly
        // requested tile size
        final ResizeMRCommand resizeCommand = new ResizeMRCommand();
        final File configFile = File.createTempFile("temp-config", null);
        final ManualOperationParams params = new ManualOperationParams();
        params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);
        final AddStoreCommand addStore = new AddStoreCommand();
        addStore.setParameters("temp-out");
        addStore.setPluginOptions(outputDataStoreOptions);
        addStore.execute(params);
        addStore.setParameters("temp-raster-out");
        addStore.setPluginOptions(rasterResizeOutputDataStoreOptions);
        addStore.execute(params);
        // We're going to override these anyway.
        resizeCommand.setParameters("temp-out", "temp-raster-out");
        resizeCommand.getOptions().setInputCoverageName(kdeCoverageName);
        resizeCommand.getOptions().setMinSplits(kdeCommandLineOptions.getMinSplits());
        resizeCommand.getOptions().setMaxSplits(kdeCommandLineOptions.getMaxSplits());
        resizeCommand.setHdfsHostPort(kdeCommandLineOptions.getHdfsHostPort());
        resizeCommand.setJobTrackerOrResourceManHostPort(
            kdeCommandLineOptions.getJobTrackerOrResourceManHostPort());
        resizeCommand.getOptions().setOutputCoverageName(kdeCommandLineOptions.getCoverageName());
        resizeCommand.getOptions().setOutputTileSize(kdeCommandLineOptions.getTileSize());
        final int resizeStatus =
            ToolRunner.run(resizeCommand.createRunner(params), new String[] {});
        if (resizeStatus == 0) {
          // delegate to clear command to clean up with tmp namespace
          // after successful resize
          final ClearStoreCommand clearCommand = new ClearStoreCommand();
          clearCommand.setParameters("temp-out");
          clearCommand.execute(params);
        } else {
          LOGGER.warn(
              "Resize command error code '"
                  + resizeStatus
                  + "'. Retaining temporary namespace '"
                  + outputDataStoreOptions.getGeoWaveNamespace()
                  + "' with tile size of 1.");
        }
      }
      // Remove the intermediate sequence-file output regardless of job success
      fs.delete(
          new Path(
              "/tmp/"
                  + inputDataStoreOptions.getGeoWaveNamespace()
                  + "_stats_"
                  + kdeCommandLineOptions.getMinLevel()
                  + "_"
                  + kdeCommandLineOptions.getMaxLevel()
                  + "_"
                  + kdeCommandLineOptions.getCoverageName()),
          true);
      return (job1Success && job2Success && postJob2Success) ? 0 : 1;
    } finally {
      if (fs != null) {
        try {
          fs.close();
        } catch (final IOException e) {
          LOGGER.info(e.getMessage());
          // Attempt to close, but don't throw an error if it is
          // already closed.
          // Log message, so find bugs does not complain.
        }
      }
    }
  }

  /**
   * Copies job 1's "Entries per level" counters into the configuration so job 2's reducers know
   * the total key count per level (used for percentile computation).
   */
  protected void setupEntriesPerLevel(final Job job1, final Configuration conf) throws IOException {
    for (int l = kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) {
      conf.setLong(
          "Entries per level.level" + l,
          job1.getCounters().getGroup("Entries per level").findCounter(
              "level " + Long.valueOf(l)).getValue());
    }
  }

  /** Hook for subclasses to adjust the configuration before job 1 is created. */
  protected void preJob1Setup(final Configuration conf) {}

  /** Hook for subclasses to run work after job 2 succeeds; return false to fail the run. */
  protected boolean postJob2Actions(
      final Configuration conf,
      final String statsNamespace,
      final String coverageName) throws Exception {
    return true;
  }

  protected Class<? extends OutputFormat> getJob2OutputFormatClass() {
    return GeoWaveOutputFormat.class;
  }

  protected Class getJob2OutputKeyClass() {
    return GeoWaveOutputKey.class;
  }

  protected Class getJob2OutputValueClass() {
    return GridCoverage.class;
  }

  protected Class<? extends Reducer> getJob2Reducer() {
    return KDEReducer.class;
  }

  protected Class<? extends Partitioner> getJob2Partitioner() {
    return DoubleLevelPartitioner.class;
  }

  protected int getJob2NumReducers(final int numLevels) {
    return numLevels;
  }

  protected Class<? extends Mapper> getJob1Mapper() {
    return GaussianCellMapper.class;
  }

  protected Class<? extends Reducer> getJob1Reducer() {
    return CellSummationReducer.class;
  }

  protected String getJob2Name() {
    return inputDataStoreOptions.getGeoWaveNamespace()
        + "("
        + kdeCommandLineOptions.getCoverageName()
        + ")"
        + " levels "
        + kdeCommandLineOptions.getMinLevel()
        + "-"
        + kdeCommandLineOptions.getMaxLevel()
        + " Ingest";
  }

  protected String getJob1Name() {
    return inputDataStoreOptions.getGeoWaveNamespace()
        + "("
        + kdeCommandLineOptions.getCoverageName()
        + ")"
        + " levels "
        + kdeCommandLineOptions.getMinLevel()
        + "-"
        + kdeCommandLineOptions.getMaxLevel()
        + " Calculation";
  }

  /** Creates the raster adapter for the KDE coverage and wires job 2's GeoWave output. */
  protected void setupJob2Output(
      final Configuration conf,
      final Job statsReducer,
      final String statsNamespace,
      final String coverageName,
      final Index index) throws Exception {
    final DataTypeAdapter adapter =
        RasterUtils.createDataAdapterTypeDouble(
            coverageName,
            KDEReducer.NUM_BANDS,
            TILE_SIZE,
            KDEReducer.MINS_PER_BAND,
            KDEReducer.MAXES_PER_BAND,
            KDEReducer.NAME_PER_BAND,
            null);
    setup(statsReducer, statsNamespace, adapter, index);
  }

  /**
   * Registers the adapter/index with the output store and the job's GeoWaveOutputFormat. The
   * short-lived writer forces the type/index registration to be persisted before the job runs.
   */
  protected void setup(
      final Job job,
      final String namespace,
      final DataTypeAdapter adapter,
      final Index index) throws IOException {
    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), outputDataStoreOptions);
    GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), adapter);
    GeoWaveOutputFormat.addIndex(job.getConfiguration(), index);
    final DataStore dataStore = outputDataStoreOptions.createDataStore();
    dataStore.addType(adapter, index);
    final Writer writer = dataStore.createWriter(adapter.getTypeName());
    writer.close();
  }

  public static void main(final String[] args) throws Exception {
    final ConfigOptions opts = new ConfigOptions();
    final OperationParser parser = new OperationParser();
    parser.addAdditionalObject(opts);
    final KdeCommand command = new KdeCommand();
    final CommandLineOperationParams params = parser.parse(command, args);
    opts.prepare(params);
    final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args);
    System.exit(res);
  }

  @Override
  public int run(final String[] args) throws Exception {
    return runJob();
  }

  /** Adds jars listed under GEOWAVE_CLASSPATH_JARS to the distributed job classpath. */
  protected void addJobClasspathDependencies(final Job job, final Configuration conf)
      throws IOException, URISyntaxException {
    final String[] jars = conf.getTrimmedStrings(GEOWAVE_CLASSPATH_JARS);
    if (jars != null) {
      for (final String jarPath : jars) {
        job.addArchiveToClassPath(new Path(new URI(jarPath)));
      }
    }
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/KDEReducer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kde;

import java.awt.image.WritableRaster;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;
import org.locationtech.geowave.adapter.raster.RasterUtils;
import org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter.ValueRange;
import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;
import org.locationtech.geowave.core.index.FloatCompareUtils;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.mapreduce.JobContextIndexStore;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.opengis.coverage.grid.GridCoverage;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * Second stage of the KDE: consumes (weight, cellId) pairs sorted by weight and emits one raster
 * coverage tile per cell with three bands: the raw weight, the weight normalized by the level
 * maximum, and the weight's percentile rank within the level. One reducer instance handles one
 * zoom level (selected via the task partition in setup).
 */
public class KDEReducer extends
    Reducer<DoubleWritable, LongWritable, GeoWaveOutputKey, GridCoverage> {

  /** Value object for a tile's geographic bounds and the in-tile pixel position. */
  private static final class TileInfo {
    private final double tileWestLon;
    private final double tileEastLon;
    private final double tileSouthLat;
    private final double tileNorthLat;
    private final int x;
    private final int y;

    public TileInfo(
        final double tileWestLon,
        final double tileEastLon,
        final double tileSouthLat,
        final double tileNorthLat,
        final int x,
        final int y) {
      this.tileWestLon = tileWestLon;
      this.tileEastLon = tileEastLon;
      this.tileSouthLat = tileSouthLat;
      this.tileNorthLat = tileNorthLat;
      this.x = x;
      this.y = y;
    }

    // NOTE: hashCode/equals intentionally cover only the four bounds fields, not x/y
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      long temp;
      temp = Double.doubleToLongBits(tileEastLon);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(tileNorthLat);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(tileSouthLat);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(tileWestLon);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final TileInfo other = (TileInfo) obj;
      if (Double.doubleToLongBits(tileEastLon) != Double.doubleToLongBits(other.tileEastLon)) {
        return false;
      }
      if (Double.doubleToLongBits(tileNorthLat) != Double.doubleToLongBits(other.tileNorthLat)) {
        return false;
      }
      if (Double.doubleToLongBits(tileSouthLat) != Double.doubleToLongBits(other.tileSouthLat)) {
        return false;
      }
      if (Double.doubleToLongBits(tileWestLon) != Double.doubleToLongBits(other.tileWestLon)) {
        return false;
      }
      return true;
    }
  }

  // Tolerance for treating two double weights as "the same" when assigning percentiles
  private static final double WEIGHT_EPSILON = 2.22E-14;
  public static final int NUM_BANDS = 3;
  protected static final String[] NAME_PER_BAND =
      new String[] {"Weight", "Normalized", "Percentile"};
  protected static final double[] MINS_PER_BAND = new double[] {0, 0, 0};
  protected static final double[] MAXES_PER_BAND = new double[] {Double.MAX_VALUE, 1, 1};
  // Running maximum weight for this level (fed by negated sentinel keys in reduce)
  private double max = -Double.MAX_VALUE;
  // Number of positive-weight keys processed so far (drives the percentile rank)
  private long currentKey = 0;
  private long totalKeys;
  private int minLevels;
  private int maxLevels;
  private int numLevels;
  private int level;
  private int numYPosts;
  private int numXTiles;
  private int numYTiles;
  private String coverageName;
  protected List<String> indexList;
  protected ValueRange[] valueRangePerDimension;
  protected String crsCode;
  // Previous weight/percentile, used to keep equal weights at the same percentile
  protected double prevValue = -1;
  protected double prevPct = 0;

  @Override
  protected void reduce(
      final DoubleWritable key,
      final Iterable<LongWritable> values,
      final Context context) throws IOException, InterruptedException {
    if (key.get() < 0) {
      // Negative keys carry negated candidate maxima; because DoubleWritable keys arrive in
      // ascending order these are seen before any positive weight, establishing 'max' first.
      final double prevMax = -key.get();
      if (prevMax > max) {
        max = prevMax;
      }
    } else {
      final double value = key.get();
      final double normalizedValue = value / max;
      // for consistency give all cells with matching weight the same
      // percentile
      // because we are using a DoubleWritable as the key, the ordering
      // isn't always completely reproducible as Double equals does not
      // take into account an epsilon, but we can make it reproducible by
      // doing a comparison with the previous value using an appropriate
      // epsilon
      final double percentile;
      if (FloatCompareUtils.checkDoublesEqual(prevValue, value, WEIGHT_EPSILON)) {
        percentile = prevPct;
      } else {
        percentile = (currentKey + 1.0) / totalKeys;
        prevPct = percentile;
        prevValue = value;
      }
      // calculate weights for this key
      for (final LongWritable v : values) {
        // strip the level encoding that MapContextCellCounter interleaved into the cell id
        final long cellIndex = v.get() / numLevels;
        final TileInfo tileInfo = fromCellIndexToTileInfo(cellIndex);
        final WritableRaster raster =
            RasterUtils.createRasterTypeDouble(NUM_BANDS, KDEJobRunner.TILE_SIZE);
        raster.setSample(tileInfo.x, tileInfo.y, 0, key.get());
        raster.setSample(tileInfo.x, tileInfo.y, 1, normalizedValue);
        raster.setSample(tileInfo.x, tileInfo.y, 2, percentile);
        context.write(
            new GeoWaveOutputKey(coverageName, indexList.toArray(new String[0])),
            RasterUtils.createCoverageTypeDouble(
                coverageName,
                tileInfo.tileWestLon,
                tileInfo.tileEastLon,
                tileInfo.tileSouthLat,
                tileInfo.tileNorthLat,
                MINS_PER_BAND,
                MAXES_PER_BAND,
                NAME_PER_BAND,
                raster,
                crsCode));
        currentKey++;
      }
    }
  }

  /** Converts a flat cell index into tile bounds plus the pixel position within the tile. */
  @SuppressFBWarnings(
      value = "INT_BAD_REM_BY_1",
      justification = "The calculation is appropriate if we ever want to vary to tile size.")
  private TileInfo fromCellIndexToTileInfo(final long index) {
    final int xPost = (int) (index / numYPosts);
    final int yPost = (int) (index % numYPosts);
    final int xTile = xPost / KDEJobRunner.TILE_SIZE;
    final int yTile = yPost / KDEJobRunner.TILE_SIZE;
    final int x = (xPost % KDEJobRunner.TILE_SIZE);
    final int y = (yPost % KDEJobRunner.TILE_SIZE);
    final double xMin = valueRangePerDimension[0].getMin();
    final double xMax = valueRangePerDimension[0].getMax();
    final double yMin = valueRangePerDimension[1].getMin();
    final double yMax = valueRangePerDimension[1].getMax();
    final double crsWidth = xMax - xMin;
    final double crsHeight = yMax - yMin;
    final double tileWestLon = ((xTile * crsWidth) / numXTiles) + xMin;
    final double tileSouthLat = ((yTile * crsHeight) / numYTiles) + yMin;
    final double tileEastLon = tileWestLon + (crsWidth / numXTiles);
    final double tileNorthLat = tileSouthLat + (crsHeight / numYTiles);
    return new TileInfo(
        tileWestLon,
        tileEastLon,
        tileSouthLat,
        tileNorthLat,
        x,
        KDEJobRunner.TILE_SIZE - y - 1); // remember java rasters go
    // from 0 at the
    // top
    // to (height-1) at the bottom, so we have
    // to
    // inverse the y here which goes from bottom
    // to top
  }

  @Override
  protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);
    minLevels = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);
    maxLevels = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);
    coverageName = context.getConfiguration().get(KDEJobRunner.COVERAGE_NAME_KEY, "");
    valueRangePerDimension =
        new ValueRange[] {
            new ValueRange(
                context.getConfiguration().getDouble(KDEJobRunner.X_MIN_KEY, -180),
                context.getConfiguration().getDouble(KDEJobRunner.X_MAX_KEY, 180)),
            new ValueRange(
                context.getConfiguration().getDouble(KDEJobRunner.Y_MIN_KEY, -90),
                context.getConfiguration().getDouble(KDEJobRunner.Y_MAX_KEY, 90))};
    crsCode = context.getConfiguration().get(KDEJobRunner.OUTPUT_CRSCODE_KEY);
    numLevels = (maxLevels - minLevels) + 1;
    // each reducer partition corresponds to one zoom level
    level = context.getConfiguration().getInt("mapred.task.partition", 0) + minLevels;
    // twice as many tile columns as rows (full-extent 2:1 aspect, e.g. lon/lat)
    numXTiles = (int) Math.pow(2, level + 1);
    numYTiles = (int) Math.pow(2, level);
    numYPosts = numYTiles * KDEJobRunner.TILE_SIZE;
    totalKeys = context.getConfiguration().getLong("Entries per level.level" + level, 10);
    final Index[] indices = JobContextIndexStore.getIndices(context);
    indexList = new ArrayList<>();
    if ((indices != null) && (indices.length > 0)) {
      for (final Index index : indices) {
        indexList.add(index.getName());
      }
    } else {
      // fall back to the default spatial index name when the job registered none
      indexList.add(new SpatialIndexBuilder().createIndex().getName());
    }
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/LevelPartitioner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Partitioner; public abstract class LevelPartitioner extends Partitioner { @Override public int getPartition(final K key, final LongWritable value, final int numReduceTasks) { return getPartition(value.get(), numReduceTasks); } protected int getPartition(final long positiveCellId, final int numReduceTasks) { return (int) (positiveCellId % numReduceTasks); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/MapContextCellCounter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde; import java.io.IOException; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Mapper.Context; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MapContextCellCounter implements CellCounter { private static final Logger LOGGER = LoggerFactory.getLogger(MapContextCellCounter.class); private final Context context; private final long minLevel; private final long maxLevel; private final long numLevels; private final long level; public MapContextCellCounter( final Context context, final long level, final long minLevel, final long maxLevel) { this.context = context; this.level = level; this.minLevel = minLevel; this.maxLevel = maxLevel; numLevels = (maxLevel - minLevel) + 1; } @Override public void increment(final long cellId, final double weight) { if (weight > 0) { try { context.write(new LongWritable(getCellId(cellId)), new DoubleWritable(weight)); } catch (IOException | InterruptedException e) { LOGGER.error("Unable to write", e); } } } protected long getCellId(final long cellId) { return (cellId * numLevels) + (level - minLevel); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonAccumuloStatsReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kde.compare;

import java.awt.image.WritableRaster;
import java.io.IOException;
import java.util.Arrays;
import javax.vecmath.Point2d;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;
import org.locationtech.geowave.adapter.raster.RasterUtils;
import org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;
import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.mapreduce.JobContextIndexStore;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.opengis.coverage.grid.GridCoverage;

/**
 * Final reducer of the seasonal comparison KDE: emits one 1x1 raster per cell with four bands —
 * the summer, winter and combined percentiles carried in the key, plus the percentile rank of the
 * combined value among all cells at this level. One reducer instance handles one zoom level.
 */
public class ComparisonAccumuloStatsReducer extends
    Reducer<ComparisonCellData, LongWritable, GeoWaveOutputKey, GridCoverage> {
  public static final int NUM_BANDS = 4;
  protected static final String[] NAME_PER_BAND =
      new String[] {"Summer", "Winter", "Combined", "Combined Percentile"};
  // band 2 (Combined) is a difference of percentiles, hence the [-1, 1] range
  protected static final double[] MINS_PER_BAND = new double[] {0, 0, -1, 0};
  protected static final double[] MAXES_PER_BAND = new double[] {1, 1, 1, 1};
  private static final int TILE_SIZE = 1;
  private long totalKeys = 0;
  // Number of values emitted so far; drives the rank percentile (band 3)
  private long currentKey;
  private int minLevels;
  private int maxLevels;
  private int numLevels;
  private int level;
  private int numXPosts;
  private int numYPosts;
  private String coverageName;
  protected String[] indexNames;

  @Override
  protected void reduce(
      final ComparisonCellData key,
      final Iterable<LongWritable> values,
      final Context context) throws IOException, InterruptedException {
    // for consistency give all cells with matching weight the same
    // percentile
    // (computed once before the loop, so every value of this key shares it)
    final double percentile = (currentKey + 1.0) / totalKeys;
    // calculate weights for this key
    for (final LongWritable v : values) {
      // strip the level encoding interleaved into the cell id
      final long cellIndex = v.get() / numLevels;
      final Point2d[] bbox = fromIndexToLL_UR(cellIndex);
      final WritableRaster raster = RasterUtils.createRasterTypeDouble(NUM_BANDS, TILE_SIZE);
      raster.setSample(0, 0, 0, key.getSummerPercentile());
      raster.setSample(0, 0, 1, key.getWinterPercentile());
      raster.setSample(0, 0, 2, key.getCombinedPercentile());
      raster.setSample(0, 0, 3, percentile);
      // no CRS code passed — presumably defaults to lon/lat (see the +/-180, +/-90 math
      // below); TODO confirm against RasterUtils.createCoverageTypeDouble
      context.write(
          new GeoWaveOutputKey(coverageName, indexNames),
          RasterUtils.createCoverageTypeDouble(
              coverageName,
              bbox[0].x,
              bbox[1].x,
              bbox[0].y,
              bbox[1].y,
              MINS_PER_BAND,
              MAXES_PER_BAND,
              NAME_PER_BAND,
              raster));
      currentKey++;
    }
  }

  /** Converts a flat cell index into [lower-left, upper-right] lon/lat corner points. */
  private Point2d[] fromIndexToLL_UR(final long index) {
    final double llLon = ((Math.floor(index / (double) numYPosts) * 360.0) / numXPosts) - 180.0;
    final double llLat = (((index % numYPosts) * 180.0) / numYPosts) - 90.0;
    final double urLon = llLon + (360.0 / numXPosts);
    final double urLat = llLat + (180.0 / numYPosts);
    return new Point2d[] {new Point2d(llLon, llLat), new Point2d(urLon, urLat)};
  }

  @Override
  protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);
    minLevels = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);
    maxLevels = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);
    coverageName = context.getConfiguration().get(KDEJobRunner.COVERAGE_NAME_KEY, "");
    numLevels = (maxLevels - minLevels) + 1;
    // each reducer partition corresponds to one zoom level
    level = context.getConfiguration().getInt("mapred.task.partition", 0) + minLevels;
    // twice as many columns as rows (360 vs 180 degrees of extent)
    numXPosts = (int) Math.pow(2, level + 1);
    numYPosts = (int) Math.pow(2, level);
    totalKeys = context.getConfiguration().getLong("Entries per level.level" + level, 10);
    final Index[] indices = JobContextIndexStore.getIndices(context);
    if ((indices != null) && (indices.length > 0)) {
      indexNames = Arrays.stream(indices).map(i -> i.getName()).toArray(i -> new String[i]);
    } else {
      // fall back to the default spatial index name when the job registered none
      indexNames = new String[] {new SpatialIndexBuilder().createIndex().getName()};
    }
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellData.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kde.compare;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.hadoop.io.WritableComparable;

/**
 * Hadoop-serializable pair of seasonal percentiles. Ordering, equality and hashing are all defined
 * by the combined percentile ({@code summer - winter}), not by the individual components.
 */
public class ComparisonCellData implements WritableComparable<ComparisonCellData> {
  private double summerPercentile;
  private double winterPercentile;

  /** No-arg constructor required by Hadoop serialization. */
  public ComparisonCellData() {}

  public ComparisonCellData(final double summerPercentile, final double winterPercentile) {
    this.summerPercentile = summerPercentile;
    this.winterPercentile = winterPercentile;
  }

  @Override
  public void readFields(final DataInput input) throws IOException {
    summerPercentile = input.readDouble();
    winterPercentile = input.readDouble();
  }

  @Override
  public void write(final DataOutput output) throws IOException {
    output.writeDouble(summerPercentile);
    output.writeDouble(winterPercentile);
  }

  public double getSummerPercentile() {
    return summerPercentile;
  }

  public double getWinterPercentile() {
    return winterPercentile;
  }

  /** @return the combination function of the two percentiles: {@code summer - winter} */
  public double getCombinedPercentile() {
    return applyCombinationFunction(summerPercentile, winterPercentile);
  }

  @Override
  public int compareTo(final ComparisonCellData other) {
    final double combined = getCombinedPercentile();
    return Double.compare(combined, other.getCombinedPercentile());
  }

  @Override
  public boolean equals(final Object val) {
    if (!(val instanceof ComparisonCellData)) {
      return false;
    }
    if (val == this) {
      return true;
    }
    // equality is delegated to compareTo, i.e. only the combined percentile matters
    return compareTo((ComparisonCellData) val) == 0;
  }

  @Override
  public int hashCode() {
    // FIX: equals() compares only the combined percentile, so the hash must be derived from the
    // same value. Hashing summer/winter separately violated the equals/hashCode contract for
    // distinct component pairs with an equal difference (e.g. (0.5, 0.3) vs (0.7, 0.5)).
    return new HashCodeBuilder(2003, 6373).append(getCombinedPercentile()).toHashCode();
  }

  private static double applyCombinationFunction(
      final double summerPercentile,
      final double winterPercentile) {
    return summerPercentile - winterPercentile;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellDataReducer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kde.compare;

import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;
import org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner;

/**
 * Converts cell weights into percentile ranks for one (level, season) partition.
 *
 * <p>Input keys are cell weights (sorted by the MR framework) and values are the cell IDs that
 * share that weight; output is (cell ID, percentile). The reducer is stateful across keys:
 * {@code currentKey} counts how many cells have been emitted so far, so percentiles grow
 * monotonically with weight order.
 *
 * <p>NOTE(review): the counter name read in setup() must match the config key written by
 * {@code ComparisonStatsJobRunner.setupEntriesPerLevel} — "Entries per level (<season>, <level>)".
 */
public class ComparisonCellDataReducer extends
    Reducer<DoubleWritable, LongWritable, LongWritable, DoubleWritable> {
  // total number of cells in this reducer's (level, season) partition, read from config in setup()
  private long totalKeys = 0;
  // running count of cells already emitted (rank of the current cell in weight order)
  private long currentKey = 0;
  // resolution level served by this reducer, derived from the partition number in setup()
  private int level;

  @Override
  protected void reduce(
      final DoubleWritable key,
      final Iterable<LongWritable> values,
      final Context context) throws IOException, InterruptedException {
    // for consistency give all cells with matching weight the same
    // percentile
    final double percentile = (currentKey + 1.0) / totalKeys;
    // calculate weights for this key
    for (final LongWritable v : values) {
      context.write(v, new DoubleWritable(percentile));
      // advance the rank per cell (not per key) so the next distinct weight starts higher
      currentKey++;
    }
  }

  @Override
  protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);
    final int minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1);
    final int maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25);
    // partitions are laid out [summer levels..., winter levels...]; the task partition number
    // therefore encodes both the level and the season
    level = context.getConfiguration().getInt("mapred.task.partition", 0) + minLevel;
    boolean isWinter = false;
    if (level > maxLevel) {
      // second half of the partition range: shift back into [minLevel, maxLevel] and mark winter
      level -= ((maxLevel - minLevel) + 1);
      isWinter = true;
    }
    // default of 10 is only a fallback if the previous job did not record the cell count
    totalKeys =
        context.getConfiguration().getLong(
            "Entries per level (" + (isWinter ? "winter" : "summer") + ", " + level + ")",
            10);
  }
}


================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellLevelPartitioner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import org.locationtech.geowave.analytic.mapreduce.kde.LevelPartitioner; public class ComparisonCellLevelPartitioner extends LevelPartitioner { } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCellSummationReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import org.apache.hadoop.io.LongWritable; import org.locationtech.geowave.analytic.mapreduce.kde.CellSummationReducer; public class ComparisonCellSummationReducer extends CellSummationReducer { @Override protected void collectStats( final LongWritable key, final double sum, final org.apache.hadoop.mapreduce.Reducer.Context context) { long positiveKey = key.get(); boolean isWinter = false; if (positiveKey < 0) { positiveKey = -positiveKey - 1; isWinter = true; } final long level = (positiveKey % numLevels) + minLevel; context.getCounter( "Entries per level (" + (isWinter ? "winter" : "summer") + ")", "level " + Long.toString(level)).increment(1); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCombinedLevelPartitioner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Partitioner; public class ComparisonCombinedLevelPartitioner extends Partitioner { @Override public int getPartition( final DoubleWritable key, final LongWritable value, final int numReduceTasks) { return getPartition(value.get(), numReduceTasks); } protected int getPartition(final long positiveCellId, final int numReduceTasks) { return (int) (positiveCellId % numReduceTasks); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCombiningStatsMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import java.io.IOException; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Mapper; public class ComparisonCombiningStatsMapper extends Mapper { @Override protected void map( final LongWritable key, final DoubleWritable value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { long positiveKey = key.get(); double adjustedValue = value.get(); if (positiveKey < 0) { positiveKey = -positiveKey - 1; adjustedValue *= -1; } super.map(new LongWritable(positiveKey), new DoubleWritable(adjustedValue), context); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCombiningStatsReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import java.io.IOException; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner; public class ComparisonCombiningStatsReducer extends Reducer { protected int minLevel; protected int maxLevel; protected int numLevels; @Override protected void setup(final Context context) throws IOException, InterruptedException { minLevel = context.getConfiguration().getInt(KDEJobRunner.MIN_LEVEL_KEY, 1); maxLevel = context.getConfiguration().getInt(KDEJobRunner.MAX_LEVEL_KEY, 25); numLevels = (maxLevel - minLevel) + 1; super.setup(context); } @Override public void reduce( final LongWritable key, final Iterable values, final Context context) throws IOException, InterruptedException { double summer = 0; double winter = 0; for (final DoubleWritable v : values) { if (v.get() < 0) { winter = -v.get(); } else { summer = v.get(); } } context.write(new ComparisonCellData(summer, winter), key); collectStats(key.get(), context); } protected void collectStats(final long key, final Context context) { final long level = (key % numLevels) + minLevel; context.getCounter("Entries per level", "level " + Long.toString(level)).increment(1); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import com.beust.jcommander.Parameter; public class ComparisonCommandLineOptions { @Parameter(names = "--timeAttribute", description = "The name of the time attribute") private String timeAttribute; public ComparisonCommandLineOptions() {} public ComparisonCommandLineOptions(final String timeAttribute) { this.timeAttribute = timeAttribute; } public String getTimeAttribute() { return timeAttribute; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonDoubleLevelPartitioner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import org.apache.hadoop.io.DoubleWritable; public class ComparisonDoubleLevelPartitioner extends ComparisonLevelPartitioner { } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonGaussianCellMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kde.compare;

import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.locationtech.geowave.analytic.mapreduce.kde.GaussianCellMapper;
import org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter;
import org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter.ValueRange;
import org.locationtech.jts.geom.Point;
import org.opengis.feature.simple.SimpleFeature;

/**
 * Gaussian cell mapper that splits each feature's KDE contribution into a summer or winter
 * accumulator based on a configurable time attribute. Winter cells are counted through a
 * {@link NegativeCellIdCounter} so their IDs stay disjoint from the summer cells.
 *
 * <p>Features whose time attribute is missing or not a {@link Date} are silently skipped.
 */
public class ComparisonGaussianCellMapper extends GaussianCellMapper {
  // config key under which ComparisonStatsJobRunner.preJob1Setup publishes the attribute name
  protected static final String TIME_ATTRIBUTE_KEY = "TIME_ATTRIBUTE";
  private String timeAttribute;
  // winter-season counterpart to the inherited levelStoreMap (summer); keyed by level
  private final Map<Integer, LevelStore> winterLevelStoreMap = new HashMap<>();

  @Override
  protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);
    timeAttribute = context.getConfiguration().get(TIME_ATTRIBUTE_KEY);
  }

  @Override
  protected void populateLevelStore(
      final org.apache.hadoop.mapreduce.Mapper.Context context,
      final int numXPosts,
      final int numYPosts,
      final int level) {
    // the superclass registers the summer store; add a parallel winter store whose counter
    // emits negative cell IDs so the two seasons never collide in the key space
    super.populateLevelStore(context, numXPosts, numYPosts, level);
    winterLevelStoreMap.put(
        level,
        new LevelStore(
            numXPosts,
            numYPosts,
            new NegativeCellIdCounter(context, level, minLevel, maxLevel)));
  }

  @Override
  protected void incrementLevelStore(
      final int level,
      final Point pt,
      final SimpleFeature feature,
      final ValueRange[] valueRangePerDimension) {
    final Object obj = feature.getAttribute(timeAttribute);
    if ((obj != null) && (obj instanceof Date)) {
      double contribution = 0;
      LevelStore levelStore = null;
      final Calendar cal = Calendar.getInstance();
      cal.setTime((Date) obj);
      // the seasonal variance algorithm we'll use will apply a gaussian
      // function to winter months (October - March), incrementing the
      // winter counter
      // and apply a gaussian function to April and September incrementing
      // the summer counter
      // the other months increment the summer counter
      final int featureMonth = cal.get(Calendar.MONTH);
      // NOTE(review): the Calendar.set calls below use day-of-month 0, which lenient Calendars
      // resolve to the last day of the previous month — presumably intentional to anchor the
      // window boundary just before the month starts; confirm before changing.
      if (featureMonth < 3) {
        // Jan-Mar: winter. Weight decays with distance from the start of the year.
        final Calendar baseDate = Calendar.getInstance();
        baseDate.set(cal.get(Calendar.YEAR), 0, 0, 0, 0, 0);
        final double deltaTime = cal.getTime().getTime() - baseDate.getTime().getTime();
        // now normalize so the value is between 0 and 3 (somewhat
        // arbitrary but e^-(x*x) asymptotically approaches 0 near 3 and
        // -3)
        final Calendar maxDate = Calendar.getInstance();
        maxDate.set(cal.get(Calendar.YEAR), 3, 0, 0, 0, 0);
        final double normalizedTime =
            (deltaTime * 3) / (maxDate.getTimeInMillis() - baseDate.getTimeInMillis());
        contribution = Math.pow(Math.E, -(normalizedTime * normalizedTime));
        levelStore = winterLevelStoreMap.get(level);
      } else if (featureMonth > 8) {
        // Oct-Dec: winter. Weight decays with distance back from the start of the next year.
        final Calendar baseDate = Calendar.getInstance();
        baseDate.set(cal.get(Calendar.YEAR) + 1, 0, 0, 0, 0, 0);
        final double deltaTime = baseDate.getTime().getTime() - cal.getTime().getTime();
        // now normalize so the value is between 0 and 3 (somewhat
        // arbitrary but e^-(x*x) asymptotically approaches 0 near 3 and
        // -3)
        final Calendar minDate = Calendar.getInstance();
        minDate.set(cal.get(Calendar.YEAR), 9, 0, 0, 0, 0);
        final double normalizedTime =
            (deltaTime * 3) / (baseDate.getTimeInMillis() - minDate.getTimeInMillis());
        contribution = Math.pow(Math.E, -(normalizedTime * normalizedTime));
        levelStore = winterLevelStoreMap.get(level);
      } else if ((featureMonth == 3) || (featureMonth == 8)) {
        // April and September: transitional months, counted as summer with a gaussian weight
        // that decays toward the winter-facing edge of the month
        final Calendar maxDate = Calendar.getInstance();
        maxDate.set(cal.get(Calendar.YEAR), featureMonth + 1, 0, 0, 0, 0);
        final double deltaTime;
        final Calendar minDate = Calendar.getInstance();
        minDate.set(cal.get(Calendar.YEAR), featureMonth, 0, 0, 0, 0);
        if (featureMonth == 3) {
          // April: distance to end of month (closer to March => smaller weight)
          deltaTime = maxDate.getTime().getTime() - cal.getTime().getTime();
        } else {
          // September: distance from start of month (closer to October => smaller weight)
          deltaTime = cal.getTime().getTime() - minDate.getTime().getTime();
        }
        final double normalizedTime =
            (deltaTime * 3) / (maxDate.getTimeInMillis() - minDate.getTimeInMillis());
        contribution = Math.pow(Math.E, -(normalizedTime * normalizedTime));
        levelStore = levelStoreMap.get(level);
      } else {
        // May-August: core summer, full weight
        contribution = 1;
        levelStore = levelStoreMap.get(level);
      }
      GaussianFilter.incrementPt(
          pt.getY(),
          pt.getX(),
          levelStore.counter,
          levelStore.numXPosts,
          levelStore.numYPosts,
          contribution,
          valueRangePerDimension);
    }
  }
}


================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonIdentityMapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Mapper; public class ComparisonIdentityMapper extends Mapper { @Override protected void map( final ComparisonCellData key, final LongWritable value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { context.write(key, value); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonLevelPartitioner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import org.apache.hadoop.io.LongWritable; import org.locationtech.geowave.analytic.mapreduce.kde.LevelPartitioner; public abstract class ComparisonLevelPartitioner extends LevelPartitioner { @Override public int getPartition(final T key, final LongWritable value, final int numReduceTasks) { final int reduceTasksPerSeason = numReduceTasks / 2; if (value.get() < 0) { // let the winter (cell ID < 0) get the second half of partitions return getPartition(-value.get() - 1, reduceTasksPerSeason) + reduceTasksPerSeason; } else { // let the summer (cell ID >= 0) get the first set of partitions return getPartition(value.get(), reduceTasksPerSeason); } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/ComparisonStatsJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import java.io.File; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.apache.hadoop.util.ToolRunner; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.analytic.mapreduce.kde.KDECommandLineOptions; import org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner; import org.locationtech.geowave.analytic.mapreduce.operations.KdeCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.opengis.feature.simple.SimpleFeature; 
public class ComparisonStatsJobRunner extends KDEJobRunner { private final String timeAttribute; public ComparisonStatsJobRunner( final ComparisonCommandLineOptions inputOptions, final KDECommandLineOptions kdeCommandLineOptions, final DataStorePluginOptions inputDataStoreOptions, final DataStorePluginOptions outputDataStoreOptions, final File configFile, final Index outputIndex) { super( kdeCommandLineOptions, inputDataStoreOptions, outputDataStoreOptions, configFile, outputIndex); timeAttribute = inputOptions.getTimeAttribute(); } public static void main(final String[] args) throws Exception { final ConfigOptions opts = new ConfigOptions(); final ComparisonCommandLineOptions comparisonOptions = new ComparisonCommandLineOptions(); final OperationParser parser = new OperationParser(); parser.addAdditionalObject(opts); parser.addAdditionalObject(comparisonOptions); final KdeCommand kdeCommand = new KdeCommand(); final CommandLineOperationParams params = parser.parse(kdeCommand, args); // Load the params for config file. opts.prepare(params); final File configFile = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT); // Don't care about output, but this will set the datastore options. 
kdeCommand.createRunner(params); final ComparisonStatsJobRunner runner = new ComparisonStatsJobRunner( comparisonOptions, kdeCommand.getKdeOptions(), kdeCommand.getInputStoreOptions(), kdeCommand.getOutputStoreOptions(), configFile, null); final int res = ToolRunner.run(new Configuration(), runner, args); System.exit(res); } @Override protected void preJob1Setup(final Configuration conf) { super.preJob1Setup(conf); conf.set(ComparisonGaussianCellMapper.TIME_ATTRIBUTE_KEY, timeAttribute); } @Override protected boolean postJob2Actions( final Configuration conf, final String statsNamespace, final String coverageName) throws Exception { try (final FileSystem fs = FileSystem.get(conf)) { fs.delete( new Path( "/tmp/" + inputDataStoreOptions.getGeoWaveNamespace() + "_stats_" + kdeCommandLineOptions.getMinLevel() + "_" + kdeCommandLineOptions.getMaxLevel() + "_" + kdeCommandLineOptions.getCoverageName() + "/basic"), true); final Job combiner = new Job(conf); combiner.setJarByClass(this.getClass()); combiner.setJobName( inputDataStoreOptions.getGeoWaveNamespace() + "(" + kdeCommandLineOptions.getCoverageName() + ")" + " levels " + kdeCommandLineOptions.getMinLevel() + "-" + kdeCommandLineOptions.getMaxLevel() + " combining seasons"); combiner.setMapperClass(ComparisonCombiningStatsMapper.class); combiner.setReducerClass(ComparisonCombiningStatsReducer.class); combiner.setMapOutputKeyClass(LongWritable.class); combiner.setMapOutputValueClass(DoubleWritable.class); combiner.setOutputKeyClass(ComparisonCellData.class); combiner.setOutputValueClass(LongWritable.class); combiner.setInputFormatClass(SequenceFileInputFormat.class); combiner.setOutputFormatClass(SequenceFileOutputFormat.class); FileOutputFormat.setOutputPath( combiner, new Path( "/tmp/" + inputDataStoreOptions.getGeoWaveNamespace() + "_stats_" + kdeCommandLineOptions.getMinLevel() + "_" + kdeCommandLineOptions.getMaxLevel() + "_" + kdeCommandLineOptions.getCoverageName() + "/combined_pct")); 
FileInputFormat.setInputPaths( combiner, new Path( "/tmp/" + inputDataStoreOptions.getGeoWaveNamespace() + "_stats_" + kdeCommandLineOptions.getMinLevel() + "_" + kdeCommandLineOptions.getMaxLevel() + "_" + kdeCommandLineOptions.getCoverageName() + "/percentiles")); if (combiner.waitForCompletion(true)) { fs.delete( new Path( "/tmp/" + inputDataStoreOptions.getGeoWaveNamespace() + "_stats_" + kdeCommandLineOptions.getMinLevel() + "_" + kdeCommandLineOptions.getMaxLevel() + "_" + kdeCommandLineOptions.getCoverageName() + "/percentiles"), true); for (int l = kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) { conf.setLong( "Entries per level.level" + l, combiner.getCounters().getGroup("Entries per level").findCounter( "level " + Long.valueOf(l)).getValue()); } // Stats Reducer Job configuration parameters final Job ingester = new Job(conf); ingester.setJarByClass(this.getClass()); ingester.setJobName( inputDataStoreOptions.getGeoWaveNamespace() + "(" + kdeCommandLineOptions.getCoverageName() + ")" + " levels " + kdeCommandLineOptions.getMinLevel() + "-" + kdeCommandLineOptions + " Ingest"); ingester.setMapperClass(ComparisonIdentityMapper.class); ingester.setPartitionerClass(ComparisonCellLevelPartitioner.class); ingester.setReducerClass(ComparisonAccumuloStatsReducer.class); ingester.setNumReduceTasks( (kdeCommandLineOptions.getMaxLevel() - kdeCommandLineOptions.getMinLevel()) + 1); ingester.setMapOutputKeyClass(ComparisonCellData.class); ingester.setMapOutputValueClass(LongWritable.class); ingester.setOutputKeyClass(GeoWaveOutputKey.class); ingester.setOutputValueClass(SimpleFeature.class); ingester.setInputFormatClass(SequenceFileInputFormat.class); ingester.setOutputFormatClass(GeoWaveOutputFormat.class); FileInputFormat.setInputPaths( ingester, new Path( "/tmp/" + inputDataStoreOptions.getGeoWaveNamespace() + "_stats_" + kdeCommandLineOptions.getMinLevel() + "_" + kdeCommandLineOptions.getMaxLevel() + "_" + 
kdeCommandLineOptions.getCoverageName() + "/combined_pct")); GeoWaveOutputFormat.setStoreOptions(conf, outputDataStoreOptions); setup( ingester, statsNamespace, RasterUtils.createDataAdapterTypeDouble( coverageName, ComparisonAccumuloStatsReducer.NUM_BANDS, 1, ComparisonAccumuloStatsReducer.MINS_PER_BAND, ComparisonAccumuloStatsReducer.MAXES_PER_BAND, ComparisonAccumuloStatsReducer.NAME_PER_BAND, null), SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions())); return ingester.waitForCompletion(true); } return false; } } @Override protected Class getJob2OutputFormatClass() { return SequenceFileOutputFormat.class; } @Override protected Class getJob2OutputKeyClass() { return LongWritable.class; } @Override protected Class getJob2OutputValueClass() { return DoubleWritable.class; } @Override protected Class getJob2Reducer() { return ComparisonCellDataReducer.class; } @Override protected int getJob2NumReducers(final int numLevels) { return super.getJob2NumReducers(numLevels) * 2; } @Override protected Class getJob1Mapper() { return ComparisonGaussianCellMapper.class; } @Override protected Class getJob1Reducer() { return ComparisonCellSummationReducer.class; } @Override protected Class getJob2Partitioner() { return ComparisonDoubleLevelPartitioner.class; } @Override protected String getJob2Name() { return inputDataStoreOptions.getGeoWaveNamespace() + "(" + kdeCommandLineOptions.getCoverageName() + ")" + " levels " + kdeCommandLineOptions.getMinLevel() + "-" + kdeCommandLineOptions.getMaxLevel() + " Percentile Calculation by season"; } @Override protected String getJob1Name() { return super.getJob1Name() + " initial calculation by season"; } @Override protected void setupEntriesPerLevel(final Job job1, final Configuration conf) throws IOException { for (int l = kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) { conf.setLong( "Entries per level (winter, " + l + ")", job1.getCounters().getGroup("Entries per level 
(winter)").findCounter( "level " + Long.valueOf(l)).getValue()); conf.setLong( "Entries per level (summer, " + l + ")", job1.getCounters().getGroup("Entries per level (summer)").findCounter( "level " + Long.valueOf(l)).getValue()); } } @Override protected void setupJob2Output( final Configuration conf, final Job statsReducer, final String statsNamespace, final String coverageName, final Index index) throws Exception { FileOutputFormat.setOutputPath( statsReducer, new Path( "/tmp/" + inputDataStoreOptions.getGeoWaveNamespace() + "_stats_" + kdeCommandLineOptions.getMinLevel() + "_" + kdeCommandLineOptions.getMaxLevel() + "_" + kdeCommandLineOptions.getCoverageName() + "/percentiles")); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kde/compare/NegativeCellIdCounter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kde.compare; import org.apache.hadoop.mapreduce.Mapper.Context; import org.locationtech.geowave.analytic.mapreduce.kde.MapContextCellCounter; public class NegativeCellIdCounter extends MapContextCellCounter { public NegativeCellIdCounter( final Context context, final long level, final long minLevel, final long maxLevel) { super(context, level, minLevel, maxLevel); } @Override protected long getCellId(final long cellId) { return -super.getCellId(cellId) - 1; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KMeansDistortionMapReduce.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import java.io.IOException; import java.util.List; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.extract.CentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.JumpParameters; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper; import 
org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.locationtech.jts.geom.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Calculate the distortion. * *

See Catherine A. Sugar and Gareth M. James (2003). "Finding the number of clusters in a data * set: An information theoretic approach" Journal of the American Statistical Association 98 * (January): 750–763 * * Context configuration parameters include: *

"KMeansDistortionMapReduce.Common.DistanceFunctionClass" -> {@link * org.locationtech.geowave.analytic.distance.DistanceFn} used to determine distance to centroid *

"KMeansDistortionMapReduce.Centroid.WrapperFactoryClass" -> {@link * AnalyticItemWrapperFactory} to extract wrap spatial objects with Centroid management * functions *

"KMeansDistortionMapReduce.Centroid.ExtractorClass" -> {@link * org.locationtech.geowave.analytic.extract.CentroidExtractor} *

"KMeansDistortionMapReduce.Jump.CountOfCentroids" -> May be different from actual. * * @see CentroidManagerGeoWave */ public class KMeansDistortionMapReduce { protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansDistortionMapReduce.class); public static class KMeansDistortionMapper extends GeoWaveWritableInputMapper { private NestedGroupCentroidAssignment nestedGroupCentroidAssigner; private final Text outputKeyWritable = new Text("1"); private final CountofDoubleWritable outputValWritable = new CountofDoubleWritable(); private CentroidExtractor centroidExtractor; private AnalyticItemWrapperFactory itemWrapperFactory; AssociationNotification centroidAssociationFn = new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { outputKeyWritable.set(pairing.getCentroid().getGroupID()); final double extraFromItem[] = pairing.getPairedItem().getDimensionValues(); final double extraCentroid[] = pairing.getCentroid().getDimensionValues(); final Point p = centroidExtractor.getCentroid(pairing.getPairedItem().getWrappedItem()); final Point centroid = centroidExtractor.getCentroid(pairing.getCentroid().getWrappedItem()); // calculate error for dp // using identity matrix for the common covariance, therefore // E[(p - c)^-1 * cov * (p - c)] => (px - cx)^2 + (py - cy)^2 double expectation = 0.0; for (int i = 0; i < extraCentroid.length; i++) { expectation += Math.pow(extraFromItem[i] - extraCentroid[i], 2); } expectation += (Math.pow(p.getCoordinate().x - centroid.getCoordinate().x, 2) + Math.pow(p.getCoordinate().y - centroid.getCoordinate().y, 2)); // + Math.pow( // p.getCoordinate().z - centroid.getCoordinate().z, // 2)); outputValWritable.set(expectation, 1); } }; @Override protected void mapNativeValue( final GeoWaveInputKey key, final Object value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { nestedGroupCentroidAssigner.findCentroidForLevel( 
itemWrapperFactory.create(value), centroidAssociationFn); context.write(outputKeyWritable, outputValWritable); } @SuppressWarnings("unchecked") @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), KMeansDistortionMapReduce.class, KMeansDistortionMapReduce.LOGGER); try { nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<>( context, KMeansDistortionMapReduce.class, KMeansDistortionMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } try { centroidExtractor = config.getInstance( CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidExtractor.class, SimpleFeatureCentroidExtractor.class); } catch (final Exception e1) { throw new IOException(e1); } try { itemWrapperFactory = config.getInstance( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); } catch (final Exception e1) { throw new IOException(e1); } } } public static class KMeansDistorationCombiner extends Reducer { final CountofDoubleWritable outputValue = new CountofDoubleWritable(); @Override public void reduce( final Text key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { double expectation = 0; double ptCount = 0; for (final CountofDoubleWritable value : values) { expectation += value.getValue(); ptCount += value.getCount(); } outputValue.set(expectation, ptCount); context.write(key, outputValue); } } public static class KMeansDistortionReduce extends Reducer { private Integer expectedK = null; protected final Text output = new Text(""); private CentroidManagerGeoWave centroidManager; private String batchId; @Override public void reduce( final Text key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { double expectation = 0.0; final List> 
centroids = centroidManager.getCentroidsForGroup(key.toString()); // it is possible that the number of items in a group are smaller // than the cluster final Integer kCount; if (expectedK == null) { kCount = centroids.size(); } else { kCount = expectedK; } if (centroids.size() == 0) { return; } final double numDimesions = 2 + centroids.get(0).getExtraDimensions().length; double ptCount = 0; for (final CountofDoubleWritable value : values) { expectation += value.getValue(); ptCount += value.getCount(); } if (ptCount > 0) { expectation /= ptCount; final Double distortion = Math.pow(expectation / numDimesions, -(numDimesions / 2)); final DistortionEntry entry = new DistortionEntry(key.toString(), batchId, kCount, distortion); context.write( new GeoWaveOutputKey( DistortionDataAdapter.ADAPTER_TYPE_NAME, DistortionGroupManagement.DISTORTIONS_INDEX_ARRAY), entry); } } @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), KMeansDistortionMapReduce.class, KMeansDistortionMapReduce.LOGGER); final int k = config.getInt(JumpParameters.Jump.COUNT_OF_CENTROIDS, -1); if (k > 0) { expectedK = k; } try { centroidManager = new CentroidManagerGeoWave<>( context, KMeansDistortionMapReduce.class, KMeansDistortionMapReduce.LOGGER); } catch (final Exception e) { KMeansDistortionMapReduce.LOGGER.warn("Unable to initialize centroid manager", e); throw new IOException("Unable to initialize centroid manager", e); } batchId = config.getString(GlobalParameters.Global.PARENT_BATCH_ID, centroidManager.getBatchId()); } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KMeansMapReduce.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import java.io.IOException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.GeoObjectDimensionValues; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException; import org.locationtech.geowave.analytic.extract.CentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.mapreduce.GroupIDText; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Point; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * K-Means mapper and reducer. Mapper determines the closest centroid for an item in the item's * assigned group. A group contains one or more centroids. The dimensions for the item are sent to * the reducer along with the closest centroid ID. * *

Reducer Outputs a new copy of a centroid with the geometry and other dimensions updated * towards their respective mean for the assigned items. * *

Properties: * * "KMeansMapReduce.Common.DistanceFunctionClass" - Used to determine distance to * centroid *

"KMeansMapReduce.Centroid.ExtractorClass" - Used to extract a centroid point from an item * geometry *

"KMeansMapReduce.Centroid.WrapperFactoryClass" - {@link AnalyticItemWrapperFactory} to * extract wrap spatial objects with Centroid management function *

"KMeansMapReduce.Centroid.ZoomLevel" -> The current zoom level @See CentroidManagerGeoWave * */ public class KMeansMapReduce { protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansMapReduce.class); public static class KMeansMapper extends GeoWaveWritableInputMapper { private NestedGroupCentroidAssignment nestedGroupCentroidAssigner; private final GroupIDText outputKeyWritable = new GroupIDText(); private final BytesWritable outputValWritable = new BytesWritable(); private final GeoObjectDimensionValues association = new GeoObjectDimensionValues(); protected CentroidExtractor centroidExtractor; protected AnalyticItemWrapperFactory itemWrapperFactory; AssociationNotification centroidAssociationFn = new AssociationNotification() { @Override public void notify(final CentroidPairing pairing) { outputKeyWritable.set(pairing.getCentroid().getGroupID(), pairing.getCentroid().getID()); final double extra[] = pairing.getPairedItem().getDimensionValues(); final Point p = centroidExtractor.getCentroid(pairing.getPairedItem().getWrappedItem()); association.set( p.getCoordinate().x, p.getCoordinate().y, p.getCoordinate().z, extra, pairing.getDistance()); } }; @Override protected void mapNativeValue( final GeoWaveInputKey key, final Object value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { final AnalyticItemWrapper item = itemWrapperFactory.create(value); nestedGroupCentroidAssigner.findCentroidForLevel(item, centroidAssociationFn); final byte[] outData = association.toBinary(); outputValWritable.set(outData, 0, outData.length); context.write(outputKeyWritable, outputValWritable); } @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), KMeansMapReduce.class, KMeansMapReduce.LOGGER); try { nestedGroupCentroidAssigner = new 
NestedGroupCentroidAssignment<>( context, KMeansMapReduce.class, KMeansMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } try { centroidExtractor = config.getInstance( CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidExtractor.class, SimpleFeatureCentroidExtractor.class); } catch (final Exception e1) { throw new IOException(e1); } try { itemWrapperFactory = config.getInstance( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); itemWrapperFactory.initialize(context, KMeansMapReduce.class, KMeansMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } } } /** Optimization */ public static class KMeansCombiner extends Reducer { private final GeoObjectDimensionValues geoObject = new GeoObjectDimensionValues(); private final BytesWritable outputValWritable = new BytesWritable(); @Override public void reduce( final GroupIDText key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final GeoObjectDimensionValues totals = new GeoObjectDimensionValues(); for (final BytesWritable value : values) { geoObject.fromBinary(value.getBytes()); totals.add(geoObject); } final byte[] outData = totals.toBinary(); outputValWritable.set(outData, 0, outData.length); context.write(key, outputValWritable); } } public static class KMeansReduce extends Reducer { protected CentroidManager centroidManager; private final GeoObjectDimensionValues geoObject = new GeoObjectDimensionValues(); private String[] indexNames; @Override public void reduce( final GroupIDText key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final String centroidID = key.getID(); final String groupID = key.getGroupID(); final GeoObjectDimensionValues totals = new GeoObjectDimensionValues(); for (final BytesWritable value : values) { geoObject.fromBinary(value.getBytes()); totals.add(geoObject); } 
AnalyticItemWrapper centroid; try { centroid = getFeatureForCentroid(centroidID, groupID); } catch (final MatchingCentroidNotFoundException e) { LOGGER.error("Unable to get centroid " + centroidID + " for group " + groupID, e); return; } // do not update the cost, because this cost is associated with the // centroid PRIOR to this update. // centroid.setCost(totals.distance); centroid.resetAssociatonCount(); centroid.incrementAssociationCount(totals.getCount()); final double ptCount = totals.getCount(); // mean totals.x = totals.x / ptCount; totals.y = totals.y / ptCount; totals.z = totals.z / ptCount; final int s = centroid.getExtraDimensions().length; for (int i = 0; i < s; i++) { totals.values[i] = totals.values[i] / ptCount; } if (KMeansMapReduce.LOGGER.isTraceEnabled()) { KMeansMapReduce.LOGGER.trace(groupID + " contains " + centroidID); } final AnalyticItemWrapper nextCentroid = centroidManager.createNextCentroid( centroid.getWrappedItem(), groupID, new Coordinate(totals.x, totals.y, totals.z), centroid.getExtraDimensions(), totals.values); // new center context.write( new GeoWaveOutputKey(centroidManager.getDataTypeName(), indexNames), nextCentroid.getWrappedItem()); } private AnalyticItemWrapper getFeatureForCentroid(final String id, final String groupID) throws IOException, MatchingCentroidNotFoundException { return centroidManager.getCentroidById(id, groupID); } @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { super.setup(context); try { centroidManager = new CentroidManagerGeoWave<>(context, KMeansMapReduce.class, KMeansMapReduce.LOGGER); indexNames = new String[] {centroidManager.getIndexName()}; } catch (final Exception e) { throw new IOException(e); } } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KSamplerMapReduce.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.UUID; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Partitioner; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.extract.CentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.SampleParameters; import org.locationtech.geowave.analytic.sample.function.RandomSamplingRankFunction; import org.locationtech.geowave.analytic.sample.function.SamplingRankFunction; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper; import 
org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.locationtech.jts.geom.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Samples a random 'k' number of features from a population of geospatial features PER GROUP. * Outputs the samples in SimpleFeatures. Sampling is achieved by picking the top ranked input * objects. Rank is determined by a sample function implementing {@link SamplingRankFunction}. * *

The input features should have a groupID set if they intend to be sampled by group. * *

Keys are partitioned by the group ID in an attempt to process each group in a separate * reducer. * *

Sampled features are written to as a new SimpleFeature to a data store. The SimpleFeature * contains attributes: * * *

name - data id of the sampled point *

weight - can be anything including the sum of all assigned feature distances *

geometry - geometry of the sampled features *

count - to hold the number of assigned features *

groupID - the assigned group ID to the input objects * *

Properties: *

"KSamplerMapReduce.Sample.SampleSize" - number of input objects to sample. defaults to 1. *

"KSamplerMapReduce.Sample.DataTypeId" - Id of the data type to store the k samples - * defaults to "centroids" *

"KSamplerMapReduce.Centroid.ExtractorClass" - extracts a centroid from an item. This * parameter allows customization of determining one or more representative centroids for a * geometry. *

"KSamplerMapReduce.Sample.IndexId" - The Index ID used for output simple features. *

"KSamplerMapReduce.Sample.SampleRankFunction" - An implementation of {@link * SamplingRankFunction} used to rank the input object. *

"KSamplerMapReduce.Centroid.ZoomLevel" - Sets an attribute on the sampled objects * recording a zoom level used in the sampling process. The interpretation of the attribute is * not specified or assumed. *

"KSamplerMapReduce.Global.BatchId" ->the id of the batch; defaults to current time in * millis (for range comparisons) *

"KSamplerMapReduce.Centroid.WrapperFactoryClass" -> {@link AnalyticItemWrapperFactory} to * extract non-geometric dimensions * */ public class KSamplerMapReduce { protected static final Logger LOGGER = LoggerFactory.getLogger(KSamplerMapReduce.class); public static class SampleMap extends GeoWaveWritableInputMapper { protected GeoWaveInputKey outputKey = new GeoWaveInputKey(); private final KeyManager keyManager = new KeyManager(); private SamplingRankFunction samplingFunction; private ObjectWritable currentValue; private AnalyticItemWrapperFactory itemWrapperFactory; private int sampleSize = 1; private NestedGroupCentroidAssignment nestedGroupCentroidAssigner; // Override parent since there is not need to decode the value. @Override protected void mapWritableValue( final GeoWaveInputKey key, final ObjectWritable value, final Mapper.Context context) throws IOException, InterruptedException { // cached for efficiency since the output is the input object // the de-serialized input object is only used for sampling. // For simplicity, allow the de-serialization to occur in all cases, // even though some sampling // functions do not inspect the input object. 
currentValue = value; super.mapWritableValue(key, value, context); } @Override protected void mapNativeValue( final GeoWaveInputKey key, final Object value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { @SuppressWarnings("unchecked") final double rank = samplingFunction.rank(sampleSize, (T) value); if (rank > 0.0000000001) { final AnalyticItemWrapper wrapper = itemWrapperFactory.create(value); outputKey.setDataId( new ByteArray( keyManager.putData( nestedGroupCentroidAssigner.getGroupForLevel(wrapper), 1.0 - rank, // sorts // in // ascending // order key.getDataId().getBytes()))); outputKey.setInternalAdapterId(key.getInternalAdapterId()); outputKey.setGeoWaveKey(key.getGeoWaveKey()); context.write(outputKey, currentValue); } } @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), KSamplerMapReduce.class, KSamplerMapReduce.LOGGER); sampleSize = config.getInt(SampleParameters.Sample.SAMPLE_SIZE, 1); try { nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<>( context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } try { samplingFunction = config.getInstance( SampleParameters.Sample.SAMPLE_RANK_FUNCTION, SamplingRankFunction.class, RandomSamplingRankFunction.class); samplingFunction.initialize(context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } try { itemWrapperFactory = config.getInstance( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); itemWrapperFactory.initialize(context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } } } public static class SampleReducer extends 
GeoWaveWritableInputReducer { private int maxCount = 1; private CentroidExtractor centroidExtractor; private AnalyticItemWrapperFactory itemWrapperFactory; private String sampleDataTypeName = null; private String[] indexNames; private int zoomLevel = 1; private String batchID; private final Map outputCounts = new HashMap<>(); @Override protected void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final String groupID = KeyManager.getGroupAsString(key.getDataId().getBytes()); for (final Object value : values) { final AnalyticItemWrapper sampleItem = itemWrapperFactory.create((T) value); Integer outputCount = outputCounts.get(groupID); outputCount = outputCount == null ? Integer.valueOf(0) : outputCount; if ((outputCount == null) || (outputCount < maxCount)) { final AnalyticItemWrapper centroid = createCentroid(groupID, sampleItem); if (centroid != null) { context.write( new GeoWaveOutputKey(sampleDataTypeName, indexNames), centroid.getWrappedItem()); outputCount++; outputCounts.put(groupID, outputCount); } } } } private AnalyticItemWrapper createCentroid( final String groupID, final AnalyticItemWrapper item) { final Point point = centroidExtractor.getCentroid(item.getWrappedItem()); final AnalyticItemWrapper nextCentroid = itemWrapperFactory.createNextItem( item.getWrappedItem(), groupID, point.getCoordinate(), item.getExtraDimensions(), item.getDimensionValues()); nextCentroid.setBatchID(batchID); nextCentroid.setGroupID(groupID); nextCentroid.setZoomLevel(zoomLevel); return nextCentroid; } @SuppressWarnings("unchecked") @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { super.setup(context); final ScopedJobConfiguration config = new ScopedJobConfiguration( context.getConfiguration(), KSamplerMapReduce.class, KSamplerMapReduce.LOGGER); maxCount = config.getInt(SampleParameters.Sample.SAMPLE_SIZE, 1); zoomLevel = 
config.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, 1); sampleDataTypeName = config.getString(SampleParameters.Sample.DATA_TYPE_NAME, "sample"); batchID = config.getString(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); final String indexName = config.getString( SampleParameters.Sample.INDEX_NAME, SpatialDimensionalityTypeProvider.createIndexFromOptions( new SpatialOptions()).getName()); indexNames = new String[] {indexName}; try { centroidExtractor = config.getInstance( CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidExtractor.class, SimpleFeatureCentroidExtractor.class); } catch (final Exception e1) { throw new IOException(e1); } try { itemWrapperFactory = config.getInstance( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class, SimpleFeatureItemWrapperFactory.class); itemWrapperFactory.initialize(context, KSamplerMapReduce.class, KSamplerMapReduce.LOGGER); } catch (final Exception e1) { throw new IOException(e1); } } } public static class SampleKeyPartitioner extends Partitioner { @Override public int getPartition( final GeoWaveInputKey key, final ObjectWritable val, final int numPartitions) { final byte[] grpIDInBytes = KeyManager.getGroup(key.getDataId().getBytes()); final int partition = hash(grpIDInBytes) % numPartitions; return partition; } private int hash(final byte[] data) { int code = 1; int i = 0; for (final byte b : data) { code += b * Math.pow(31, data.length - 1 - (i++)); } return code; } } private static class KeyManager { private ByteBuffer keyBuffer = ByteBuffer.allocate(64); private static String getGroupAsString(final byte[] data) { return new String(getGroup(data), StringUtils.getGeoWaveCharset()); } private static byte[] getGroup(final byte[] data) { final ByteBuffer buffer = ByteBuffer.wrap(data); buffer.getDouble(); final int len = buffer.getInt(); return Arrays.copyOfRange(data, buffer.position(), (buffer.position() + len)); } private byte[] putData(final String groupID, final double 
weight, final byte[] dataIdBytes) { keyBuffer.rewind(); final byte[] groupIDBytes = groupID.getBytes(StringUtils.getGeoWaveCharset()); // try to reuse final int size = dataIdBytes.length + 16 + groupIDBytes.length; if (keyBuffer.capacity() < size) { keyBuffer = ByteBuffer.allocate(size); } keyBuffer.putDouble(weight); keyBuffer.putInt(groupIDBytes.length); keyBuffer.put(groupIDBytes); keyBuffer.putInt(dataIdBytes.length); keyBuffer.put(dataIdBytes); return keyBuffer.array(); } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/UpdateCentroidCostMapReduce.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.ScopedJobConfiguration; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.CentroidPairing; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException; import org.locationtech.geowave.analytic.kmeans.AssociationNotification; import org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable; import org.locationtech.geowave.analytic.mapreduce.GroupIDText; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputMapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Update the SINGLE cost of the clustering as a measure of distance from all points to their * closest center. * *

As an FYI: During the clustering algorithm, the cost should be monotonic decreasing. * * *

Context configuration parameters include: *

"UpdateCentroidCostMapReduce.Common.DistanceFunctionClass" -> Used to determine distance * to centroid *

"UpdateCentroidCostMapReduce.Centroid.WrapperFactoryClass" -> {@link
 * AnalyticItemWrapperFactory} to extract and wrap spatial objects with centroid management
 * functions
 *
 * @see CentroidManagerGeoWave
 */
public class UpdateCentroidCostMapReduce {

  protected static final Logger LOGGER =
      LoggerFactory.getLogger(UpdateCentroidCostMapReduce.class);

  /**
   * Mapper: assigns each input item to its closest centroid and emits the centroid's
   * (group id, centroid id) key with a (distance, 1.0) value pair.
   */
  // NOTE(review): generic type arguments were stripped in this extract; the parameters
  // below are reconstructed from the writable types actually used — confirm upstream.
  public static class UpdateCentroidCostMap extends
      GeoWaveWritableInputMapper<GroupIDText, CountofDoubleWritable> {

    private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;
    // Reused output value to avoid per-record allocation (standard Hadoop idiom).
    private final CountofDoubleWritable dw = new CountofDoubleWritable();
    protected final GroupIDText outputWritable = new GroupIDText();
    protected AnalyticItemWrapperFactory<Object> itemWrapperFactory;

    // Invoked when an item is paired with its winning centroid; captures the centroid's
    // group and id as the map output key.
    private final AssociationNotification<Object> centroidAssociationFn =
        new AssociationNotification<Object>() {
          @Override
          public void notify(final CentroidPairing<Object> pairing) {
            outputWritable.set(pairing.getCentroid().getGroupID(), pairing.getCentroid().getID());
          }
        };

    @Override
    protected void mapNativeValue(
        final GeoWaveInputKey key,
        final Object value,
        final Mapper<GeoWaveInputKey, ObjectWritable, GroupIDText, CountofDoubleWritable>.Context context)
        throws IOException, InterruptedException {
      final AnalyticItemWrapper<Object> wrappedItem = itemWrapperFactory.create(value);
      // Value = (distance to assigned centroid, count of one); combiner/reducer sum both.
      dw.set(
          nestedGroupCentroidAssigner.findCentroidForLevel(wrappedItem, centroidAssociationFn),
          1.0);
      context.write(outputWritable, dw);
    }

    @Override
    protected void setup(
        final Mapper<GeoWaveInputKey, ObjectWritable, GroupIDText, CountofDoubleWritable>.Context context)
        throws IOException, InterruptedException {
      super.setup(context);
      final ScopedJobConfiguration config =
          new ScopedJobConfiguration(
              context.getConfiguration(),
              UpdateCentroidCostMapReduce.class,
              UpdateCentroidCostMapReduce.LOGGER);
      try {
        nestedGroupCentroidAssigner =
            new NestedGroupCentroidAssignment<>(
                context,
                UpdateCentroidCostMapReduce.class,
                UpdateCentroidCostMapReduce.LOGGER);
      } catch (final Exception e1) {
        throw new IOException(e1);
      }
      try {
        itemWrapperFactory =
            config.getInstance(
                CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,
                AnalyticItemWrapperFactory.class,
                SimpleFeatureItemWrapperFactory.class);
        itemWrapperFactory.initialize(
            context, UpdateCentroidCostMapReduce.class, UpdateCentroidCostMapReduce.LOGGER);
      } catch (final Exception e1) {
        throw new IOException(e1);
      }
    }
  }

  /** Combiner: pre-sums (cost, count) pairs per key to cut shuffle volume. */
  public static class UpdateCentroidCostCombiner extends
      Reducer<GroupIDText, CountofDoubleWritable, GroupIDText, CountofDoubleWritable> {

    final CountofDoubleWritable outputValue = new CountofDoubleWritable();

    @Override
    public void reduce(
        final GroupIDText key,
        final Iterable<CountofDoubleWritable> values,
        final Reducer<GroupIDText, CountofDoubleWritable, GroupIDText, CountofDoubleWritable>.Context context)
        throws IOException, InterruptedException {
      double expectation = 0;
      double ptCount = 0;
      for (final CountofDoubleWritable value : values) {
        expectation += value.getValue();
        ptCount += value.getCount();
      }
      outputValue.set(expectation, ptCount);
      context.write(key, outputValue);
    }
  }

  /**
   * Reducer: totals the per-centroid cost and association count and writes the updated
   * centroid back to the GeoWave store.
   */
  public static class UpdateCentroidCostReducer extends
      Reducer<GroupIDText, CountofDoubleWritable, GeoWaveOutputKey, Object> {

    private CentroidManager<Object> centroidManager;
    private String[] indexNames;

    @Override
    protected void reduce(
        final GroupIDText key,
        final Iterable<CountofDoubleWritable> values,
        final Reducer<GroupIDText, CountofDoubleWritable, GeoWaveOutputKey, Object>.Context context)
        throws IOException, InterruptedException {
      final String id = key.getID();
      final String groupID = key.getGroupID();
      double sum = 0.0;
      double count = 0;
      for (final CountofDoubleWritable next : values) {
        sum += next.getValue();
        count += next.getCount();
      }
      AnalyticItemWrapper<Object> centroid;
      try {
        centroid = getFeatureForCentroid(id, groupID);
      } catch (final MatchingCentroidNotFoundException e) {
        // Centroid may have been stripped between iterations; skip rather than fail the job.
        LOGGER.error("Unable to get centroid " + id + " for group " + groupID, e);
        return;
      }
      centroid.setCost(sum);
      centroid.resetAssociatonCount();
      centroid.incrementAssociationCount((long) count);
      UpdateCentroidCostMapReduce.LOGGER.info("Update centroid " + centroid.toString());
      context.write(
          new GeoWaveOutputKey(centroidManager.getDataTypeName(), indexNames),
          centroid.getWrappedItem());
    }

    private AnalyticItemWrapper<Object> getFeatureForCentroid(final String id, final String groupID)
        throws IOException, MatchingCentroidNotFoundException {
      return centroidManager.getCentroidById(id, groupID);
    }

    @Override
    protected void setup(
        final Reducer<GroupIDText, CountofDoubleWritable, GeoWaveOutputKey, Object>.Context context)
        throws IOException, InterruptedException {
      super.setup(context);
      try {
        centroidManager =
            new CentroidManagerGeoWave<>(
                context, UpdateCentroidCostMapReduce.class, UpdateCentroidCostMapReduce.LOGGER);
        indexNames = new String[] {centroidManager.getIndexName()};
      } catch (final Exception e) {
        UpdateCentroidCostMapReduce.LOGGER.warn("Unable to initialize centroid manager", e);
        // Preserve the cause instead of discarding it (was: new IOException(message) only).
        throw new IOException("Unable to initialize centroid manager", e);
      }
    }
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/IterationCountCalculateRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; /** * Determine the number of iterations in the KMeans Parallel initialization step. Each iteration * samples a set of K points from the full population. The number of iterations is log(psi) where * psi is the initial cost of the system with a single centroid. Rounding is in effect. To obtain a * reasonable sample, the minimum is 2. * *

This class has been adapted to determine the maximum number of iterations required across * multiple groups. Each group is its own set of clusters. */ public class IterationCountCalculateRunner implements MapReduceJobRunner { private int iterationsCount = 1; public IterationCountCalculateRunner() {} public int getIterationsCount() { return iterationsCount; } public void setIterationsCount(final int iterationsCount) { this.iterationsCount = iterationsCount; } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { iterationsCount = this.getIterations(runTimeProperties); return 0; } private int getIterations(final PropertyManagement propertyManagement) throws IOException { final CentroidManager centroidManager = new CentroidManagerGeoWave<>(propertyManagement); final AtomicInteger resultHolder = new AtomicInteger(0); // Must iterate through the worst case. centroidManager.processForAllGroups(new CentroidProcessingFn() { @Override public int processGroup(final String groupID, final List> centroids) { resultHolder.set( Math.max( resultHolder.get(), (centroids.size() > 0) ? (int) Math.round(Math.log(maxCost(centroids))) : 0)); return 0; } }); return Math.max(iterationsCount, resultHolder.get()); } private double maxCost(final List> centroids) { double max = 0.0; for (final AnalyticItemWrapper centroid : centroids) { max = Math.max(max, centroid.getCost()); } return max; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansDistortionJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans.runner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionDataAdapter;
import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry;
import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;
import org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.kmeans.KMeansDistortionMapReduce;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.ClusteringParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.JumpParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;

/**
 * Calculate the distortion.
 *
 *

See Catherine A. Sugar and Gareth M. James (2003). "Finding the number of clusters in a data * set: An information theoretic approach" Journal of the American Statistical Association 98 * (January): 750–763 */ public class KMeansDistortionJobRunner extends GeoWaveAnalyticJobRunner { private int k = 1; private DataStorePluginOptions dataStoreOptions; public KMeansDistortionJobRunner() { setReducerCount(8); } public void setDataStoreOptions(final DataStorePluginOptions dataStoreOptions) { this.dataStoreOptions = dataStoreOptions; } public void setCentroidsCount(final int k) { this.k = k; } @Override public void configure(final Job job) throws Exception { job.setMapperClass(KMeansDistortionMapReduce.KMeansDistortionMapper.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(CountofDoubleWritable.class); job.setReducerClass(KMeansDistortionMapReduce.KMeansDistortionReduce.class); job.setCombinerClass(KMeansDistortionMapReduce.KMeansDistorationCombiner.class); job.setOutputKeyClass(GeoWaveOutputKey.class); job.setOutputValueClass(DistortionEntry.class); job.setOutputFormatClass(GeoWaveOutputFormat.class); // extends wait time to 15 minutes (default: 600 seconds) final long milliSeconds = 1000L * 60L * 15L; final Configuration conf = job.getConfiguration(); conf.setLong("mapred.task.timeout", milliSeconds); ((ParameterEnum) JumpParameters.Jump.COUNT_OF_CENTROIDS).getHelper().setValue( conf, KMeansDistortionMapReduce.class, Integer.valueOf(k)); // Required since the Mapper uses the input format parameters to lookup // the adapter GeoWaveInputFormat.setStoreOptions(conf, dataStoreOptions); GeoWaveOutputFormat.addDataAdapter(conf, new DistortionDataAdapter()); } @Override public Class getScope() { return KMeansDistortionMapReduce.class; } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { setReducerCount( runTimeProperties.getPropertyAsInt( ClusteringParameters.Clustering.MAX_REDUCER_COUNT, 
super.getReducerCount())); runTimeProperties.setConfig( new ParameterEnum[] { CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, GlobalParameters.Global.PARENT_BATCH_ID}, config, getScope()); NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway return super.run(config, runTimeProperties); } @Override protected String getJobName() { return "K-Means Distortion"; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansIterationsJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.IndependentJobRunner; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.FormatConfiguration; import 
org.locationtech.geowave.analytic.param.ParameterEnum; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Run 'K' means until convergence across ALL groups. */ public class KMeansIterationsJobRunner implements MapReduceJobRunner, IndependentJobRunner { protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansIterationsJobRunner.class); private final KMeansJobRunner jobRunner = new KMeansJobRunner(); private double convergenceTol = 0.0001; public KMeansIterationsJobRunner() {} protected CentroidManager constructCentroidManager( final Configuration config, final PropertyManagement runTimeProperties) throws IOException { return new CentroidManagerGeoWave<>(runTimeProperties); } public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { jobRunner.setInputFormatConfiguration(inputFormatConfiguration); } public void setReducerCount(final int reducerCount) { jobRunner.setReducerCount(reducerCount); } @SuppressWarnings("unchecked") @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { convergenceTol = runTimeProperties.getPropertyAsDouble( ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE, convergenceTol); final DistanceFn distanceFunction = runTimeProperties.getClassInstance( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, DistanceFn.class, FeatureCentroidDistanceFn.class); int maxIterationCount = runTimeProperties.getPropertyAsInt(ClusteringParameters.Clustering.MAX_ITERATIONS, 15); boolean converged = false; while (!converged && (maxIterationCount > 0)) { final int status = runJob(config, runTimeProperties); if (status != 0) { return status; } // new one each time to force a refresh of the centroids final CentroidManager centroidManager = constructCentroidManager(config, runTimeProperties); // check for convergence converged = checkForConvergence(centroidManager, distanceFunction); maxIterationCount--; } return 0; } protected int runJob(final 
Configuration config, final PropertyManagement runTimeProperties) throws Exception { runTimeProperties.storeIfEmpty( CentroidParameters.Centroid.EXTRACTOR_CLASS, SimpleFeatureCentroidExtractor.class); runTimeProperties.storeIfEmpty( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, SimpleFeatureItemWrapperFactory.class); runTimeProperties.storeIfEmpty( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, FeatureCentroidDistanceFn.class); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway return jobRunner.run(config, runTimeProperties); } private boolean checkForConvergence( final CentroidManager centroidManager, final DistanceFn distanceFunction) throws IOException { final AtomicInteger grpCount = new AtomicInteger(0); final AtomicInteger failuresCount = new AtomicInteger(0); final AtomicInteger centroidCount = new AtomicInteger(0); final boolean status = centroidManager.processForAllGroups(new CentroidProcessingFn() { @Override public int processGroup(final String groupID, final List> centroids) { grpCount.incrementAndGet(); centroidCount.addAndGet(centroids.size() / 2); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Parent Group: {} ", groupID); for (final AnalyticItemWrapper troid : centroids) { LOGGER.warn("Child Group: {} ", troid.getID()); } } failuresCount.addAndGet( computeCostAndCleanUp(groupID, centroids, centroidManager, distanceFunction)); return 0; } }) == 0 ? 
true : false; // update default based on data size setReducerCount(grpCount.get() * centroidCount.get()); return status && (failuresCount.get() == 0); } protected int computeCostAndCleanUp( final String groupID, final List> centroids, final CentroidManager centroidManager, final DistanceFn distanceFunction) { double distance = 0; final List deletionKeys = new ArrayList<>(); // sort by id and then by iteration Collections.sort(centroids, new Comparator>() { @Override public int compare(final AnalyticItemWrapper arg0, final AnalyticItemWrapper arg1) { final int c = arg0.getName().compareTo(arg1.getName()); if (c == 0) { return arg0.getIterationID() - arg1.getIterationID(); } else { return c; } } }); AnalyticItemWrapper prior = null; for (final AnalyticItemWrapper centroid : centroids) { if (prior == null) { prior = centroid; continue; } else if (!prior.getName().equals(centroid.getName())) { // should we delete this...it is a centroid without assigned // points? This occurs when the number of centroids exceeds the // number of points in a cluster. // it is an edge case. // deletionKeys.add( prior.getID() ); LOGGER.warn( "Centroid is no longer viable " + prior.getID() + " from group " + prior.getGroupID()); prior = centroid; continue; } // the prior run centroids are still present from the geowave data // store; // their priors do not exist in the map distance += distanceFunction.measure(prior.getWrappedItem(), centroid.getWrappedItem()); deletionKeys.add(prior.getID()); if (LOGGER.isTraceEnabled()) { LOGGER.trace( "Within group {} replace {} with {}", new String[] {prior.getGroupID(), prior.getID(), centroid.getID()}); } prior = null; } distance /= centroids.size(); try { centroidManager.delete(deletionKeys.toArray(new String[deletionKeys.size()])); } catch (final IOException e) { throw new RuntimeException(e); } return (distance < convergenceTol) ? 
0 : 1; } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll( Arrays.asList( new ParameterEnum[] { CentroidParameters.Centroid.INDEX_NAME, CentroidParameters.Centroid.DATA_TYPE_ID, CentroidParameters.Centroid.DATA_NAMESPACE_URI, CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, ClusteringParameters.Clustering.MAX_REDUCER_COUNT, ClusteringParameters.Clustering.MAX_ITERATIONS, ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE, CommonParameters.Common.DISTANCE_FUNCTION_CLASS})); params.addAll(CentroidManagerGeoWave.getParameters()); params.addAll(NestedGroupCentroidAssignment.getParameters()); params.addAll(jobRunner.getParameters()); return params; } @Override public int run(final PropertyManagement runTimeProperties) throws Exception { return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.mapreduce.Job; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner; import org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.GroupIDText; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.kmeans.KMeansMapReduce; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.opengis.feature.simple.SimpleFeature; /** Run 'K' means one time to move the centroids towards the mean. 
*/ public class KMeansJobRunner extends GeoWaveAnalyticJobRunner implements MapReduceJobRunner { public KMeansJobRunner() { super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration()); } @Override public void setReducerCount(final int reducerCount) { super.setReducerCount(Math.min(2, reducerCount)); } @Override public void configure(final Job job) throws Exception { job.setMapperClass(KMeansMapReduce.KMeansMapper.class); job.setMapOutputKeyClass(GroupIDText.class); job.setMapOutputValueClass(BytesWritable.class); job.setReducerClass(KMeansMapReduce.KMeansReduce.class); job.setCombinerClass(KMeansMapReduce.KMeansCombiner.class); job.setReduceSpeculativeExecution(false); job.setOutputKeyClass(GeoWaveOutputKey.class); job.setOutputValueClass(SimpleFeature.class); } @Override public Class getScope() { return KMeansMapReduce.class; } @Override public int run(final Configuration configuration, final PropertyManagement runTimeProperties) throws Exception { NestedGroupCentroidAssignment.setParameters(configuration, getScope(), runTimeProperties); super.setReducerCount( runTimeProperties.getPropertyAsInt( ClusteringParameters.Clustering.MAX_REDUCER_COUNT, Math.max(2, super.getReducerCount()))); runTimeProperties.setConfig( new ParameterEnum[] { CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS}, configuration, getScope()); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway return super.run(configuration, runTimeProperties); } @Override protected String getJobName() { return "K-Means"; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansJumpJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.clustering.runner.ClusteringRunner; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.JumpParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import 
org.locationtech.geowave.analytic.param.SampleParameters; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The KMeans Jump algorithm * *

Catherine A. Sugar and Gareth M. James (2003). "Finding the number of clusters in a data set: * An information theoretic approach" Journal of the American Statistical Association 98 (January): * 750–763 */ public class KMeansJumpJobRunner extends MapReduceJobController implements ClusteringRunner { static final Logger LOGGER = LoggerFactory.getLogger(KMeansJumpJobRunner.class); final KMeansDistortionJobRunner jumpRunner = new KMeansDistortionJobRunner(); final KMeansParallelJobRunnerDelegate kmeansRunner = new KMeansParallelJobRunnerDelegate(); private int currentZoomLevel = 1; public KMeansJumpJobRunner() { // defaults setZoomLevel(1); // child runners init( new MapReduceJobRunner[] {kmeansRunner, jumpRunner,}, new PostOperationTask[] {DoNothingTask, DoNothingTask}); } @Override public void setZoomLevel(final int zoomLevel) { currentZoomLevel = zoomLevel; kmeansRunner.setZoomLevel(zoomLevel); } @Override public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { jumpRunner.setInputFormatConfiguration(inputFormatConfiguration); kmeansRunner.setInputFormatConfiguration(inputFormatConfiguration); } @Override @SuppressWarnings("unchecked") public int run(final Configuration configuration, final PropertyManagement propertyManagement) throws Exception { propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, currentZoomLevel); propertyManagement.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); propertyManagement.storeIfEmpty( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, SimpleFeatureItemWrapperFactory.class); propertyManagement.storeIfEmpty( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, FeatureCentroidDistanceFn.class); propertyManagement.storeIfEmpty( CentroidParameters.Centroid.EXTRACTOR_CLASS, SimpleFeatureCentroidExtractor.class); propertyManagement.storeIfEmpty( CommonParameters.Common.DIMENSION_EXTRACT_CLASS, SimpleFeatureGeometryExtractor.class); propertyManagement.copy( 
CentroidParameters.Centroid.DATA_TYPE_ID, SampleParameters.Sample.DATA_TYPE_NAME); propertyManagement.copy( CentroidParameters.Centroid.INDEX_NAME, SampleParameters.Sample.INDEX_NAME); ClusteringUtils.createAdapter(propertyManagement); ClusteringUtils.createIndex(propertyManagement); final String currentBatchId = propertyManagement.getPropertyAsString( GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); try { final NumericRange rangeOfIterations = propertyManagement.getPropertyAsRange( JumpParameters.Jump.RANGE_OF_CENTROIDS, new NumericRange(2, 200)); propertyManagement.store(GlobalParameters.Global.PARENT_BATCH_ID, currentBatchId); final DataStorePluginOptions dataStoreOptions = ((PersistableStore) propertyManagement.getProperty( StoreParam.INPUT_STORE)).getDataStoreOptions(); final DistortionGroupManagement distortionGroupManagement = new DistortionGroupManagement(dataStoreOptions); for (int k = (int) Math.max(2, Math.round(rangeOfIterations.getMin())); k < Math.round( rangeOfIterations.getMax()); k++) { // regardless of the algorithm, the sample set is fixed in size propertyManagement.store(SampleParameters.Sample.MIN_SAMPLE_SIZE, k); propertyManagement.store(SampleParameters.Sample.MAX_SAMPLE_SIZE, k); propertyManagement.store(SampleParameters.Sample.SAMPLE_SIZE, k); jumpRunner.setCentroidsCount(k); jumpRunner.setDataStoreOptions(dataStoreOptions); final String iterationBatchId = currentBatchId + "_" + k; propertyManagement.store(GlobalParameters.Global.BATCH_ID, iterationBatchId); jumpRunner.setReducerCount(k); final int status = super.run(configuration, propertyManagement); if (status != 0) { return status; } } propertyManagement.store(GlobalParameters.Global.BATCH_ID, currentBatchId); @SuppressWarnings("rawtypes") final Class analyticItemWrapperFC = propertyManagement.getPropertyAsClass( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, AnalyticItemWrapperFactory.class); /** * Associate the batch id with the best set of groups so the caller can 
find the clusters for * the given batch */ final int result = distortionGroupManagement.retainBestGroups( (AnalyticItemWrapperFactory) analyticItemWrapperFC.newInstance(), propertyManagement.getPropertyAsString(CentroidParameters.Centroid.DATA_TYPE_ID), propertyManagement.getPropertyAsString(CentroidParameters.Centroid.INDEX_NAME), currentBatchId, currentZoomLevel); return result; } catch (final Exception ex) { LOGGER.error("Cannot create distortions", ex); return 1; } } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(kmeansRunner.singleSamplekmeansJobRunner.getParameters()); params.addAll(kmeansRunner.parallelJobRunner.getParameters()); params.addAll( Arrays.asList( new ParameterEnum[] { JumpParameters.Jump.RANGE_OF_CENTROIDS, JumpParameters.Jump.KPLUSPLUS_MIN, ClusteringParameters.Clustering.MAX_REDUCER_COUNT, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, CentroidParameters.Centroid.INDEX_NAME, CentroidParameters.Centroid.DATA_TYPE_ID, CentroidParameters.Centroid.DATA_NAMESPACE_URI, CentroidParameters.Centroid.EXTRACTOR_CLASS, CommonParameters.Common.DISTANCE_FUNCTION_CLASS, CommonParameters.Common.DIMENSION_EXTRACT_CLASS, StoreParameters.StoreParam.INPUT_STORE, GlobalParameters.Global.BATCH_ID})); params.addAll(MapReduceParameters.getParameters()); params.remove(CentroidParameters.Centroid.ZOOM_LEVEL); params.remove(SampleParameters.Sample.DATA_TYPE_NAME); params.remove(SampleParameters.Sample.INDEX_NAME); return params; } private static class KMeansParallelJobRunnerDelegate implements MapReduceJobRunner { final KMeansSingleSampleJobRunner singleSamplekmeansJobRunner = new KMeansSingleSampleJobRunner<>(); final KMeansParallelJobRunner parallelJobRunner = new KMeansParallelJobRunner(); @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { final int k = runTimeProperties.getPropertyAsInt(SampleParameters.Sample.SAMPLE_SIZE, 1); final int 
minkplusplus = runTimeProperties.getPropertyAsInt(JumpParameters.Jump.KPLUSPLUS_MIN, 3); if (k >= minkplusplus) { return parallelJobRunner.run(config, runTimeProperties); } else { return singleSamplekmeansJobRunner.run(config, runTimeProperties); } } public void setZoomLevel(final int zoomLevel) { parallelJobRunner.setZoomLevel(zoomLevel); singleSamplekmeansJobRunner.setZoomLevel(zoomLevel); } public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { parallelJobRunner.setInputFormatConfiguration(inputFormatConfiguration); singleSamplekmeansJobRunner.setInputFormatConfiguration(inputFormatConfiguration); } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansParallelJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.clustering.runner.ClusteringRunner; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.SampleParameters; import org.opengis.feature.simple.SimpleFeature; /** * The KMeans Parallel algorithm,labeled Algorithm 2 within in section 3.3 of * *

Bahmani, Kumar, Moseley, Vassilvitskii and Vattani. Scalable K-means++. VLDB Endowment Vol. 5, * No. 7. 2012. * * Couple things to note: *

(1) Updating the cost of each sampled point occurs as the first step within sampling loop; * the initial sample is performed outside the loop. *

(2) A final update cost occurs outside the sampling loop just prior to stripping off the * top 'K' centers. * */ public class KMeansParallelJobRunner extends MapReduceJobController implements ClusteringRunner { final SampleMultipleSetsJobRunner sampleSetsRunner = new SampleMultipleSetsJobRunner<>(); final StripWeakCentroidsRunner stripWeakCentroidsRunner = new StripWeakCentroidsRunner<>(); final KMeansIterationsJobRunner kmeansJobRunner = new KMeansIterationsJobRunner<>(); private int currentZoomLevel = 1; public KMeansParallelJobRunner() { // defaults setZoomLevel(1); // sts of child runners init( new MapReduceJobRunner[] { sampleSetsRunner, stripWeakCentroidsRunner, // run this one more // time with // 'smaller' size kmeansJobRunner}, new PostOperationTask[] {DoNothingTask, DoNothingTask, new PostOperationTask() { @Override public void runTask(final Configuration config, final MapReduceJobRunner runner) { kmeansJobRunner.setReducerCount(stripWeakCentroidsRunner.getCurrentCentroidCount()); } }, DoNothingTask}); } @Override public void setZoomLevel(final int zoomLevel) { currentZoomLevel = zoomLevel; sampleSetsRunner.setZoomLevel(zoomLevel); } @Override public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { sampleSetsRunner.setInputFormatConfiguration(inputFormatConfiguration); kmeansJobRunner.setInputFormatConfiguration(inputFormatConfiguration); } @Override public int run(final Configuration configuration, final PropertyManagement propertyManagement) throws Exception { return runJob(configuration, propertyManagement); } private int runJob(final Configuration config, final PropertyManagement propertyManagement) throws Exception { propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, currentZoomLevel); propertyManagement.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); propertyManagement.storeIfEmpty( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, 
SimpleFeatureItemWrapperFactory.class); propertyManagement.storeIfEmpty( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, FeatureCentroidDistanceFn.class); propertyManagement.storeIfEmpty( CentroidParameters.Centroid.EXTRACTOR_CLASS, SimpleFeatureCentroidExtractor.class); propertyManagement.storeIfEmpty( CommonParameters.Common.DIMENSION_EXTRACT_CLASS, SimpleFeatureGeometryExtractor.class); stripWeakCentroidsRunner.setRange( propertyManagement.getPropertyAsInt(SampleParameters.Sample.MIN_SAMPLE_SIZE, 2), propertyManagement.getPropertyAsInt(SampleParameters.Sample.MAX_SAMPLE_SIZE, 1000)); ClusteringUtils.createAdapter(propertyManagement); ClusteringUtils.createIndex(propertyManagement); return super.run(config, propertyManagement); } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(kmeansJobRunner.getParameters()); params.addAll(sampleSetsRunner.getParameters()); // while override params.remove(CentroidParameters.Centroid.ZOOM_LEVEL); return params; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansSingleSampleJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.clustering.runner.ClusteringRunner; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.SampleParameters; import org.locationtech.geowave.analytic.param.StoreParameters; /** */ public class KMeansSingleSampleJobRunner extends 
MapReduceJobController implements ClusteringRunner { final KSamplerJobRunner sampleSetsRunner = new KSamplerJobRunner(); final KMeansIterationsJobRunner kmeansJobRunner = new KMeansIterationsJobRunner<>(); private int currentZoomLevel = 1; public KMeansSingleSampleJobRunner() { // defaults setZoomLevel(1); // sets of child runners init( new MapReduceJobRunner[] {sampleSetsRunner, kmeansJobRunner}, new PostOperationTask[] {DoNothingTask, DoNothingTask}); } @Override public void setZoomLevel(final int zoomLevel) { currentZoomLevel = zoomLevel; sampleSetsRunner.setZoomLevel(zoomLevel); } @Override public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { sampleSetsRunner.setInputFormatConfiguration(inputFormatConfiguration); kmeansJobRunner.setInputFormatConfiguration(inputFormatConfiguration); } @Override public int run(final Configuration configuration, final PropertyManagement propertyManagement) throws Exception { return runJob(configuration, propertyManagement); } private int runJob(final Configuration config, final PropertyManagement propertyManagement) throws Exception { propertyManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, currentZoomLevel); propertyManagement.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); propertyManagement.storeIfEmpty( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, SimpleFeatureItemWrapperFactory.class); propertyManagement.storeIfEmpty( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, FeatureCentroidDistanceFn.class); propertyManagement.storeIfEmpty( CentroidParameters.Centroid.EXTRACTOR_CLASS, SimpleFeatureCentroidExtractor.class); propertyManagement.storeIfEmpty( CommonParameters.Common.DIMENSION_EXTRACT_CLASS, SimpleFeatureGeometryExtractor.class); ClusteringUtils.createAdapter(propertyManagement); ClusteringUtils.createIndex(propertyManagement); return super.run(config, propertyManagement); } @Override public Collection> getParameters() { final Set> params 
= new HashSet<>(); params.addAll(kmeansJobRunner.getParameters()); params.addAll( Arrays.asList( new ParameterEnum[] { ClusteringParameters.Clustering.MAX_REDUCER_COUNT, SampleParameters.Sample.SAMPLE_SIZE, SampleParameters.Sample.SAMPLE_RANK_FUNCTION, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, CentroidParameters.Centroid.INDEX_NAME, CentroidParameters.Centroid.DATA_TYPE_ID, CentroidParameters.Centroid.DATA_NAMESPACE_URI, CentroidParameters.Centroid.EXTRACTOR_CLASS, CommonParameters.Common.DISTANCE_FUNCTION_CLASS, CommonParameters.Common.DIMENSION_EXTRACT_CLASS, StoreParameters.StoreParam.INPUT_STORE, GlobalParameters.Global.BATCH_ID, ClusteringParameters.Clustering.MAX_REDUCER_COUNT})); params.addAll(MapReduceParameters.getParameters()); params.addAll(NestedGroupCentroidAssignment.getParameters()); // override params.remove(CentroidParameters.Centroid.ZOOM_LEVEL); params.remove(SampleParameters.Sample.DATA_TYPE_NAME); params.remove(SampleParameters.Sample.INDEX_NAME); return params; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KSamplerJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Job; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner; import org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.mapreduce.kmeans.KSamplerMapReduce; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.SampleParameters; import org.locationtech.geowave.analytic.sample.function.RandomSamplingRankFunction; import org.locationtech.geowave.analytic.sample.function.SamplingRankFunction; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.IndexStore; import 
org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; /** * Samples 'K' number of data items by evaluating a {@link SamplingRankFunction} * *

For KMeans Parallel, the initial step requires seeding the centroids with a single point. In * this case, K=1 and the rank function is random. This means the top selected geometry is random. * In addition, each subsequent iteration samples based on probability function and K is some * provided sample size. */ public class KSamplerJobRunner extends GeoWaveAnalyticJobRunner implements MapReduceJobRunner { protected int zoomLevel = 1; private Class samplingRankFunctionClass = RandomSamplingRankFunction.class; public KSamplerJobRunner() { super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration()); } public void setSamplingRankFunctionClass( final Class samplingRankFunctionClass) { this.samplingRankFunctionClass = samplingRankFunctionClass; } public void setZoomLevel(final int zoomLevel) { this.zoomLevel = zoomLevel; } @Override public Class getScope() { return KSamplerMapReduce.class; } @Override public void configure(final Job job) throws Exception { job.setMapperClass(KSamplerMapReduce.SampleMap.class); job.setMapOutputKeyClass(GeoWaveInputKey.class); job.setMapOutputValueClass(ObjectWritable.class); job.setReducerClass(KSamplerMapReduce.SampleReducer.class); job.setPartitionerClass(KSamplerMapReduce.SampleKeyPartitioner.class); job.setReduceSpeculativeExecution(false); job.setOutputKeyClass(GeoWaveOutputKey.class); job.setOutputValueClass(Object.class); } private InternalDataAdapter getAdapter(final PropertyManagement runTimeProperties) throws Exception { final PersistentAdapterStore adapterStore = super.getAdapterStore(runTimeProperties); final InternalAdapterStore internalAdapterStore = getInternalAdapterStore(runTimeProperties); final Short sampleInternalAdapterId = internalAdapterStore.getAdapterId( runTimeProperties.getPropertyAsString( SampleParameters.Sample.DATA_TYPE_NAME, "sample")); if (sampleInternalAdapterId == null) { return null; } return adapterStore.getAdapter(sampleInternalAdapterId); } private Index getIndex(final PropertyManagement 
runTimeProperties) throws Exception { final IndexStore indexStore = super.getIndexStore(runTimeProperties); return indexStore.getIndex( runTimeProperties.getPropertyAsString(SampleParameters.Sample.INDEX_NAME, "index")); } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { runTimeProperties.storeIfEmpty(GlobalParameters.Global.BATCH_ID, UUID.randomUUID().toString()); runTimeProperties.storeIfEmpty(SampleParameters.Sample.DATA_TYPE_NAME, "sample"); runTimeProperties.store(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel); runTimeProperties.storeIfEmpty( SampleParameters.Sample.INDEX_NAME, SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()).getName()); runTimeProperties.setConfig( new ParameterEnum[] { GlobalParameters.Global.BATCH_ID, SampleParameters.Sample.INDEX_NAME, SampleParameters.Sample.SAMPLE_SIZE, SampleParameters.Sample.DATA_TYPE_NAME, CentroidParameters.Centroid.EXTRACTOR_CLASS, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, CentroidParameters.Centroid.ZOOM_LEVEL}, config, getScope()); ((ParameterEnum>) SampleParameters.Sample.SAMPLE_RANK_FUNCTION).getHelper().setValue( config, getScope(), samplingRankFunctionClass); NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties); addDataAdapter(config, getAdapter(runTimeProperties)); addIndex(config, getIndex(runTimeProperties)); super.setReducerCount(zoomLevel); // HP Fortify "Command Injection" false positive // What Fortify considers "externally-influenced input" // comes only from users with OS-level access anyway return super.run(config, runTimeProperties); } @Override protected String getJobName() { return "K-Sampler"; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/RankSamplerJobRunner.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans.runner;

import org.apache.hadoop.conf.Configuration;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;
import org.locationtech.geowave.analytic.distance.DistanceFn;
import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;
import org.locationtech.geowave.analytic.param.SampleParameters;
import org.locationtech.geowave.analytic.sample.BahmanEtAlSampleProbabilityFn;
import org.locationtech.geowave.analytic.sample.function.CentroidDistanceBasedSamplingRankFunction;

/**
 * Sample K points given a sample function. The sampled K points are stored as centroids within
 * GeoWave. The sampling weight may be determined by the relation of a point to a current set of
 * centroids, thus a {@link DistanceFn} instance is required.
 */
public class RankSamplerJobRunner extends KSamplerJobRunner implements MapReduceJobRunner {

  public RankSamplerJobRunner() {
    // rank candidates by distance to the current centroid set instead of the random default
    setSamplingRankFunctionClass(CentroidDistanceBasedSamplingRankFunction.class);
  }

  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    // expose the current centroids to the distance-based rank function
    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);
    // default sampling probability from Bahmani et al. (scalable k-means++)
    runTimeProperties.storeIfEmpty(
        SampleParameters.Sample.PROBABILITY_FUNCTION,
        BahmanEtAlSampleProbabilityFn.class);
    CentroidDistanceBasedSamplingRankFunction.setParameters(config, getScope(), runTimeProperties);
    // HP Fortify "Command Injection" false positive
    // What Fortify considers "externally-influenced input"
    // comes only from users with OS-level access anyway
    return super.run(config, runTimeProperties);
  }
}


================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/SampleMultipleSetsJobRunner.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor; import org.locationtech.geowave.analytic.extract.SimpleFeatureGeometryExtractor; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.FormatConfiguration; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.SampleParameters; import org.locationtech.geowave.analytic.param.StoreParameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /* * Loop and sample multiple sets of K centers. 
* * Fulfills steps 3 through 5 in the Kmeans Parellel initialize Algorithm 2,as documented in section * 3.3 in * * Bahmani, Kumar, Moseley, Vassilvitskii and Vattani. Scalable K-means++. VLDB Endowment Vol. 5, * No. 7. 2012. * * The number of iterations is assumed to be log(psi), according the paper. * * As an added bonus, remove those centers that did not have sufficient number of matches, leaving * the top sampleSize/iterations. * */ public class SampleMultipleSetsJobRunner extends MapReduceJobController implements MapReduceJobRunner { protected static final Logger LOGGER = LoggerFactory.getLogger(SampleMultipleSetsJobRunner.class); private final KSamplerJobRunner initialSampleRunner = new KSamplerJobRunner(); private final UpdateCentroidCostJobRunner updateCostRunner = new UpdateCentroidCostJobRunner(); private final RankSamplerJobRunner jobGrowSampleRunner = new RankSamplerJobRunner(); private final StripWeakCentroidsRunner stripWeakCentroidsRunner = new StripWeakCentroidsRunner<>(); private final IterationCountCalculateRunner iterationCountCalculateRunner = new IterationCountCalculateRunner<>(); private int iterations = 1; private int zoomLevel = 1; public SampleMultipleSetsJobRunner() { stage1Setup(); } private void stage1Setup() { init( new MapReduceJobRunner[] { initialSampleRunner, updateCostRunner, iterationCountCalculateRunner}, new PostOperationTask[] {DoNothingTask, DoNothingTask, DoNothingTask}); } public int getCurrentCentroidCount() { return stripWeakCentroidsRunner.getCurrentCentroidCount(); } private void stage2Setup(final PropertyManagement runTimeProperties) { setIterations(iterationCountCalculateRunner.getIterationsCount()); init( new MapReduceJobRunner[] {jobGrowSampleRunner, updateCostRunner, stripWeakCentroidsRunner}, new PostOperationTask[] {DoNothingTask, DoNothingTask, new PostOperationTask() { @Override public void runTask(final Configuration config, final MapReduceJobRunner runner) { updateCostRunner.setReducerCount( Math.min( 
stripWeakCentroidsRunner.getCurrentCentroidCount(), runTimeProperties.getPropertyAsInt( ClusteringParameters.Clustering.MAX_REDUCER_COUNT, 32))); } }}); } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { // run stage 1 updateCostRunner.setReducerCount(1); this.stripWeakCentroidsRunner.setRange( runTimeProperties.getPropertyAsInt(SampleParameters.Sample.MIN_SAMPLE_SIZE, 2), runTimeProperties.getPropertyAsInt(SampleParameters.Sample.MAX_SAMPLE_SIZE, 1000)); runTimeProperties.store( SampleParameters.Sample.SAMPLE_SIZE, runTimeProperties.getPropertyAsInt(SampleParameters.Sample.MAX_SAMPLE_SIZE, 1000)); setIterations(runTimeProperties.getPropertyAsInt(SampleParameters.Sample.SAMPLE_ITERATIONS, 1)); runTimeProperties.storeIfEmpty( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, SimpleFeatureItemWrapperFactory.class); runTimeProperties.storeIfEmpty( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, FeatureCentroidDistanceFn.class); runTimeProperties.storeIfEmpty( CentroidParameters.Centroid.EXTRACTOR_CLASS, SimpleFeatureCentroidExtractor.class); runTimeProperties.storeIfEmpty( CommonParameters.Common.DIMENSION_EXTRACT_CLASS, SimpleFeatureGeometryExtractor.class); runTimeProperties.copy( CentroidParameters.Centroid.DATA_TYPE_ID, SampleParameters.Sample.DATA_TYPE_NAME); runTimeProperties.copy( CentroidParameters.Centroid.INDEX_NAME, SampleParameters.Sample.INDEX_NAME); runTimeProperties.store(CentroidParameters.Centroid.ZOOM_LEVEL, zoomLevel); stage1Setup(); final int status1 = super.run(config, runTimeProperties); if (status1 != 0) { return status1; } stage2Setup(runTimeProperties); for (int i = 0; i < iterations; i++) { final int status2 = super.run(config, runTimeProperties); if (status2 != 0) { return status2; } } return 0; } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll( Arrays.asList( new ParameterEnum[] { SampleParameters.Sample.MAX_SAMPLE_SIZE, 
SampleParameters.Sample.SAMPLE_ITERATIONS, SampleParameters.Sample.MIN_SAMPLE_SIZE, CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, CentroidParameters.Centroid.INDEX_NAME, CentroidParameters.Centroid.DATA_TYPE_ID, CentroidParameters.Centroid.DATA_NAMESPACE_URI, CentroidParameters.Centroid.EXTRACTOR_CLASS, CommonParameters.Common.DISTANCE_FUNCTION_CLASS, CommonParameters.Common.DIMENSION_EXTRACT_CLASS, StoreParameters.StoreParam.INPUT_STORE, GlobalParameters.Global.BATCH_ID})); params.addAll(MapReduceParameters.getParameters()); params.addAll(NestedGroupCentroidAssignment.getParameters()); params.addAll(CentroidManagerGeoWave.getParameters()); params.addAll(initialSampleRunner.getParameters()); return params; } public void setInputFormatConfiguration(final FormatConfiguration inputFormatConfiguration) { initialSampleRunner.setInputFormatConfiguration(inputFormatConfiguration); updateCostRunner.setInputFormatConfiguration(inputFormatConfiguration); jobGrowSampleRunner.setInputFormatConfiguration(inputFormatConfiguration); } private void setIterations(final int iterations) { this.iterations = Math.max(this.iterations, iterations); } public void setZoomLevel(final int zoomLevel) { this.zoomLevel = zoomLevel; initialSampleRunner.setZoomLevel(zoomLevel); jobGrowSampleRunner.setZoomLevel(zoomLevel); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/StripWeakCentroidsRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; import org.apache.hadoop.conf.Configuration; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn; import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner; import org.locationtech.geowave.core.index.FloatCompareUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Remove weak centers. Looking for a large gaps of distances AND retain a minimum set. 
*/ public class StripWeakCentroidsRunner implements MapReduceJobRunner { protected static final Logger LOGGER = LoggerFactory.getLogger(StripWeakCentroidsRunner.class); private int minimum = 1; private int maximum = 1000; private int currentCentroidCount = 0; private BreakStrategy breakStrategy = new TailMaxBreakStrategy<>(); public StripWeakCentroidsRunner() {} public void setBreakStrategy(final BreakStrategy breakStrategy) { this.breakStrategy = breakStrategy; } /** @param minimum new minimum number of centroids to retain, regardless of weak center; */ public void setRange(final int minimum, final int maximum) { this.minimum = minimum; this.maximum = maximum; } /** * Available only after execution. * * @return The count of current centroids after execution */ public int getCurrentCentroidCount() { return currentCentroidCount; } protected CentroidManager constructCentroidManager( final Configuration config, final PropertyManagement runTimeProperties) throws IOException { return new CentroidManagerGeoWave<>(runTimeProperties); } @Override public int run(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { currentCentroidCount = 0; final CentroidManager centroidManager = constructCentroidManager(config, runTimeProperties); return centroidManager.processForAllGroups(new CentroidProcessingFn() { @Override public int processGroup(final String groupID, final List> centroids) { if (centroids.size() <= minimum) { currentCentroidCount = centroids.size(); return 0; } Collections.sort(centroids, new Comparator>() { @Override public int compare(final AnalyticItemWrapper arg0, final AnalyticItemWrapper arg1) { // be careful of overflow // also, descending return (arg1.getAssociationCount() - arg0.getAssociationCount()) < 0 ? 
-1 : 1; } }); int position = breakStrategy.getBreakPoint(centroids); // make sure we do not delete too many // trim bottom third position = Math.min(Math.max(minimum, position), maximum); final String toDelete[] = new String[centroids.size() - position]; LOGGER.info("Deleting {} out of {}", toDelete.length, centroids.size()); int count = 0; final Iterator> it = centroids.iterator(); while (it.hasNext()) { final AnalyticItemWrapper centroid = it.next(); if (count++ >= position) { toDelete[count - position - 1] = centroid.getID(); } } try { centroidManager.delete(toDelete); } catch (final IOException e) { LOGGER.warn("Unable to delete the centriod mamager", e); return -1; } currentCentroidCount += position; return 0; } }); } public static class MaxChangeBreakStrategy implements BreakStrategy { @Override public int getBreakPoint(final List> centroids) { int position = centroids.size(); int count = 0; final StandardDeviation st = new StandardDeviation(); double total = 0.0; double prior = Double.NaN; for (final AnalyticItemWrapper centroid : centroids) { if (!Double.isNaN(prior)) { final double chg = Math.abs(prior - centroid.getAssociationCount()); st.increment(chg); total += chg; } prior = centroid.getAssociationCount(); } double max = getInitialMaximum(st, total); prior = Double.NaN; // look for largest change for (final AnalyticItemWrapper centroid : centroids) { if (centroid.getAssociationCount() <= 1) { if (position == 0) { position = count; } break; } if (!Double.isNaN(prior)) { final double chg = Math.abs(prior - centroid.getAssociationCount()); if (FloatCompareUtils.checkDoublesEqual(Math.max(max, chg), chg)) { position = count; max = chg; } } prior = centroid.getAssociationCount(); count++; } return position; } protected double getInitialMaximum(final StandardDeviation stats, final double total) { return 0.0; } } private static class ChangeFromLast implements Comparable { int position; double chg; public ChangeFromLast(final int position, final double chg) { 
super(); this.position = position; this.chg = chg; } @Override public String toString() { return "ChangeFromLast [position=" + position + ", chg=" + chg + "]"; } @Override public int compareTo(final ChangeFromLast arg0) { return new Double((arg0).chg).compareTo(chg); } @Override public boolean equals(final Object obj) { if (obj == null) { return false; } if (!(obj instanceof ChangeFromLast)) { return false; } return compareTo((ChangeFromLast) obj) == 0; } @Override public int hashCode() { return Double.valueOf(chg).hashCode(); } } public static class StableChangeBreakStrategy implements BreakStrategy { @Override public int getBreakPoint(final List> centroids) { final List changes = new ArrayList<>(centroids.size()); final StandardDeviation st = new StandardDeviation(); double prior = Double.NaN; double total = 0; int count = 0; // look for largest change for (final AnalyticItemWrapper centroid : centroids) { final double chgValue = (!Double.isNaN(prior)) ? Math.abs(prior - centroid.getAssociationCount()) : 0.0; changes.add(new ChangeFromLast(count, chgValue)); prior = centroid.getAssociationCount(); count++; } Collections.sort(changes); int position = centroids.size(); count = 0; ChangeFromLast priorChg = null; for (final ChangeFromLast changeFromLast : changes) { if (priorChg != null) { final double chgOfChg = Math.abs(priorChg.chg - changeFromLast.chg); total += chgOfChg; st.increment(chgOfChg); } priorChg = changeFromLast; count++; } double max = getInitialMaximum(st, total); position = changes.get(0).position; if (changes.get(0).chg < max) { return centroids.size(); } priorChg = null; // look for largest change for (final ChangeFromLast changeFromLast : changes) { if (priorChg != null) { final double chgOfChg = Math.abs(priorChg.chg - changeFromLast.chg); if (chgOfChg > max) { position = Math.max(position, changeFromLast.position); max = chgOfChg; } } priorChg = changeFromLast; } return position; } protected double getInitialMaximum(final StandardDeviation 
stats, final double total) { return 0.0; } } public static class TailMaxBreakStrategy extends MaxChangeBreakStrategy implements BreakStrategy { @Override protected double getInitialMaximum(final StandardDeviation stats, final double total) { return (total / stats.getN()) + stats.getResult(); } } public static class TailStableChangeBreakStrategy extends StableChangeBreakStrategy implements BreakStrategy { @Override protected double getInitialMaximum(final StandardDeviation stats, final double total) { return (total / stats.getN()) + stats.getResult(); } } public interface BreakStrategy { public int getBreakPoint(List> centroids); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/UpdateCentroidCostJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans.runner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;
import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;
import org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveOutputFormatConfiguration;
import org.locationtech.geowave.analytic.mapreduce.GroupIDText;
import org.locationtech.geowave.analytic.mapreduce.MapReduceJobRunner;
import org.locationtech.geowave.analytic.mapreduce.kmeans.UpdateCentroidCostMapReduce;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.opengis.feature.simple.SimpleFeature;

/** Update the centroid with its cost, measured by the average distance of assigned points. */
public class UpdateCentroidCostJobRunner extends GeoWaveAnalyticJobRunner implements MapReduceJobRunner {

  public UpdateCentroidCostJobRunner() {
    // results (centroids with updated cost) are written back to a GeoWave data store
    super.setOutputFormatConfiguration(new GeoWaveOutputFormatConfiguration());
  }

  // Scope class used to namespace configuration properties for this job.
  @Override
  public Class getScope() {
    return UpdateCentroidCostMapReduce.class;
  }

  /**
   * Pushes centroid-management and group-assignment settings from the property management into the
   * Hadoop configuration, then delegates to the parent runner to launch the job.
   */
  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    CentroidManagerGeoWave.setParameters(config, getScope(), runTimeProperties);
    NestedGroupCentroidAssignment.setParameters(config, getScope(), runTimeProperties);
    runTimeProperties.setConfig(
        new ParameterEnum[] {CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS},
        config,
        getScope());
    // HP Fortify "Command Injection" false positive
    // What Fortify considers "externally-influenced input"
    // comes only from users with OS-level access anyway
    return super.run(config, runTimeProperties);
  }

  /** Wires the map/combine/reduce classes and key/value types for the cost-update job. */
  @Override
  public void configure(final Job job) throws Exception {
    job.setMapperClass(UpdateCentroidCostMapReduce.UpdateCentroidCostMap.class);
    job.setMapOutputKeyClass(GroupIDText.class);
    job.setMapOutputValueClass(CountofDoubleWritable.class);
    job.setCombinerClass(UpdateCentroidCostMapReduce.UpdateCentroidCostCombiner.class);
    job.setReducerClass(UpdateCentroidCostMapReduce.UpdateCentroidCostReducer.class);
    // the reducer writes to the data store; speculative duplicates would double-write
    job.setReduceSpeculativeExecution(false);
    job.setOutputKeyClass(GeoWaveOutputKey.class);
    job.setOutputValueClass(SimpleFeature.class);
  }

  @Override
  protected String getJobName() {
    return "Update Centroid Cost";
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/GeoWaveExtractNNJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.nn; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.GeoWaveInputFormatConfiguration; import org.locationtech.geowave.analytic.mapreduce.MapReduceJobController; import org.locationtech.geowave.analytic.mapreduce.SequenceFileOutputFormatConfiguration; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.ParameterEnum; public class GeoWaveExtractNNJobRunner extends NNJobRunner { public GeoWaveExtractNNJobRunner() { super(); setInputFormatConfiguration(new GeoWaveInputFormatConfiguration()); setOutputFormatConfiguration(new SequenceFileOutputFormatConfiguration()); super.setReducerCount(4); } @Override public Collection> getParameters() { final Set> params = new HashSet<>(); params.addAll(super.getParameters()); params.addAll(MapReduceParameters.getParameters()); return params; } @Override public int run(final PropertyManagement runTimeProperties) throws Exception { return this.run(MapReduceJobController.getConfiguration(runTimeProperties), runTimeProperties); } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/NNData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.nn; public class NNData implements Comparable> { private T neighbor; private double distance; public NNData() {} public NNData(final T neighbor, final double distance) { super(); this.neighbor = neighbor; this.distance = distance; } public double getDistance() { return distance; } public void setDistance(final double distance) { this.distance = distance; } protected T getNeighbor() { return neighbor; } protected void setNeighbor(final T neighbor) { this.neighbor = neighbor; } @Override public int hashCode() { final int prime = 31; int result = 1; long temp; temp = Double.doubleToLongBits(distance); result = (prime * result) + (int) (temp ^ (temp >>> 32)); result = (prime * result) + ((neighbor == null) ? 0 : neighbor.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } @SuppressWarnings("unchecked") final NNData other = (NNData) obj; if (Double.doubleToLongBits(distance) != Double.doubleToLongBits(other.distance)) { return false; } if (neighbor == null) { if (other.neighbor != null) { return false; } } else if (!neighbor.equals(other.neighbor)) { return false; } return true; } @Override public int compareTo(final NNData otherNNData) { final int dist = Double.compare(distance, otherNNData.distance); // do not care about the ordering based on the neighbor data. // just need to force some ordering if they are not the same. return dist == 0 ? 
hashCode() - otherNNData.hashCode() : dist; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/NNJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.nn;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.locationtech.geowave.analytic.AdapterWithObjectWritable;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;
import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PassthruPartitioner;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.analytic.param.PartitionParameters.Partition;
import org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;
import org.locationtech.geowave.analytic.partitioner.Partitioner;

/** Job runner for the nearest-neighbor map-reduce ({@link NNMapReduce}). */
public class NNJobRunner extends GeoWaveAnalyticJobRunner {

  /** Wires the NN mapper/reducer and key/value types. */
  @Override
  public void configure(final Job job) throws Exception {
    job.setMapperClass(NNMapReduce.NNMapper.class);
    job.setReducerClass(NNMapReduce.NNSimpleFeatureIDOutputReducer.class);
    job.setMapOutputKeyClass(PartitionDataWritable.class);
    job.setMapOutputValueClass(AdapterWithObjectWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setSpeculativeExecution(false);
  }

  // Scope class used to namespace configuration properties for this job.
  @Override
  public Class getScope() {
    return NNMapReduce.class;
  }

  /**
   * Resolves the primary and secondary partitioners from the runtime properties, lets them
   * contribute their configuration, copies the partition/distance parameters into the Hadoop
   * configuration, then delegates to the parent runner.
   */
  @Override
  public int run(final Configuration config, final PropertyManagement runTimeProperties)
      throws Exception {
    final Partitioner partitioner = runTimeProperties.getClassInstance(
        Partition.PARTITIONER_CLASS,
        Partitioner.class,
        OrthodromicDistancePartitioner.class);
    final Partitioner secondaryPartitioner = runTimeProperties.getClassInstance(
        Partition.SECONDARY_PARTITIONER_CLASS,
        Partitioner.class,
        PassthruPartitioner.class);
    partitioner.setup(runTimeProperties, getScope(), config);
    // avoid running setup twice when both roles use the same partitioner class
    if (secondaryPartitioner.getClass() != partitioner.getClass()) {
      secondaryPartitioner.setup(runTimeProperties, getScope(), config);
    }
    runTimeProperties.setConfig(
        new ParameterEnum[] {
            Partition.PARTITIONER_CLASS,
            Partition.SECONDARY_PARTITIONER_CLASS,
            Partition.MAX_DISTANCE,
            Partition.MAX_MEMBER_SELECTION,
            Partition.GEOMETRIC_DISTANCE_UNIT,
            Partition.DISTANCE_THRESHOLDS,
            CommonParameters.Common.DISTANCE_FUNCTION_CLASS},
        config,
        getScope());
    // HP Fortify "Command Injection" false positive
    // What Fortify considers "externally-influenced input"
    // comes only from users with OS-level access anyway
    return super.run(config, runTimeProperties);
  }

  /** @return the parent runner's parameters plus the partition/distance parameters used here */
  @Override
  public Collection> getParameters() {
    final Set> params = new HashSet<>();
    params.addAll(super.getParameters());
    params.addAll(
        Arrays.asList(
            new ParameterEnum[] {
                Partition.PARTITIONER_CLASS,
                Partition.MAX_DISTANCE,
                Partition.SECONDARY_PARTITIONER_CLASS,
                Partition.MAX_MEMBER_SELECTION,
                Partition.GEOMETRIC_DISTANCE_UNIT,
                Partition.DISTANCE_THRESHOLDS,
                CommonParameters.Common.DISTANCE_FUNCTION_CLASS}));
    return params;
  }

  @Override
  protected String getJobName() {
    return "Nearest Neighbors";
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/nn/NNMapReduce.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.nn;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.locationtech.geowave.analytic.AdapterWithObjectWritable;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.ScopedJobConfiguration;
import org.locationtech.geowave.analytic.distance.DistanceFn;
import org.locationtech.geowave.analytic.distance.FeatureGeometryDistanceFn;
import org.locationtech.geowave.analytic.nn.DefaultNeighborList;
import org.locationtech.geowave.analytic.nn.DistanceProfile;
import org.locationtech.geowave.analytic.nn.DistanceProfileGenerateFn;
import org.locationtech.geowave.analytic.nn.NNProcessor;
import org.locationtech.geowave.analytic.nn.NNProcessor.CompleteNotifier;
import org.locationtech.geowave.analytic.nn.NeighborList;
import org.locationtech.geowave.analytic.nn.NeighborListFactory;
import org.locationtech.geowave.analytic.nn.TypeConverter;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.ParameterEnum;
import org.locationtech.geowave.analytic.param.ParameterHelper;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters.Partition;
import org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;
import org.locationtech.geowave.analytic.partitioner.Partitioner;
import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;
import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionDataCallback;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.mapreduce.HadoopWritableSerializationTool;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.primitives.UnsignedBytes;

/**
 * Find the nearest neighbors to each item.
 *
 * <p> The solution represented here partitions the data using a partitioner. The nearest neighbors
 * are inspected within those partitions. Each partition is processed in memory. If the partitioner
 * is agnostic to density, then the number of nearest neighbors inspected in a partition may exceed
 * memory. Selecting the appropriate partitioning is critical. It may be best to work bottom up,
 * partitioning at a finer grain and iterating through larger partitions.
 *
 * <p> The reducer has four extension points:
 *
 * <p> (1) createSetForNeighbors() creates a set for primary and secondary neighbor lists. The set
 * implementation can control the amount of memory used. The algorithm loads the primary and
 * secondary sets before performing the neighbor analysis. An implementer can constrain the set
 * size, removing items not considered relevant.
 *
 * <p> (2) createSummary() permits extensions to create a summary object for the entire partition.
 *
 * <p> (3) processNeighbors() permits extensions to process the neighbor list for each primary item
 * and update the summary object.
 *
 * <p> (4) processSummary() permits the reducer to produce an output from the summary object.
 *
 * <p> Properties:
 *
 * <p> "NNMapReduce.Partition.PartitionerClass" -> {@link
 * org.locationtech.geowave.analytic.partitioner.Partitioner}
 *
 * <p> "NNMapReduce.Common.DistanceFunctionClass" -> used to determine the distance between simple
 * features {@link org.locationtech.geowave.analytic.distance.DistanceFn}
 *
 * <p> "NNMapReduce.Partition.MaxMemberSelection" -> maximum number of neighbors (pick the top K
 * closest, where this variable is K) (integer)
 *
 * <p> "NNMapReduce.Partition.PartitionDistance" -> maximum distance between an item and its
 * neighbors (double)
 */
public class NNMapReduce {
  protected static final Logger LOGGER = LoggerFactory.getLogger(NNMapReduce.class);

  /** Maps each input item to one writable record per partition assigned by the partitioner. */
  public static class NNMapper extends Mapper {
    protected Partitioner partitioner;
    protected HadoopWritableSerializationTool serializationTool;
    // reused across map() calls to avoid per-record allocation
    protected final AdapterWithObjectWritable outputValue = new AdapterWithObjectWritable();
    protected final PartitionDataWritable partitionDataWritable = new PartitionDataWritable();

    @Override
    protected void map(
        final GeoWaveInputKey key,
        final Object value,
        final Mapper.Context context) throws IOException, InterruptedException {
      // unwrap the raw object if it arrived as a Hadoop ObjectWritable
      @SuppressWarnings("unchecked")
      final T unwrappedValue =
          (T) ((value instanceof ObjectWritable)
              ? serializationTool.fromWritable(key.getInternalAdapterId(), (ObjectWritable) value)
              : value);
      try {
        // emit one record per partition the item falls into
        partitioner.partition(unwrappedValue, new PartitionDataCallback() {
          @Override
          public void partitionWith(final PartitionData partitionData) throws Exception {
            outputValue.setInternalAdapterId(key.getInternalAdapterId());
            AdapterWithObjectWritable.fillWritableWithAdapter(
                serializationTool,
                outputValue,
                key.getInternalAdapterId(),
                key.getDataId(),
                unwrappedValue);
            partitionDataWritable.setPartitionData(partitionData);
            context.write(partitionDataWritable, outputValue);
          }
        });
      } catch (final IOException e) {
        throw e;
      } catch (final Exception e) {
        // MapReduce contract only allows IOException/InterruptedException
        throw new IOException(e);
      }
    }

    @SuppressWarnings("unchecked")
    @Override
    protected void setup(final Mapper.Context context)
        throws IOException, InterruptedException {
      super.setup(context);
      final ScopedJobConfiguration config =
          new ScopedJobConfiguration(context.getConfiguration(), NNMapReduce.class, LOGGER);
      serializationTool = new HadoopWritableSerializationTool(context);
      try {
        partitioner = config.getInstance(
            PartitionParameters.Partition.PARTITIONER_CLASS,
            Partitioner.class,
            OrthodromicDistancePartitioner.class);
        partitioner.initialize(context, NNMapReduce.class);
      } catch (final Exception e1) {
        throw new IOException(e1);
      }
    }
  }

  /**
   * Base reducer: loads each partition's members into an in-memory {@link NNProcessor}, runs the
   * neighbor analysis, and exposes the four extension points described on the enclosing class.
   */
  public abstract static class NNReducer extends Reducer {
    protected HadoopWritableSerializationTool serializationTool;
    protected DistanceFn distanceFn;
    protected double maxDistance = 1.0;
    protected int maxNeighbors = Integer.MAX_VALUE;
    protected Partitioner partitioner;
    // default converter is a pass-through cast
    protected TypeConverter typeConverter = new TypeConverter() {
      @SuppressWarnings("unchecked")
      @Override
      public VALUEIN convert(final ByteArray id, final Object o) {
        return (VALUEIN) o;
      }
    };
    protected DistanceProfileGenerateFn distanceProfileFn = new LocalDistanceProfileGenerateFn();

    @Override
    protected void reduce(
        final PartitionDataWritable key,
        final Iterable values,
        final Reducer.Context context) throws IOException, InterruptedException {
      final NNProcessor processor = new NNProcessor<>(
          partitioner,
          typeConverter,
          distanceProfileFn,
          maxDistance,
          key.partitionData);
      processor.setUpperBoundPerPartition(maxNeighbors);
      final PARTITION_SUMMARY summary = createSummary();
      // load every member (primary and secondary) of the partition before analysis
      for (final AdapterWithObjectWritable inputValue : values) {
        final Object value =
            AdapterWithObjectWritable.fromWritableWithAdapter(serializationTool, inputValue);
        processor.add(inputValue.getDataId(), key.partitionData.isPrimary(), value);
      }
      preprocess(context, processor, summary);
      processor.process(this.createNeighborsListFactory(summary), new CompleteNotifier() {
        @Override
        public void complete(
            final ByteArray id,
            final VALUEIN value,
            final NeighborList primaryList) throws IOException, InterruptedException {
          context.progress();
          processNeighbors(key.partitionData, id, value, primaryList, context, summary);
          // release the finished item to bound memory use
          processor.remove(id);
        }
      });
      processSummary(key.partitionData, summary, context);
    }

    public NeighborListFactory createNeighborsListFactory(final PARTITION_SUMMARY summary) {
      return new DefaultNeighborList.DefaultNeighborListFactory<>();
    }

    // Hook invoked after loading the partition but before the neighbor analysis; no-op by default.
    protected void preprocess(
        final Reducer.Context context,
        final NNProcessor processor,
        final PARTITION_SUMMARY summary) throws IOException, InterruptedException {}

    /** @return an object that represents a summary of the neighbors processed */
    protected abstract PARTITION_SUMMARY createSummary();

    /** Allow extended classes to do some final processing for the partition. */
    protected abstract void processSummary(
        PartitionData partitionData,
        PARTITION_SUMMARY summary,
        Reducer.Context context) throws IOException, InterruptedException;

    /** allow the extending classes to return sets with constraints and management algorithms */
    protected Set createSetForNeighbors(final boolean isSetForPrimary) {
      return new HashSet<>();
    }

    // Called once per primary item with its completed neighbor list.
    protected abstract void processNeighbors(
        PartitionData partitionData,
        ByteArray primaryId,
        VALUEIN primary,
        NeighborList neighbors,
        Reducer.Context context,
        PARTITION_SUMMARY summary) throws IOException, InterruptedException;

    @SuppressWarnings("unchecked")
    @Override
    protected void setup(final Reducer.Context context)
        throws IOException, InterruptedException {
      final ScopedJobConfiguration config = new ScopedJobConfiguration(
          context.getConfiguration(),
          NNMapReduce.class,
          NNMapReduce.LOGGER);
      serializationTool = new HadoopWritableSerializationTool(context);
      try {
        distanceFn = config.getInstance(
            CommonParameters.Common.DISTANCE_FUNCTION_CLASS,
            DistanceFn.class,
            FeatureGeometryDistanceFn.class);
      } catch (InstantiationException | IllegalAccessException e) {
        throw new IOException(e);
      }
      maxDistance = config.getDouble(PartitionParameters.Partition.MAX_DISTANCE, 1.0);
      try {
        LOGGER.info("Using secondary partitioning");
        // the secondary partitioner refines neighbor candidates within each reduce partition;
        // defaults to a pass-through (single partition)
        partitioner = config.getInstance(
            PartitionParameters.Partition.SECONDARY_PARTITIONER_CLASS,
            Partitioner.class,
            PassthruPartitioner.class);
        ((ParameterHelper) Partition.PARTITION_PRECISION.getHelper()).setValue(
            context.getConfiguration(),
            NNMapReduce.class,
            new Double(1.0));
        partitioner.initialize(context, NNMapReduce.class);
      } catch (final Exception e1) {
        throw new IOException(e1);
      }
      maxNeighbors = config.getInt(
          PartitionParameters.Partition.MAX_MEMBER_SELECTION,
          NNProcessor.DEFAULT_UPPER_BOUND_PARTIION_SIZE);
      LOGGER.info("Maximum Neighbors = {}", maxNeighbors);
    }

    /** Computes distance profiles via {@code distanceFn}, reusing one mutable instance. */
    protected class LocalDistanceProfileGenerateFn implements DistanceProfileGenerateFn {
      // for GC concerns in the default NN case
      DistanceProfile singleNotThreadSafeImage = new DistanceProfile<>();

      @Override
      public DistanceProfile computeProfile(final VALUEIN item1, final VALUEIN item2) {
        singleNotThreadSafeImage.setDistance(distanceFn.measure(item1, item2));
        return singleNotThreadSafeImage;
      }
    }
  }

  /**
   * Concrete reducer that writes, for each primary feature, a comma-separated list of its
   * neighbors' feature IDs.
   */
  public static class NNSimpleFeatureIDOutputReducer extends NNReducer {
    final Text primaryText = new Text();
    final Text neighborsText = new Text();
    // UTF-8 comma used to separate neighbor IDs
    final byte[] sepBytes = new byte[] {0x2c};

    @Override
    protected void processNeighbors(
        final PartitionData partitionData,
        final ByteArray primaryId,
        final SimpleFeature primary,
        final NeighborList neighbors,
        final Reducer.Context context,
        final Boolean summary) throws IOException, InterruptedException {
      // items with no neighbors produce no output
      if ((neighbors == null) || (neighbors.size() == 0)) {
        return;
      }
      primaryText.clear();
      neighborsText.clear();
      byte[] utfBytes;
      try {
        utfBytes = primary.getID().getBytes("UTF-8");
        primaryText.append(utfBytes, 0, utfBytes.length);
        for (final Map.Entry neighbor : neighbors) {
          if (neighborsText.getLength() > 0) {
            neighborsText.append(sepBytes, 0, sepBytes.length);
          }
          utfBytes = neighbor.getValue().getID().getBytes("UTF-8");
          neighborsText.append(utfBytes, 0, utfBytes.length);
        }
        context.write(primaryText, neighborsText);
      } catch (final UnsupportedEncodingException e) {
        throw new RuntimeException("UTF-8 Encoding invalid for Simople feature ID", e);
      }
    }

    @Override
    protected Boolean createSummary() {
      // this reducer keeps no per-partition state
      return Boolean.TRUE;
    }

    @Override
    protected void processSummary(
        final PartitionData partitionData,
        final Boolean summary,
        final org.apache.hadoop.mapreduce.Reducer.Context context) {
      // do nothing
    }
  }

  /** Writable wrapper around a {@link PartitionData}, ordered by composite key then group id. */
  public static class PartitionDataWritable implements Writable, WritableComparable {
    protected PartitionData partitionData;

    public PartitionDataWritable() {}

    protected void setPartitionData(final PartitionData partitionData) {
      this.partitionData = partitionData;
    }

    public PartitionData getPartitionData() {
      return partitionData;
    }

    public PartitionDataWritable(final PartitionData partitionData) {
      this.partitionData = partitionData;
    }

    @Override
    public void readFields(final DataInput input) throws IOException {
      partitionData = new PartitionData();
      partitionData.readFields(input);
    }

    @Override
    public void write(final DataOutput output) throws IOException {
      partitionData.write(output);
    }

    @Override
    public int compareTo(final PartitionDataWritable o) {
      // order by composite key bytes first, then by group id when both are present
      final int val = UnsignedBytes.lexicographicalComparator().compare(
          partitionData.getCompositeKey().getBytes(),
          o.partitionData.getCompositeKey().getBytes());
      if ((val == 0)
          && (o.partitionData.getGroupId() != null)
          && (partitionData.getGroupId() != null)) {
        return UnsignedBytes.lexicographicalComparator().compare(
            partitionData.getGroupId().getBytes(),
            o.partitionData.getGroupId().getBytes());
      }
      return val;
    }

    @Override
    public String toString() {
      return partitionData.toString();
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((partitionData == null) ? 0 : partitionData.hashCode());
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final PartitionDataWritable other = (PartitionDataWritable) obj;
      if (partitionData == null) {
        if (other.partitionData != null) {
          return false;
        }
      } else if (!partitionData.equals(other.partitionData)) {
        return false;
      }
      return true;
    }
  }

  /** Partitioner that assigns every entry to one fixed partition (no actual partitioning). */
  public static class PassthruPartitioner implements Partitioner {
    // serialization version for this Writable/serializable partitioner
    private static final long serialVersionUID = -1022316020113365561L;

    @Override
    public void initialize(final JobContext context, final Class scope) throws IOException {}

    // the single shared partition every entry maps to
    private static final List FixedPartition =
        Collections.singletonList(
            new PartitionData(new ByteArray(new byte[] {}), new ByteArray("1"), true));

    @Override
    public List getCubeIdentifiers(final T entry) {
      return FixedPartition;
    }

    @Override
    public void partition(final T entry, final PartitionDataCallback callback) throws Exception {
      callback.partitionWith(FixedPartition.get(0));
    }

    @Override
    public Collection> getParameters() {
      return Collections.emptyList();
    }

    @Override
    public void setup(
        final PropertyManagement runTimeProperties,
        final Class scope,
        final Configuration configuration) {}
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/AnalyticOperationCLIProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations;

import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;

/** Registers the analytic map-reduce commands with the GeoWave CLI via SPI. */
public class AnalyticOperationCLIProvider implements CLIOperationProviderSpi {
  // all CLI operations contributed by this module
  private static final Class[] OPERATIONS =
      new Class[] {
          AnalyticSection.class,
          DBScanCommand.class,
          KdeCommand.class,
          KmeansJumpCommand.class,
          KmeansParallelCommand.class,
          NearestNeighborCommand.class};

  @Override
  public Class[] getOperations() {
    return OPERATIONS;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/AnalyticSection.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * CLI section grouping the analytic commands (dbscan, kde, kmeans, nn) under
 * {@code geowave analytic}. Intentionally empty — it only carries the annotations.
 */
@GeowaveOperation(name = "analytic", parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Commands to run analytics on GeoWave data sets")
public class AnalyticSection extends DefaultOperation {
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/DBScanCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.mapreduce.dbscan.DBScanIterationsJobRunner;
import org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions;
import org.locationtech.geowave.analytic.mapreduce.operations.options.DBScanOptions;
import org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter;
import org.locationtech.geowave.analytic.param.ExtractParameters.Extract;
import org.locationtech.geowave.analytic.param.StoreParameters;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/** CLI command that runs the DBSCAN density-based clustering analytic as a map-reduce job. */
@GeowaveOperation(name = "dbscan", parentOperation = AnalyticSection.class)
@Parameters(commandDescription = "Density based scanner")
public class DBScanCommand extends ServiceEnabledCommand {

  // positional arguments; exactly one (the input store name) is expected
  @Parameter(description = "")
  private List parameters = new ArrayList<>();

  @ParametersDelegate
  private CommonOptions commonOptions = new CommonOptions();

  @ParametersDelegate
  private DBScanOptions dbScanOptions = new DBScanOptions();

  // resolved from the store name in computeResults()
  private DataStorePluginOptions inputStoreOptions = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    // Ensure we have all the required arguments
    if (parameters.size() != 1) {
      throw new ParameterException("Requires arguments: ");
    }
    computeResults(params);
  }

  // DBSCAN can take a long time, so run it asynchronously through the service layer.
  @Override
  public boolean runAsync() {
    return true;
  }

  public List getParameters() {
    return parameters;
  }

  public void setParameters(final String storeName) {
    parameters = new ArrayList<>();
    parameters.add(storeName);
  }

  public CommonOptions getCommonOptions() {
    return commonOptions;
  }

  public void setCommonOptions(final CommonOptions commonOptions) {
    this.commonOptions = commonOptions;
  }

  public DBScanOptions getDbScanOptions() {
    return dbScanOptions;
  }

  public void setDbScanOptions(final DBScanOptions dbScanOptions) {
    this.dbScanOptions = dbScanOptions;
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  /**
   * Loads the input store, converts the CLI options into analytic properties, and runs the
   * iterative DBSCAN job runner.
   *
   * @throws RuntimeException if the job runner exits with a non-zero status
   */
  @Override
  public Void computeResults(final OperationParams params) throws Exception {
    final String inputStoreName = parameters.get(0);

    // Config file
    final File configFile = getGeoWaveConfigFile(params);

    // fall back to the configured HDFS host/port when not given on the command line
    if (commonOptions.getMapReduceHdfsHostPort() == null) {
      final Properties configProperties = ConfigOptions.loadProperties(configFile);
      final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);
      commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl);
    }

    // Attempt to load store.
    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());

    // Save a reference to the store in the property management.
    final PersistableStore persistedStore = new PersistableStore(inputStoreOptions);
    final PropertyManagement properties = new PropertyManagement();
    properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore);

    // Convert properties from DBScanOptions and CommonOptions
    final PropertyManagementConverter converter = new PropertyManagementConverter(properties);
    converter.readProperties(commonOptions);
    converter.readProperties(dbScanOptions);
    properties.store(Extract.QUERY, commonOptions.buildQuery());

    final DBScanIterationsJobRunner runner = new DBScanIterationsJobRunner();
    final int status = runner.run(properties);
    if (status != 0) {
      throw new RuntimeException("Failed to execute: " + status);
    }
    return null;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/KdeCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.analytic.mapreduce.kde.KDECommandLineOptions; import org.locationtech.geowave.analytic.mapreduce.kde.KDEJobRunner; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "kde", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "Kernel density estimate") public class KdeCommand extends ServiceEnabledCommand { private static final Logger LOGGER = LoggerFactory.getLogger(KdeCommand.class); @Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private KDECommandLineOptions kdeOptions = new KDECommandLineOptions(); private DataStorePluginOptions inputStoreOptions = null; private DataStorePluginOptions outputStoreOptions = null; @Override 
public void execute(final OperationParams params) throws Exception { computeResults(params); } @Override public boolean runAsync() { return true; } public KDEJobRunner createRunner(final OperationParams params) throws IOException { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } final String inputStore = parameters.get(0); final String outputStore = parameters.get(1); // Config file final File configFile = getGeoWaveConfigFile(params); Index outputPrimaryIndex = null; // Attempt to load input store. inputStoreOptions = CLIUtils.loadStore(inputStore, configFile, params.getConsole()); // Attempt to load output store. outputStoreOptions = CLIUtils.loadStore(outputStore, configFile, params.getConsole()); if ((kdeOptions.getOutputIndex() != null) && !kdeOptions.getOutputIndex().trim().isEmpty()) { final String outputIndex = kdeOptions.getOutputIndex(); // Load the Indices final List outputIndices = DataStoreUtils.loadIndices(outputStoreOptions.createIndexStore(), outputIndex); for (final Index primaryIndex : outputIndices) { if (SpatialDimensionalityTypeProvider.isSpatial(primaryIndex)) { outputPrimaryIndex = primaryIndex; } else { LOGGER.error( "spatial temporal is not supported for output index. Only spatial index is supported."); throw new IOException( "spatial temporal is not supported for output index. 
Only spatial index is supported."); } } } final KDEJobRunner runner = new KDEJobRunner( kdeOptions, inputStoreOptions, outputStoreOptions, configFile, outputPrimaryIndex); return runner; } public List getParameters() { return parameters; } public void setParameters(final String inputStore, final String outputStore) { parameters = new ArrayList<>(); parameters.add(inputStore); parameters.add(outputStore); } public KDECommandLineOptions getKdeOptions() { return kdeOptions; } public void setKdeOptions(final KDECommandLineOptions kdeOptions) { this.kdeOptions = kdeOptions; } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public DataStorePluginOptions getOutputStoreOptions() { return outputStoreOptions; } @Override public Void computeResults(final OperationParams params) throws Exception { final KDEJobRunner runner = createRunner(params); final int status = runner.runJob(); if (status != 0) { throw new RuntimeException("Failed to execute: " + status); } return null; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/KmeansJumpCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.clustering.runner.MultiLevelJumpKMeansClusteringJobRunner; import org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansCommonOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansJumpOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter; import org.locationtech.geowave.analytic.param.ExtractParameters.Extract; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = 
"kmeansjump", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "KMeans clustering using jump method") public class KmeansJumpCommand extends DefaultOperation implements Command { @Parameter(description = "") private List parameters = new ArrayList<>(); @ParametersDelegate private CommonOptions commonOptions = new CommonOptions(); @ParametersDelegate private KMeansCommonOptions kmeansCommonOptions = new KMeansCommonOptions(); @ParametersDelegate private KMeansJumpOptions kmeansJumpOptions = new KMeansJumpOptions(); private DataStorePluginOptions inputStoreOptions = null; @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); // Config file final File configFile = getGeoWaveConfigFile(params); if (commonOptions.getMapReduceHdfsHostPort() == null) { final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties); commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl); } // Attempt to load store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); // Save a reference to the store in the property management. 
final PersistableStore persistedStore = new PersistableStore(inputStoreOptions); final PropertyManagement properties = new PropertyManagement(); properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore); // Convert properties from DBScanOptions and CommonOptions final PropertyManagementConverter converter = new PropertyManagementConverter(properties); converter.readProperties(commonOptions); converter.readProperties(kmeansCommonOptions); converter.readProperties(kmeansJumpOptions); properties.store(Extract.QUERY, commonOptions.buildQuery()); final MultiLevelJumpKMeansClusteringJobRunner runner = new MultiLevelJumpKMeansClusteringJobRunner(); final int status = runner.run(properties); if (status != 0) { throw new RuntimeException("Failed to execute: " + status); } } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public CommonOptions getCommonOptions() { return commonOptions; } public void setCommonOptions(final CommonOptions commonOptions) { this.commonOptions = commonOptions; } public KMeansCommonOptions getKmeansCommonOptions() { return kmeansCommonOptions; } public void setKmeansCommonOptions(final KMeansCommonOptions kmeansCommonOptions) { this.kmeansCommonOptions = kmeansCommonOptions; } public KMeansJumpOptions getKmeansJumpOptions() { return kmeansJumpOptions; } public void setKmeansJumpOptions(final KMeansJumpOptions kmeansJumpOptions) { this.kmeansJumpOptions = kmeansJumpOptions; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/KmeansParallelCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.clustering.runner.MultiLevelKMeansClusteringJobRunner; import org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansCommonOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.KMeansParallelOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter; import org.locationtech.geowave.analytic.param.ExtractParameters.Extract; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = 
"kmeansparallel", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "KMeans parallel clustering") public class KmeansParallelCommand extends DefaultOperation implements Command { @Parameter(description = "") private List parameters = new ArrayList<>(); @ParametersDelegate private CommonOptions commonOptions = new CommonOptions(); @ParametersDelegate private KMeansCommonOptions kmeansCommonOptions = new KMeansCommonOptions(); @ParametersDelegate private KMeansParallelOptions kmeansParallelOptions = new KMeansParallelOptions(); DataStorePluginOptions inputStoreOptions = null; @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); // Config file final File configFile = getGeoWaveConfigFile(params); if (commonOptions.getMapReduceHdfsHostPort() == null) { final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties); commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl); } // Attempt to load store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); // Save a reference to the store in the property management. 
final PersistableStore persistedStore = new PersistableStore(inputStoreOptions); final PropertyManagement properties = new PropertyManagement(); properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore); // Convert properties from DBScanOptions and CommonOptions final PropertyManagementConverter converter = new PropertyManagementConverter(properties); converter.readProperties(commonOptions); converter.readProperties(kmeansCommonOptions); converter.readProperties(kmeansParallelOptions); properties.store(Extract.QUERY, commonOptions.buildQuery()); final MultiLevelKMeansClusteringJobRunner runner = new MultiLevelKMeansClusteringJobRunner(); final int status = runner.run(properties); if (status != 0) { throw new RuntimeException("Failed to execute: " + status); } } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public CommonOptions getCommonOptions() { return commonOptions; } public void setCommonOptions(final CommonOptions commonOptions) { this.commonOptions = commonOptions; } public KMeansCommonOptions getKmeansCommonOptions() { return kmeansCommonOptions; } public void setKmeansCommonOptions(final KMeansCommonOptions kmeansCommonOptions) { this.kmeansCommonOptions = kmeansCommonOptions; } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public KMeansParallelOptions getKmeansParallelOptions() { return kmeansParallelOptions; } public void setKmeansParallelOptions(final KMeansParallelOptions kmeansParallelOptions) { this.kmeansParallelOptions = kmeansParallelOptions; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/NearestNeighborCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.nn.GeoWaveExtractNNJobRunner; import org.locationtech.geowave.analytic.mapreduce.operations.options.CommonOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.NearestNeighborOptions; import org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter; import org.locationtech.geowave.analytic.param.ExtractParameters.Extract; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "nn", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "Nearest neighbors") public class NearestNeighborCommand extends ServiceEnabledCommand { 
@Parameter(description = "") private List parameters = new ArrayList<>(); @ParametersDelegate private CommonOptions commonOptions = new CommonOptions(); @ParametersDelegate private NearestNeighborOptions nnOptions = new NearestNeighborOptions(); private DataStorePluginOptions inputStoreOptions = null; @Override public void execute(final OperationParams params) throws Exception { computeResults(params); } @Override public boolean runAsync() { return true; } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public CommonOptions getCommonOptions() { return commonOptions; } public void setCommonOptions(final CommonOptions commonOptions) { this.commonOptions = commonOptions; } public NearestNeighborOptions getNnOptions() { return nnOptions; } public void setNnOptions(final NearestNeighborOptions nnOptions) { this.nnOptions = nnOptions; } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } @Override public Void computeResults(final OperationParams params) throws Exception { // Ensure we have all the required arguments if ((parameters.size() != 1) && (inputStoreOptions == null)) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); // Config file final File configFile = getGeoWaveConfigFile(params); if (commonOptions.getMapReduceHdfsHostPort() == null) { final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties); commonOptions.setMapReduceHdfsHostPort(hdfsFSUrl); } // Attempt to load store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole()); // Save a reference to the store in the property management. 
final PersistableStore persistedStore = new PersistableStore(inputStoreOptions); final PropertyManagement properties = new PropertyManagement(); properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore); // Convert properties from DBScanOptions and CommonOptions final PropertyManagementConverter converter = new PropertyManagementConverter(properties); converter.readProperties(commonOptions); converter.readProperties(nnOptions); properties.store(Extract.QUERY, commonOptions.buildQuery()); final GeoWaveExtractNNJobRunner runner = new GeoWaveExtractNNJobRunner(); final int status = runner.run(properties); if (status != 0) { throw new RuntimeException("Failed to execute: " + status); } return null; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/CommonOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations.options; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.InputParameters; import org.locationtech.geowave.analytic.param.MapReduceParameters; import org.locationtech.geowave.analytic.param.OutputParameters; import org.locationtech.geowave.analytic.param.annotations.CommonParameter; import org.locationtech.geowave.analytic.param.annotations.ExtractParameter; import org.locationtech.geowave.analytic.param.annotations.InputParameter; import org.locationtech.geowave.analytic.param.annotations.MapReduceParameter; import org.locationtech.geowave.analytic.param.annotations.OutputParameter; import org.locationtech.geowave.core.cli.annotations.PrefixParameter; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; public class CommonOptions { @MapReduceParameter(MapReduceParameters.MRConfig.CONFIG_FILE) @Parameter(names = {"-conf", "--mapReduceConfigFile"}, description = "MapReduce Configuration") private String mapReduceConfigFile; @MapReduceParameter(MapReduceParameters.MRConfig.HDFS_BASE_DIR) @Parameter( names = {"-hdfsbase", "--mapReduceHdfsBaseDir"}, required = true, description = "Fully qualified path to the base directory in hdfs") private String mapReduceHdfsBaseDir; @MapReduceParameter(MapReduceParameters.MRConfig.HDFS_HOST_PORT) @Parameter( names = {"-hdfs", "--mapReduceHdfsHostPort"}, 
description = "HDFS hostname and port in the format hostname:port") private String mapReduceHdfsHostPort; @MapReduceParameter(MapReduceParameters.MRConfig.JOBTRACKER_HOST_PORT) @Parameter( names = {"-jobtracker", "--mapReduceJobtrackerHostPort"}, description = "[REQUIRED (or resourceman)] Hadoop job tracker hostname and port in the format hostname:port") private String mapReduceJobtrackerHostPort; @MapReduceParameter(MapReduceParameters.MRConfig.YARN_RESOURCE_MANAGER) @Parameter( names = {"-resourceman", "--mapReduceYarnResourceManager"}, description = "[REQUIRED (or jobtracker)] Yarn resource manager hostname and port in the format hostname:port") private String mapReduceYarnResourceManager; @CommonParameter(CommonParameters.Common.DISTANCE_FUNCTION_CLASS) @Parameter( names = {"-cdf", "--commonDistanceFunctionClass"}, description = "Distance Function Class implements org.locationtech.geowave.analytics.distance.DistanceFn") private String commonDistanceFunctionClass; @ParametersDelegate @PrefixParameter(prefix = "query") private QueryOptionsCommand queryOptions = new QueryOptionsCommand(); @ExtractParameter(ExtractParameters.Extract.MAX_INPUT_SPLIT) @Parameter( names = {"-emx", "--extractMaxInputSplit"}, required = true, description = "Maximum hdfs input split size") private String extractMaxInputSplit; @ExtractParameter(ExtractParameters.Extract.MIN_INPUT_SPLIT) @Parameter( names = {"-emn", "--extractMinInputSplit"}, required = true, description = "Minimum hdfs input split size") private String extractMinInputSplit; @ExtractParameter(ExtractParameters.Extract.QUERY) @Parameter(names = {"-eq", "--extractQuery"}, description = "Query") private String extractQuery; @OutputParameter(OutputParameters.Output.OUTPUT_FORMAT) @Parameter(names = {"-ofc", "--outputOutputFormat"}, description = "Output Format Class") private String outputOutputFormat; @InputParameter(InputParameters.Input.INPUT_FORMAT) @Parameter(names = {"-ifc", "--inputFormatClass"}, description = "Input 
Format Class") private String inputFormatClass; @InputParameter(InputParameters.Input.HDFS_INPUT_PATH) @Parameter(names = {"-iip", "--inputHdfsPath"}, hidden = true, description = "Input Path") private String inputHdfsPath; @OutputParameter(OutputParameters.Output.REDUCER_COUNT) @Parameter( names = {"-orc", "--outputReducerCount"}, description = "Number of Reducers For Output") private String outputReducerCount; public String getCommonDistanceFunctionClass() { return commonDistanceFunctionClass; } public void setCommonDistanceFunctionClass(final String commonDistanceFunctionClass) { this.commonDistanceFunctionClass = commonDistanceFunctionClass; } public QueryOptionsCommand getQueryOptions() { return queryOptions; } public void setQueryOptions(final QueryOptionsCommand extractQueryOptions) { queryOptions = extractQueryOptions; } public String getExtractMaxInputSplit() { return extractMaxInputSplit; } public void setExtractMaxInputSplit(final String extractMaxInputSplit) { this.extractMaxInputSplit = extractMaxInputSplit; } public String getExtractMinInputSplit() { return extractMinInputSplit; } public void setExtractMinInputSplit(final String extractMinInputSplit) { this.extractMinInputSplit = extractMinInputSplit; } public String getExtractQuery() { return extractQuery; } public void setExtractQuery(final String extractQuery) { this.extractQuery = extractQuery; } public String getOutputOutputFormat() { return outputOutputFormat; } public void setOutputOutputFormat(final String outputOutputFormat) { this.outputOutputFormat = outputOutputFormat; } public String getOutputReducerCount() { return outputReducerCount; } public void setOutputReducerCount(final String outputReducerCount) { this.outputReducerCount = outputReducerCount; } public String getInputFormatClass() { return inputFormatClass; } public void setInputFormatClass(final String inputFormatClass) { this.inputFormatClass = inputFormatClass; } public String getInputHdfsPath() { return inputHdfsPath; } public 
void setInputHdfsPath(final String inputHdfsPath) { this.inputHdfsPath = inputHdfsPath; } /** * Build the query options from the command line arguments. */ public Query buildQuery() { final QueryBuilder bldr = QueryBuilder.newBuilder(); if ((queryOptions.getTypeNames() != null) && (queryOptions.getTypeNames().length > 0)) { bldr.setTypeNames(queryOptions.getTypeNames()); } if (queryOptions.getAuthorizations() != null) { bldr.setAuthorizations(queryOptions.getAuthorizations()); } if (queryOptions.getIndexName() != null) { bldr.indexName(queryOptions.getIndexName()); } return bldr.build(); } public String getMapReduceConfigFile() { return mapReduceConfigFile; } public void setMapReduceConfigFile(final String mapReduceConfigFile) { this.mapReduceConfigFile = mapReduceConfigFile; } public String getMapReduceHdfsBaseDir() { return mapReduceHdfsBaseDir; } public void setMapReduceHdfsBaseDir(final String mapReduceHdfsBaseDir) { this.mapReduceHdfsBaseDir = mapReduceHdfsBaseDir; } public String getMapReduceHdfsHostPort() { return mapReduceHdfsHostPort; } public void setMapReduceHdfsHostPort(final String mapReduceHdfsHostPort) { this.mapReduceHdfsHostPort = mapReduceHdfsHostPort; } public String getMapReduceJobtrackerHostPort() { return mapReduceJobtrackerHostPort; } public void setMapReduceJobtrackerHostPort(final String mapReduceJobtrackerHostPort) { this.mapReduceJobtrackerHostPort = mapReduceJobtrackerHostPort; } public String getMapReduceYarnResourceManager() { return mapReduceYarnResourceManager; } public void setMapReduceYarnResourceManager(final String mapReduceYarnResourceManager) { this.mapReduceYarnResourceManager = mapReduceYarnResourceManager; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/DBScanOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations.options;

import org.locationtech.geowave.analytic.param.ClusteringParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.HullParameters;
import org.locationtech.geowave.analytic.param.OutputParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.param.annotations.ClusteringParameter;
import org.locationtech.geowave.analytic.param.annotations.GlobalParameter;
import org.locationtech.geowave.analytic.param.annotations.HullParameter;
import org.locationtech.geowave.analytic.param.annotations.OutputParameter;
import org.locationtech.geowave.analytic.param.annotations.PartitionParameter;
import com.beust.jcommander.Parameter;

/**
 * JCommander options bean for the DBScan analytic. Each field carries both a JCommander
 * {@code @Parameter} (command-line binding) and a GeoWave analytic parameter annotation so that
 * parsed values can be copied into a {@code PropertyManagement} via reflection (see
 * {@code PropertyManagementConverter} in this package).
 */
public class DBScanOptions {
  @OutputParameter(OutputParameters.Output.HDFS_OUTPUT_PATH)
  @Parameter(names = {"-oop", "--outputHdfsOutputPath"}, description = "Output HDFS File Path")
  private String outputHdfsOutputPath;

  @PartitionParameter(PartitionParameters.Partition.DISTANCE_THRESHOLDS)
  @Parameter(
      names = {"-pdt", "--partitionDistanceThresholds"},
      description = "Comma separated list of distance thresholds, per dimension")
  private String partitioningDistanceThresholds;

  @PartitionParameter(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT)
  @Parameter(
      names = {"-pdu", "--partitionGeometricDistanceUnit"},
      description = "Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit)")
  private String partitioningGeometricDistanceUnit;

  @ClusteringParameter(ClusteringParameters.Clustering.MAX_ITERATIONS)
  @Parameter(
      names = {"-cmi", "--clusteringMaxIterations"},
      required = true,
      description = "Maximum number of iterations when finding optimal clusters")
  private String clusteringMaxIterations;

  @ClusteringParameter(ClusteringParameters.Clustering.MINIMUM_SIZE)
  @Parameter(
      names = {"-cms", "--clusteringMinimumSize"},
      required = true,
      description = "Minimum Cluster Size")
  private String clusteringMinimumSize;

  @GlobalParameter(GlobalParameters.Global.BATCH_ID)
  @Parameter(names = {"-b", "--globalBatchId"}, description = "Batch ID")
  private String globalBatchId;

  @HullParameter(HullParameters.Hull.DATA_TYPE_ID)
  @Parameter(names = {"-hdt", "--hullDataTypeId"}, description = "Data Type ID for a centroid item")
  private String hullDataTypeId;

  @HullParameter(HullParameters.Hull.PROJECTION_CLASS)
  @Parameter(
      names = {"-hpe", "--hullProjectionClass"},
      description = "Class to project on to 2D space. Implements org.locationtech.geowave.analytics.tools.Projection")
  private String hullProjectionClass;

  @OutputParameter(OutputParameters.Output.DATA_NAMESPACE_URI)
  @Parameter(
      names = {"-ons", "--outputDataNamespaceUri"},
      description = "Output namespace for objects that will be written to GeoWave")
  private String outputDataNamespaceUri;

  @OutputParameter(OutputParameters.Output.DATA_TYPE_ID)
  @Parameter(
      names = {"-odt", "--outputDataTypeId"},
      description = "Output Data ID assigned to objects that will be written to GeoWave")
  private String outputDataTypeId;

  @OutputParameter(OutputParameters.Output.INDEX_ID)
  @Parameter(
      names = {"-oid", "--outputIndexId"},
      description = "Output Index ID for objects that will be written to GeoWave")
  private String outputIndexId;

  @PartitionParameter(PartitionParameters.Partition.MAX_MEMBER_SELECTION)
  @Parameter(
      names = {"-pms", "--partitionMaxMemberSelection"},
      description = "Maximum number of members selected from a partition")
  private String partitionMaxMemberSelection;

  // Description fixed: previously read "Index Identifier for Centroids" (copy-paste error).
  @PartitionParameter(PartitionParameters.Partition.PARTITIONER_CLASS)
  @Parameter(
      names = {"-pc", "--partitionPartitionerClass"},
      description = "Class used to perform partitioning")
  private String partitionPartitionerClass;

  @PartitionParameter(PartitionParameters.Partition.PARTITION_DECREASE_RATE)
  @Parameter(
      names = {"-pdr", "--partitionPartitionDecreaseRate"},
      description = "Rate of decrease for precision(within (0,1])")
  private String partitionPartitionDecreaseRate;

  @PartitionParameter(PartitionParameters.Partition.MAX_DISTANCE)
  @Parameter(
      names = {"-pmd", "--partitionMaxDistance"},
      required = true,
      description = "Maximum Partition Distance")
  private String partitionMaxDistance;

  @PartitionParameter(PartitionParameters.Partition.PARTITION_PRECISION)
  @Parameter(names = {"-pp", "--partitionPartitionPrecision"}, description = "Partition Precision")
  private String partitionPartitionPrecision;

  @PartitionParameter(PartitionParameters.Partition.SECONDARY_PARTITIONER_CLASS)
  @Parameter(
      names = {"-psp", "--partitionSecondaryPartitionerClass"},
      description = "Perform secondary partitioning with the provided class")
  private String partitionSecondaryPartitionerClass;

  // Standard bean accessors below; JCommander populates the fields directly via the annotations.

  public String getPartitioningDistanceThresholds() {
    return partitioningDistanceThresholds;
  }

  public void setPartitioningDistanceThresholds(final String partitioningDistanceThresholds) {
    this.partitioningDistanceThresholds = partitioningDistanceThresholds;
  }

  public String getPartitioningGeometricDistanceUnit() {
    return partitioningGeometricDistanceUnit;
  }

  public void setPartitioningGeometricDistanceUnit(final String partitioningGeometricDistanceUnit) {
    this.partitioningGeometricDistanceUnit = partitioningGeometricDistanceUnit;
  }

  public String getClusteringMaxIterations() {
    return clusteringMaxIterations;
  }

  public void setClusteringMaxIterations(final String clusteringMaxIterations) {
    this.clusteringMaxIterations = clusteringMaxIterations;
  }

  public String getClusteringMinimumSize() {
    return clusteringMinimumSize;
  }

  public void setClusteringMinimumSize(final String clusteringMinimumSize) {
    this.clusteringMinimumSize = clusteringMinimumSize;
  }

  public String getGlobalBatchId() {
    return globalBatchId;
  }

  public void setGlobalBatchId(final String globalBatchId) {
    this.globalBatchId = globalBatchId;
  }

  public String getHullDataTypeId() {
    return hullDataTypeId;
  }

  public void setHullDataTypeId(final String hullDataTypeId) {
    this.hullDataTypeId = hullDataTypeId;
  }

  public String getHullProjectionClass() {
    return hullProjectionClass;
  }

  public void setHullProjectionClass(final String hullProjectionClass) {
    this.hullProjectionClass = hullProjectionClass;
  }

  public String getOutputDataNamespaceUri() {
    return outputDataNamespaceUri;
  }

  public void setOutputDataNamespaceUri(final String outputDataNamespaceUri) {
    this.outputDataNamespaceUri = outputDataNamespaceUri;
  }

  public String getOutputDataTypeId() {
    return outputDataTypeId;
  }

  public void setOutputDataTypeId(final String outputDataTypeId) {
    this.outputDataTypeId = outputDataTypeId;
  }

  public String getOutputIndexId() {
    return outputIndexId;
  }

  public void setOutputIndexId(final String outputIndexId) {
    this.outputIndexId = outputIndexId;
  }

  public String getPartitionMaxMemberSelection() {
    return partitionMaxMemberSelection;
  }

  public void setPartitionMaxMemberSelection(final String partitionMaxMemberSelection) {
    this.partitionMaxMemberSelection = partitionMaxMemberSelection;
  }

  public String getPartitionPartitionerClass() {
    return partitionPartitionerClass;
  }

  public void setPartitionPartitionerClass(final String partitionPartitionerClass) {
    this.partitionPartitionerClass = partitionPartitionerClass;
  }

  public String getPartitionPartitionDecreaseRate() {
    return partitionPartitionDecreaseRate;
  }

  public void setPartitionPartitionDecreaseRate(final String partitionPartitionDecreaseRate) {
    this.partitionPartitionDecreaseRate = partitionPartitionDecreaseRate;
  }

  public String getPartitionMaxDistance() {
    return partitionMaxDistance;
  }

  public void setPartitionMaxDistance(final String partitionMaxDistance) {
    this.partitionMaxDistance = partitionMaxDistance;
  }

  public String getPartitionPartitionPrecision() {
    return partitionPartitionPrecision;
  }

  public void setPartitionPartitionPrecision(final String partitionPartitionPrecision) {
    this.partitionPartitionPrecision = partitionPartitionPrecision;
  }

  public String getPartitionSecondaryPartitionerClass() {
    return partitionSecondaryPartitionerClass;
  }

  public void setPartitionSecondaryPartitionerClass(
      final String partitionSecondaryPartitionerClass) {
    this.partitionSecondaryPartitionerClass = partitionSecondaryPartitionerClass;
  }

  public String getOutputHdfsOutputPath() {
    return outputHdfsOutputPath;
  }

  public void setOutputHdfsOutputPath(final String outputHdfsOutputPath) {
    this.outputHdfsOutputPath = outputHdfsOutputPath;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/KMeansCommonOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations.options; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.ExtractParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.analytic.param.HullParameters; import org.locationtech.geowave.analytic.param.annotations.CentroidParameter; import org.locationtech.geowave.analytic.param.annotations.ClusteringParameter; import org.locationtech.geowave.analytic.param.annotations.CommonParameter; import org.locationtech.geowave.analytic.param.annotations.ExtractParameter; import org.locationtech.geowave.analytic.param.annotations.GlobalParameter; import org.locationtech.geowave.analytic.param.annotations.HullParameter; import com.beust.jcommander.Parameter; public class KMeansCommonOptions { @CentroidParameter(CentroidParameters.Centroid.EXTRACTOR_CLASS) @Parameter( names = {"-cce", "--centroidExtractorClass"}, description = "Centroid Exractor Class implements org.locationtech.geowave.analytics.extract.CentroidExtractor") private String centroidExtractorClass; @CentroidParameter(CentroidParameters.Centroid.INDEX_NAME) @Parameter(names = {"-cid", "--centroidIndexId"}, description = "Index Identifier for Centroids") private String centroidIndexId; @CentroidParameter(CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS) @Parameter( names = {"-cfc", "--centroidWrapperFactoryClass"}, description = "A factory class that implements 
org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory") private String centroidWrapperFactoryClass; @CentroidParameter(CentroidParameters.Centroid.ZOOM_LEVEL) @Parameter(names = {"-czl", "--centroidZoomLevel"}, description = "Zoom Level Number") private String centroidZoomLevel; @ClusteringParameter(ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE) @Parameter( names = {"-cct", "--clusteringConverganceTolerance"}, description = "Convergence Tolerance") private String clusteringConverganceTolerance; @ClusteringParameter(ClusteringParameters.Clustering.MAX_ITERATIONS) @Parameter( names = {"-cmi", "--clusteringMaxIterations"}, required = true, description = "Maximum number of iterations when finding optimal clusters") private String clusteringMaxIterations; @ClusteringParameter(ClusteringParameters.Clustering.MAX_REDUCER_COUNT) @Parameter( names = {"-crc", "--clusteringMaxReducerCount"}, description = "Maximum Clustering Reducer Count") private String clusteringMaxReducerCount; @ClusteringParameter(ClusteringParameters.Clustering.ZOOM_LEVELS) @Parameter( names = {"-zl", "--clusteringZoomLevels"}, required = true, description = "Number of Zoom Levels to Process") private String clusteringZoomLevels; @CommonParameter(CommonParameters.Common.DIMENSION_EXTRACT_CLASS) @Parameter( names = {"-dde", "--commonDimensionExtractClass"}, description = "Dimension Extractor Class implements org.locationtech.geowave.analytics.extract.DimensionExtractor") private String commonDimensionExtractClass; @ExtractParameter(ExtractParameters.Extract.DATA_NAMESPACE_URI) @Parameter( names = {"-ens", "--extractDataNamespaceUri"}, description = "Output Data Namespace URI") private String extractDataNamespaceUri; @ExtractParameter(ExtractParameters.Extract.DIMENSION_EXTRACT_CLASS) @Parameter( names = {"-ede", "--extractDimensionExtractClass"}, description = "Class to extract dimensions into a simple feature output") private String extractDimensionExtractClass; 
@ExtractParameter(ExtractParameters.Extract.OUTPUT_DATA_TYPE_ID) @Parameter(names = {"-eot", "--extractOutputDataTypeId"}, description = "Output Data Type ID") private String extractOutputDataTypeId; @ExtractParameter(ExtractParameters.Extract.REDUCER_COUNT) @Parameter( names = {"-erc", "--extractReducerCount"}, description = "Number of Reducers For initial data extraction and de-duplication") private String extractReducerCount; @GlobalParameter(GlobalParameters.Global.BATCH_ID) @Parameter(names = {"-b", "--globalBatchId"}, description = "Batch ID") private String globalBatchId; @GlobalParameter(GlobalParameters.Global.PARENT_BATCH_ID) @Parameter(names = {"-pb", "--globalParentBatchId"}, description = "Batch ID") private String globalParentBatchId; @HullParameter(HullParameters.Hull.DATA_NAMESPACE_URI) @Parameter( names = {"-hns", "--hullDataNamespaceUri"}, description = "Data Type Namespace for a centroid item") private String hullDataNamespaceUri; @HullParameter(HullParameters.Hull.DATA_TYPE_ID) @Parameter(names = {"-hdt", "--hullDataTypeId"}, description = "Data Type ID for a centroid item") private String hullDataTypeId; @HullParameter(HullParameters.Hull.INDEX_NAME) @Parameter(names = {"-hid", "--hullIndexId"}, description = "Index Identifier for Centroids") private String hullIndexId; @HullParameter(HullParameters.Hull.PROJECTION_CLASS) @Parameter( names = {"-hpe", "--hullProjectionClass"}, description = "Class to project on to 2D space. Implements org.locationtech.geowave.analytics.tools.Projection") private String hullProjectionClass; @HullParameter(HullParameters.Hull.REDUCER_COUNT) @Parameter(names = {"-hrc", "--hullReducerCount"}, description = "Centroid Reducer Count") private String hullReducerCount; @HullParameter(HullParameters.Hull.WRAPPER_FACTORY_CLASS) @Parameter( names = {"-hfc", "--hullWrapperFactoryClass"}, description = "Class to create analytic item to capture hulls. 
Implements org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory") private String hullWrapperFactoryClass; public String getCentroidExtractorClass() { return centroidExtractorClass; } public void setCentroidExtractorClass(final String centroidExtractorClass) { this.centroidExtractorClass = centroidExtractorClass; } public String getCentroidIndexId() { return centroidIndexId; } public void setCentroidIndexId(final String centroidIndexId) { this.centroidIndexId = centroidIndexId; } public String getCentroidWrapperFactoryClass() { return centroidWrapperFactoryClass; } public void setCentroidWrapperFactoryClass(final String centroidWrapperFactoryClass) { this.centroidWrapperFactoryClass = centroidWrapperFactoryClass; } public String getCentroidZoomLevel() { return centroidZoomLevel; } public void setCentroidZoomLevel(final String centroidZoomLevel) { this.centroidZoomLevel = centroidZoomLevel; } public String getClusteringConverganceTolerance() { return clusteringConverganceTolerance; } public void setClusteringConverganceTolerance(final String clusteringConverganceTolerance) { this.clusteringConverganceTolerance = clusteringConverganceTolerance; } public String getClusteringMaxIterations() { return clusteringMaxIterations; } public void setClusteringMaxIterations(final String clusteringMaxIterations) { this.clusteringMaxIterations = clusteringMaxIterations; } public String getClusteringMaxReducerCount() { return clusteringMaxReducerCount; } public void setClusteringMaxReducerCount(final String clusteringMaxReducerCount) { this.clusteringMaxReducerCount = clusteringMaxReducerCount; } public String getClusteringZoomLevels() { return clusteringZoomLevels; } public void setClusteringZoomLevels(final String clusteringZoomLevels) { this.clusteringZoomLevels = clusteringZoomLevels; } public String getCommonDimensionExtractClass() { return commonDimensionExtractClass; } public void setCommonDimensionExtractClass(final String commonDimensionExtractClass) { 
this.commonDimensionExtractClass = commonDimensionExtractClass; } public String getExtractDataNamespaceUri() { return extractDataNamespaceUri; } public void setExtractDataNamespaceUri(final String extractDataNamespaceUri) { this.extractDataNamespaceUri = extractDataNamespaceUri; } public String getExtractDimensionExtractClass() { return extractDimensionExtractClass; } public void setExtractDimensionExtractClass(final String extractDimensionExtractClass) { this.extractDimensionExtractClass = extractDimensionExtractClass; } public String getExtractOutputDataTypeId() { return extractOutputDataTypeId; } public void setExtractOutputDataTypeId(final String extractOutputDataTypeId) { this.extractOutputDataTypeId = extractOutputDataTypeId; } public String getExtractReducerCount() { return extractReducerCount; } public void setExtractReducerCount(final String extractReducerCount) { this.extractReducerCount = extractReducerCount; } public String getGlobalBatchId() { return globalBatchId; } public void setGlobalBatchId(final String globalBatchId) { this.globalBatchId = globalBatchId; } public String getGlobalParentBatchId() { return globalParentBatchId; } public void setGlobalParentBatchId(final String globalParentBatchId) { this.globalParentBatchId = globalParentBatchId; } public String getHullDataNamespaceUri() { return hullDataNamespaceUri; } public void setHullDataNamespaceUri(final String hullDataNamespaceUri) { this.hullDataNamespaceUri = hullDataNamespaceUri; } public String getHullDataTypeId() { return hullDataTypeId; } public void setHullDataTypeId(final String hullDataTypeId) { this.hullDataTypeId = hullDataTypeId; } public String getHullIndexId() { return hullIndexId; } public void setHullIndexId(final String hullIndexId) { this.hullIndexId = hullIndexId; } public String getHullProjectionClass() { return hullProjectionClass; } public void setHullProjectionClass(final String hullProjectionClass) { this.hullProjectionClass = hullProjectionClass; } public String 
getHullReducerCount() { return hullReducerCount; } public void setHullReducerCount(final String hullReducerCount) { this.hullReducerCount = hullReducerCount; } public String getHullWrapperFactoryClass() { return hullWrapperFactoryClass; } public void setHullWrapperFactoryClass(final String hullWrapperFactoryClass) { this.hullWrapperFactoryClass = hullWrapperFactoryClass; } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/KMeansJumpOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations.options;

import org.locationtech.geowave.analytic.param.JumpParameters;
import org.locationtech.geowave.analytic.param.SampleParameters;
import org.locationtech.geowave.analytic.param.annotations.JumpParameter;
import org.locationtech.geowave.analytic.param.annotations.SampleParameter;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;

/**
 * JCommander options bean for the KMeans "jump" method. Each field carries both a JCommander
 * {@code @Parameter} (command-line binding) and a GeoWave analytic parameter annotation so that
 * parsed values can be copied into a {@code PropertyManagement} via reflection.
 */
public class KMeansJumpOptions {
  @JumpParameter(JumpParameters.Jump.KPLUSPLUS_MIN)
  @Parameter(
      names = {"-jkp", "--jumpKplusplusMin"},
      required = true,
      description = "The minimum k when K means ++ takes over sampling.")
  private String jumpKplusplusMin;

  @JumpParameter(JumpParameters.Jump.RANGE_OF_CENTROIDS)
  @Parameter(
      names = {"-jrc", "--jumpRangeOfCentroids"},
      required = true,
      description = "Comma-separated range of centroids (e.g. 2,100)",
      converter = NumericRangeConverter.class)
  private NumericRange jumpRangeOfCentroids;

  @SampleParameter(SampleParameters.Sample.SAMPLE_RANK_FUNCTION)
  @Parameter(
      names = {"-srf", "--sampleSampleRankFunction"},
      hidden = true,
      description = "The rank function used when sampling the first N highest rank items.")
  private String sampleSampleRankFunction;

  @SampleParameter(SampleParameters.Sample.SAMPLE_SIZE)
  @Parameter(names = {"-sss", "--sampleSampleSize"}, hidden = true, description = "Sample Size")
  private String sampleSampleSize;

  public String getJumpKplusplusMin() {
    return jumpKplusplusMin;
  }

  public void setJumpKplusplusMin(final String jumpKplusplusMin) {
    this.jumpKplusplusMin = jumpKplusplusMin;
  }

  public NumericRange getJumpRangeOfCentroids() {
    return jumpRangeOfCentroids;
  }

  public void setJumpRangeOfCentroids(final NumericRange jumpRangeOfCentroids) {
    this.jumpRangeOfCentroids = jumpRangeOfCentroids;
  }

  public String getSampleSampleRankFunction() {
    return sampleSampleRankFunction;
  }

  public void setSampleSampleRankFunction(final String sampleSampleRankFunction) {
    this.sampleSampleRankFunction = sampleSampleRankFunction;
  }

  public String getSampleSampleSize() {
    return sampleSampleSize;
  }

  public void setSampleSampleSize(final String sampleSampleSize) {
    this.sampleSampleSize = sampleSampleSize;
  }

  /**
   * Converts a command-line string into a {@link NumericRange}. Accepts either "lo,hi" or a
   * single number, which is treated as the upper bound with an implicit lower bound of 0.
   * Implemented with the parameterized {@code IStringConverter<NumericRange>} interface (the raw
   * type previously used forced an unchecked conversion), and the redundant
   * {@code value.toString()} on a String argument has been removed.
   */
  public static class NumericRangeConverter implements IStringConverter<NumericRange> {
    @Override
    public NumericRange convert(final String value) {
      final String[] parts = value.split(",");
      try {
        if (parts.length == 2) {
          return new NumericRange(
              Double.parseDouble(parts[0].trim()),
              Double.parseDouble(parts[1].trim()));
        }
        return new NumericRange(0, Double.parseDouble(value));
      } catch (final Exception ex) {
        // Wrap parse failures so JCommander reports them as a usage error with context.
        throw new ParameterException("Invalid range parameter " + value, ex);
      }
    }
  }
}

================================================
FILE:
analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/KMeansParallelOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations.options;

import org.locationtech.geowave.analytic.param.SampleParameters;
import org.locationtech.geowave.analytic.param.annotations.SampleParameter;
import com.beust.jcommander.Parameter;

/**
 * JCommander options bean for the KMeans parallel initialization sampling parameters. Each field
 * is doubly annotated: the JCommander {@code @Parameter} binds it to the command line, and the
 * {@code @SampleParameter} identifies the analytic property it feeds.
 */
public class KMeansParallelOptions {

  @SampleParameter(SampleParameters.Sample.MAX_SAMPLE_SIZE)
  @Parameter(
      names = {"-sxs", "--sampleMaxSampleSize"},
      required = true,
      description = "Max Sample Size")
  private String sampleMaxSampleSize;

  /** Upper bound on the number of samples taken per iteration. */
  public String getSampleMaxSampleSize() {
    return sampleMaxSampleSize;
  }

  public void setSampleMaxSampleSize(final String maxSize) {
    sampleMaxSampleSize = maxSize;
  }

  @SampleParameter(SampleParameters.Sample.MIN_SAMPLE_SIZE)
  @Parameter(
      names = {"-sms", "--sampleMinSampleSize"},
      required = true,
      description = "Minimum Sample Size")
  private String sampleMinSampleSize;

  /** Lower bound on the number of samples taken per iteration. */
  public String getSampleMinSampleSize() {
    return sampleMinSampleSize;
  }

  public void setSampleMinSampleSize(final String minSize) {
    sampleMinSampleSize = minSize;
  }

  @SampleParameter(SampleParameters.Sample.SAMPLE_ITERATIONS)
  @Parameter(
      names = {"-ssi", "--sampleSampleIterations"},
      required = true,
      description = "Minimum number of sample iterations")
  private String sampleSampleIterations;

  /** Minimum number of sampling passes to run. */
  public String getSampleSampleIterations() {
    return sampleSampleIterations;
  }

  public void setSampleSampleIterations(final String iterations) {
    sampleSampleIterations = iterations;
  }
}

================================================
FILE:
analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/NearestNeighborOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations.options;

import org.locationtech.geowave.analytic.param.OutputParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.param.annotations.OutputParameter;
import org.locationtech.geowave.analytic.param.annotations.PartitionParameter;
import com.beust.jcommander.Parameter;

/**
 * JCommander options bean for the Nearest Neighbor analytic. Each field carries both a JCommander
 * {@code @Parameter} (command-line binding) and a GeoWave analytic parameter annotation so that
 * parsed values can be copied into a {@code PropertyManagement} via reflection.
 */
public class NearestNeighborOptions {
  @OutputParameter(OutputParameters.Output.HDFS_OUTPUT_PATH)
  @Parameter(
      names = {"-oop", "--outputHdfsOutputPath"},
      required = true,
      description = "Output HDFS File Path")
  private String outputHdfsOutputPath;

  @PartitionParameter(PartitionParameters.Partition.MAX_MEMBER_SELECTION)
  @Parameter(
      names = {"-pms", "--partitionMaxMemberSelection"},
      description = "Maximum number of members selected from a partition")
  private String partitionMaxMemberSelection;

  // Description fixed: previously read "Index Identifier for Centroids" (copy-paste error).
  @PartitionParameter(PartitionParameters.Partition.PARTITIONER_CLASS)
  @Parameter(
      names = {"-pc", "--partitionPartitionerClass"},
      description = "Class used to perform partitioning")
  private String partitionPartitionerClass;

  @PartitionParameter(PartitionParameters.Partition.MAX_DISTANCE)
  @Parameter(
      names = {"-pmd", "--partitionMaxDistance"},
      required = true,
      description = "Maximum Partition Distance")
  private String partitionMaxDistance;

  @PartitionParameter(PartitionParameters.Partition.PARTITION_PRECISION)
  @Parameter(names = {"-pp", "--partitionPartitionPrecision"}, description = "Partition Precision")
  private String partitionPartitionPrecision;

  @PartitionParameter(PartitionParameters.Partition.DISTANCE_THRESHOLDS)
  @Parameter(
      names = {"-pdt", "--partitionDistanceThresholds"},
      description = "Comma separated list of distance thresholds, per dimension")
  private String partitioningDistanceThresholds;

  @PartitionParameter(PartitionParameters.Partition.GEOMETRIC_DISTANCE_UNIT)
  @Parameter(
      names = {"-pdu", "--partitionGeometricDistanceUnit"},
      description = "Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit)")
  private String partitioningGeometricDistanceUnit;

  @PartitionParameter(PartitionParameters.Partition.SECONDARY_PARTITIONER_CLASS)
  @Parameter(
      names = {"-psp", "--partitionSecondaryPartitionerClass"},
      description = "Perform secondary partitioning with the provided class")
  private String partitionSecondaryPartitionerClass;

  // Standard bean accessors below; JCommander populates the fields directly via the annotations.

  public String getOutputHdfsOutputPath() {
    return outputHdfsOutputPath;
  }

  public void setOutputHdfsOutputPath(final String outputHdfsOutputPath) {
    this.outputHdfsOutputPath = outputHdfsOutputPath;
  }

  public String getPartitionMaxMemberSelection() {
    return partitionMaxMemberSelection;
  }

  public void setPartitionMaxMemberSelection(final String partitionMaxMemberSelection) {
    this.partitionMaxMemberSelection = partitionMaxMemberSelection;
  }

  public String getPartitionPartitionerClass() {
    return partitionPartitionerClass;
  }

  public void setPartitionPartitionerClass(final String partitionPartitionerClass) {
    this.partitionPartitionerClass = partitionPartitionerClass;
  }

  public String getPartitionMaxDistance() {
    return partitionMaxDistance;
  }

  public void setPartitionMaxDistance(final String partitionMaxDistance) {
    this.partitionMaxDistance = partitionMaxDistance;
  }

  public String getPartitionSecondaryPartitionerClass() {
    return partitionSecondaryPartitionerClass;
  }

  public void setPartitionSecondaryPartitionerClass(
      final String partitionSecondaryPartitionerClass) {
    this.partitionSecondaryPartitionerClass = partitionSecondaryPartitionerClass;
  }

  public String getPartitionPartitionPrecision() {
    return partitionPartitionPrecision;
  }

  public void setPartitionPartitionPrecision(final String partitionPartitionPrecision) {
    this.partitionPartitionPrecision = partitionPartitionPrecision;
  }

  public String getPartitioningDistanceThresholds() {
    return partitioningDistanceThresholds;
  }

  public void setPartitioningDistanceThresholds(final String partitioningDistanceThresholds) {
    this.partitioningDistanceThresholds = partitioningDistanceThresholds;
  }

  public String getPartitioningGeometricDistanceUnit() {
    return partitioningGeometricDistanceUnit;
  }

  public void setPartitioningGeometricDistanceUnit(final String partitioningGeometricDistanceUnit) {
    this.partitioningGeometricDistanceUnit = partitioningGeometricDistanceUnit;
  }
}

================================================
FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/PropertyManagementConverter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations.options; import java.lang.reflect.AnnotatedElement; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.param.ParameterEnum; import org.locationtech.geowave.analytic.param.annotations.CentroidParameter; import org.locationtech.geowave.analytic.param.annotations.ClusteringParameter; import org.locationtech.geowave.analytic.param.annotations.CommonParameter; import org.locationtech.geowave.analytic.param.annotations.ExtractParameter; import org.locationtech.geowave.analytic.param.annotations.GlobalParameter; import org.locationtech.geowave.analytic.param.annotations.HullParameter; import org.locationtech.geowave.analytic.param.annotations.InputParameter; import org.locationtech.geowave.analytic.param.annotations.JumpParameter; import org.locationtech.geowave.analytic.param.annotations.MapReduceParameter; import org.locationtech.geowave.analytic.param.annotations.OutputParameter; import org.locationtech.geowave.analytic.param.annotations.PartitionParameter; import org.locationtech.geowave.analytic.param.annotations.SampleParameter; import org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator; import org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap; import org.locationtech.geowave.core.cli.prefix.TranslationEntry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This is a stop-gap measure which allows us to copy parameters read from the command line into the * PropertyManagement object. 
*/ public class PropertyManagementConverter { static final Logger LOGGER = LoggerFactory.getLogger(PropertyManagementConverter.class); final PropertyManagement properties; public PropertyManagementConverter(final PropertyManagement properties) { this.properties = properties; } public PropertyManagement getProperties() { return properties; } /** * Find annotations in the object, and copy the values to the PropertyManagement * * @param object */ public void readProperties(final Object object) { final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(object); final JCommanderTranslationMap map = translator.translate(); for (final TranslationEntry entry : map.getEntries().values()) { // Has annotation? final AnnotatedElement element = entry.getMember(); final CentroidParameter centroid = element.getAnnotation(CentroidParameter.class); final ClusteringParameter clustering = element.getAnnotation(ClusteringParameter.class); final CommonParameter common = element.getAnnotation(CommonParameter.class); final ExtractParameter extract = element.getAnnotation(ExtractParameter.class); final GlobalParameter global = element.getAnnotation(GlobalParameter.class); final HullParameter hull = element.getAnnotation(HullParameter.class); final InputParameter input = element.getAnnotation(InputParameter.class); final JumpParameter jump = element.getAnnotation(JumpParameter.class); final MapReduceParameter mapReduce = element.getAnnotation(MapReduceParameter.class); final OutputParameter output = element.getAnnotation(OutputParameter.class); final PartitionParameter partition = element.getAnnotation(PartitionParameter.class); final SampleParameter sample = element.getAnnotation(SampleParameter.class); if (centroid != null) { handleEnum(entry, centroid.value()); } if (clustering != null) { handleEnum(entry, clustering.value()); } if (common != null) { handleEnum(entry, common.value()); } if (extract != null) { handleEnum(entry, 
extract.value()); } if (global != null) { handleEnum(entry, global.value()); } if (hull != null) { handleEnum(entry, hull.value()); } if (input != null) { handleEnum(entry, input.value()); } if (jump != null) { handleEnum(entry, jump.value()); } if (mapReduce != null) { handleEnum(entry, mapReduce.value()); } if (output != null) { handleEnum(entry, output.value()); } if (partition != null) { handleEnum(entry, partition.value()); } if (sample != null) { handleEnum(entry, sample.value()); } } } /** * For a single value, copy the value from the object to PropertyManagement. * * @param entry * @param enumVal */ @SuppressWarnings("unchecked") private void handleEnum(final TranslationEntry entry, final ParameterEnum[] enumVals) { final Object value = entry.getParam().get(entry.getObject()); if (value != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug( String.format( "Analytic Property Value: %s = %s", entry.getAsPropertyName(), value.toString())); } for (final ParameterEnum enumVal : enumVals) { ((ParameterEnum) enumVal).getHelper().setValue(properties, value); } } } } ================================================ FILE: analytics/mapreduce/src/main/java/org/locationtech/geowave/analytic/mapreduce/operations/options/QueryOptionsCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.operations.options;

import com.beust.jcommander.Parameter;

/**
 * Command-line options selecting which data is queried during an analytic extract: authorizations,
 * type names, and an optional index name. Populated by JCommander via the {@code @Parameter}
 * annotations.
 */
public class QueryOptionsCommand {

  @Parameter(
      names = "--auth",
      description = "The comma-separated list of authorizations used during extract; by default all authorizations are used.")
  private String[] authorizations;

  // Fixed typo in user-facing help text: "data typess" -> "data types"
  @Parameter(
      names = "--typeNames",
      required = true,
      description = "The comma-separated list of data types to query; by default all data types are used.")
  private String[] typeNames = null;

  @Parameter(
      names = "--indexName",
      description = "The specific index to query; by default one is chosen for each adapter.")
  private String indexName = null;

  public QueryOptionsCommand() {}

  public String[] getAuthorizations() {
    return authorizations;
  }

  public void setAuthorizations(final String[] authorizations) {
    this.authorizations = authorizations;
  }

  public String[] getTypeNames() {
    return typeNames;
  }

  public void setTypeNames(final String[] typeNames) {
    this.typeNames = typeNames;
  }

  public String getIndexName() {
    return indexName;
  }

  public void setIndexName(final String indexName) {
    this.indexName = indexName;
  }
}

================================================
FILE: analytics/mapreduce/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi
================================================
org.locationtech.geowave.analytic.mapreduce.operations.AnalyticOperationCLIProvider

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/TestMapReducePersistableRegistry.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce;

import org.locationtech.geowave.analytic.mapreduce.kmeans.TestObjectDataAdapter;
import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;
import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;

/**
 * Test-only persistable registry that registers {@link TestObjectDataAdapter} so it can be
 * round-tripped through GeoWave's persistence mechanism during the mapreduce tests.
 */
public class TestMapReducePersistableRegistry implements
    PersistableRegistrySpi,
    InternalPersistableRegistry {

  @Override
  public PersistableIdAndConstructor[] getSupportedPersistables() {
    // 10750 is the persistable ID used only by this test adapter.
    return new PersistableIdAndConstructor[] {
        new PersistableIdAndConstructor((short) 10750, TestObjectDataAdapter::new),};
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/ConvexHullJobRunnerTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.clustering.runner;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.util.Tool;
import org.geotools.feature.type.BasicFeatureTypes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.Projection;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.ScopedJobConfiguration;
import org.locationtech.geowave.analytic.SimpleFeatureProjection;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.MapReduceIntegration;
import org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;
import org.locationtech.geowave.analytic.mapreduce.clustering.ConvexHullMapReduce;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.HullParameters;
import org.locationtech.geowave.analytic.param.InputParameters;
import org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;
import org.locationtech.geowave.analytic.param.ParameterHelper;
import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Unit test for {@link ConvexHullJobRunner}: wires the runner to a stub
 * {@link MapReduceIntegration} whose {@code waitForCompletion} asserts that the job was configured
 * from the supplied {@link PropertyManagement} (input format, reducer count, input path, store,
 * projection class, zoom level) instead of actually running MapReduce.
 */
public class ConvexHullJobRunnerTest {

  private final ConvexHullJobRunner hullRunner = new ConvexHullJobRunner();
  private final PropertyManagement runTimeProperties = new PropertyManagement();

  @Rule
  public TestName name = new TestName();

  @Before
  public void init() {
    // Feature type used to register a test adapter in the in-memory store below.
    final SimpleFeatureType ftype =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroidtest",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
    hullRunner.setMapReduceIntegrater(new MapReduceIntegration() {
      @Override
      public int submit(
          final Configuration configuration,
          final PropertyManagement runTimeProperties,
          final GeoWaveAnalyticJobRunner tool) throws Exception {
        tool.setConf(configuration);
        // Mirror what the real integration does: pass the input store through the configuration.
        ((ParameterHelper) StoreParam.INPUT_STORE.getHelper()).setValue(
            configuration,
            ConvexHullMapReduce.class,
            StoreParam.INPUT_STORE.getHelper().getValue(runTimeProperties));
        return tool.run(new String[] {});
      }

      @Override
      public Counters waitForCompletion(final Job job)
          throws ClassNotFoundException, IOException, InterruptedException {
        // All assertions about job configuration happen here, in place of a real run.
        Assert.assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
        Assert.assertEquals(10, job.getNumReduceTasks());
        final ScopedJobConfiguration configWrapper =
            new ScopedJobConfiguration(job.getConfiguration(), ConvexHullMapReduce.class);
        Assert.assertEquals("file://foo/bin", job.getConfiguration().get("mapred.input.dir"));
        final PersistableStore persistableStore =
            (PersistableStore) StoreParam.INPUT_STORE.getHelper().getValue(
                job,
                ConvexHullMapReduce.class,
                null);
        final IndexStore indexStore = persistableStore.getDataStoreOptions().createIndexStore();
        try {
          Assert.assertTrue(indexStore.indexExists("spatial"));
          final PersistableStore persistableAdapterStore =
              (PersistableStore) StoreParam.INPUT_STORE.getHelper().getValue(
                  job,
                  ConvexHullMapReduce.class,
                  null);
          final PersistentAdapterStore adapterStore =
              persistableAdapterStore.getDataStoreOptions().createAdapterStore();
          // The adapter registered in init() must be visible through the job's store.
          Assert.assertTrue(
              adapterStore.adapterExists(
                  persistableAdapterStore.getDataStoreOptions().createInternalAdapterStore().getAdapterId(
                      "centroidtest")));
          final Projection projection =
              configWrapper.getInstance(
                  HullParameters.Hull.PROJECTION_CLASS,
                  Projection.class,
                  SimpleFeatureProjection.class);
          Assert.assertEquals(SimpleFeatureProjection.class, projection.getClass());
        } catch (final InstantiationException e) {
          throw new IOException("Unable to configure system", e);
        } catch (final IllegalAccessException e) {
          throw new IOException("Unable to configure system", e);
        }
        Assert.assertEquals(10, job.getNumReduceTasks());
        Assert.assertEquals(2, configWrapper.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, -1));
        return new Counters();
      }

      @Override
      public Job getJob(final Tool tool) throws IOException {
        return new Job(tool.getConf());
      }

      @Override
      public Configuration getConfiguration(final PropertyManagement runTimeProperties)
          throws IOException {
        return new Configuration();
      }
    });
    hullRunner.setInputFormatConfiguration(new SequenceFileInputFormatConfiguration());
    // Runtime properties the runner is expected to translate into job configuration.
    runTimeProperties.store(MRConfig.HDFS_BASE_DIR, "/");
    runTimeProperties.store(InputParameters.Input.HDFS_INPUT_PATH, new Path("file://foo/bin"));
    runTimeProperties.store(GlobalParameters.Global.BATCH_ID, "b1234");
    runTimeProperties.store(HullParameters.Hull.DATA_TYPE_ID, "hullType");
    runTimeProperties.store(HullParameters.Hull.REDUCER_COUNT, 10);
    runTimeProperties.store(HullParameters.Hull.INDEX_NAME, "spatial");
    // Back the test with an in-memory store; namespace is unique per test method.
    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();
    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(
        "memory",
        new MemoryStoreFactoryFamily());
    pluginOptions.selectPlugin("memory");
    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();
    final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName();
    opts.setGeoWaveNamespace(namespace);
    final PersistableStore store = new PersistableStore(pluginOptions);
    runTimeProperties.store(StoreParam.INPUT_STORE, store);
    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);
    pluginOptions.createAdapterStore().addAdapter(
        adapter.asInternalAdapter(
            pluginOptions.createInternalAdapterStore().addTypeName(adapter.getTypeName())));
  }

  @Test
  public void test() throws Exception {
    // Assertions are performed inside the stubbed waitForCompletion above.
    hullRunner.run(runTimeProperties);
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/clustering/runner/GroupAssigmentJobRunnerTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.clustering.runner;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.util.Tool;
import org.geotools.feature.type.BasicFeatureTypes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.ScopedJobConfiguration;
import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.distance.DistanceFn;
import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;
import org.locationtech.geowave.analytic.distance.GeometryCentroidDistanceFn;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.MapReduceIntegration;
import org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;
import org.locationtech.geowave.analytic.mapreduce.clustering.GroupAssignmentMapReduce;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;
import org.locationtech.geowave.analytic.param.ParameterHelper;
import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Unit test for {@link GroupAssigmentJobRunner}: uses a stub {@link MapReduceIntegration} whose
 * {@code waitForCompletion} asserts the job configuration (input format, reducer count, zoom
 * level, batch IDs, wrapper-factory and distance-function classes) instead of running MapReduce.
 */
public class GroupAssigmentJobRunnerTest {

  final GroupAssigmentJobRunner runner = new GroupAssigmentJobRunner();
  final PropertyManagement runTimeProperties = new PropertyManagement();

  @Rule
  public TestName name = new TestName();

  @Before
  public void init() {
    // Feature type used to register a test adapter in the in-memory store below.
    final SimpleFeatureType ftype =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroidtest",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
    runner.setMapReduceIntegrater(new MapReduceIntegration() {
      @Override
      public int submit(
          final Configuration configuration,
          final PropertyManagement runTimeProperties,
          final GeoWaveAnalyticJobRunner tool) throws Exception {
        tool.setConf(configuration);
        // Mirror what the real integration does: pass the input store through the configuration.
        ((ParameterHelper) StoreParam.INPUT_STORE.getHelper()).setValue(
            configuration,
            GroupAssignmentMapReduce.class,
            StoreParam.INPUT_STORE.getHelper().getValue(runTimeProperties));
        return tool.run(new String[] {});
      }

      @Override
      public Counters waitForCompletion(final Job job)
          throws ClassNotFoundException, IOException, InterruptedException {
        // All assertions about job configuration happen here, in place of a real run.
        Assert.assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
        Assert.assertEquals(10, job.getNumReduceTasks());
        final ScopedJobConfiguration configWrapper =
            new ScopedJobConfiguration(job.getConfiguration(), GroupAssignmentMapReduce.class);
        Assert.assertEquals("file://foo/bin", job.getConfiguration().get("mapred.input.dir"));
        Assert.assertEquals(3, configWrapper.getInt(CentroidParameters.Centroid.ZOOM_LEVEL, -1));
        Assert.assertEquals(
            "b1234",
            configWrapper.getString(GlobalParameters.Global.PARENT_BATCH_ID, ""));
        Assert.assertEquals(
            "b12345",
            configWrapper.getString(GlobalParameters.Global.BATCH_ID, ""));
        try {
          final AnalyticItemWrapperFactory wrapper =
              configWrapper.getInstance(
                  CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,
                  AnalyticItemWrapperFactory.class,
                  SimpleFeatureItemWrapperFactory.class);
          Assert.assertEquals(SimpleFeatureItemWrapperFactory.class, wrapper.getClass());
          // The stored FeatureCentroidDistanceFn must win over the GeometryCentroidDistanceFn
          // default supplied here.
          final DistanceFn distancFn =
              configWrapper.getInstance(
                  CommonParameters.Common.DISTANCE_FUNCTION_CLASS,
                  DistanceFn.class,
                  GeometryCentroidDistanceFn.class);
          Assert.assertEquals(FeatureCentroidDistanceFn.class, distancFn.getClass());
        } catch (final InstantiationException e) {
          throw new IOException("Unable to configure system", e);
        } catch (final IllegalAccessException e) {
          throw new IOException("Unable to configure system", e);
        }
        return new Counters();
      }

      @Override
      public Job getJob(final Tool tool) throws IOException {
        return new Job(tool.getConf());
      }

      @Override
      public Configuration getConfiguration(final PropertyManagement runTimeProperties)
          throws IOException {
        return new Configuration();
      }
    });
    runner.setInputFormatConfiguration(
        new SequenceFileInputFormatConfiguration(new Path("file://foo/bin")));
    runner.setZoomLevel(3);
    runner.setReducerCount(10);
    // Runtime properties the runner is expected to translate into job configuration.
    runTimeProperties.store(MRConfig.HDFS_BASE_DIR, "/");
    runTimeProperties.store(GlobalParameters.Global.BATCH_ID, "b12345");
    runTimeProperties.store(GlobalParameters.Global.PARENT_BATCH_ID, "b1234");
    runTimeProperties.store(
        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,
        FeatureCentroidDistanceFn.class);
    // Back the test with an in-memory store; namespace is unique per test method.
    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();
    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(
        "memory",
        new MemoryStoreFactoryFamily());
    pluginOptions.selectPlugin("memory");
    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();
    final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName();
    opts.setGeoWaveNamespace(namespace);
    final PersistableStore store = new PersistableStore(pluginOptions);
    runTimeProperties.store(StoreParam.INPUT_STORE, store);
    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);
    pluginOptions.createAdapterStore().addAdapter(
        adapter.asInternalAdapter(
            pluginOptions.createInternalAdapterStore().addTypeName(adapter.getTypeName())));
  }

  @Test
  public void test() throws Exception {
    // Assertions are performed inside the stubbed waitForCompletion above.
    runner.run(runTimeProperties);
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/dbscan/DBScanMapReduceTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.dbscan;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.apache.hadoop.mrunit.types.Pair;
import org.geotools.feature.type.BasicFeatureTypes;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.adapter.vector.FeatureWritable;
import org.locationtech.geowave.analytic.AdapterWithObjectWritable;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.Projection;
import org.locationtech.geowave.analytic.SimpleFeatureProjection;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.mapreduce.kmeans.SimpleFeatureImplSerialization;
import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce;
import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable;
import org.locationtech.geowave.analytic.param.ClusteringParameters;
import org.locationtech.geowave.analytic.param.HullParameters;
import org.locationtech.geowave.analytic.param.PartitionParameters;
import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;
import org.locationtech.geowave.mapreduce.JobContextAdapterStore;
import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * MRUnit test for the DBScan map/reduce pipeline: the NN mapper partitions test features, and the
 * DBScan hull reducer clusters them subject to a minimum cluster size.
 *
 * NOTE(review): several declarations below appear with missing/raw generic type parameters (e.g.
 * {@code MapDriver}, {@code List>}) — they look to have been lost in an extraction step; confirm
 * the correct parameterizations against version control before relying on this text.
 */
public class DBScanMapReduceTest {

  MapDriver mapDriver;
  ReduceDriver reduceDriver;
  SimpleFeatureType ftype;
  // Fixed-precision geometry factory in EPSG:4326.
  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(0.000001), 4326);
  short adapterId = 1234;
  final NNMapReduce.NNMapper nnMapper = new NNMapReduce.NNMapper<>();
  final NNMapReduce.NNReducer> nnReducer = new DBScanMapReduce.DBScanMapHullReducer();

  @Before
  public void setUp() throws IOException {
    mapDriver = MapDriver.newMapDriver(nnMapper);
    reduceDriver = ReduceDriver.newReduceDriver(nnReducer);
    // Partitioning thresholds for the mapper and max distance for the reducer.
    mapDriver.getConfiguration().set(
        GeoWaveConfiguratorBase.enumToConfKey(
            NNMapReduce.class,
            PartitionParameters.Partition.DISTANCE_THRESHOLDS),
        "10,10");
    reduceDriver.getConfiguration().setDouble(
        GeoWaveConfiguratorBase.enumToConfKey(
            NNMapReduce.class,
            PartitionParameters.Partition.MAX_DISTANCE),
        10);
    ftype =
        AnalyticFeature.createGeometryFeatureAdapter(
            "centroid",
            new String[] {"extra1"},
            BasicFeatureTypes.DEFAULT_NAMESPACE,
            ClusteringUtils.CLUSTERING_CRS).getFeatureType();
    reduceDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            DBScanMapReduce.class,
            HullParameters.Hull.PROJECTION_CLASS),
        SimpleFeatureProjection.class,
        Projection.class);
    // Register the adapter/type name with both drivers' job contexts.
    final FeatureDataAdapter adapter = new FeatureDataAdapter(ftype);
    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), adapter);
    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), adapter);
    JobContextInternalAdapterStore.addTypeName(
        mapDriver.getConfiguration(),
        adapter.getTypeName(),
        adapterId);
    JobContextInternalAdapterStore.addTypeName(
        reduceDriver.getConfiguration(),
        adapter.getTypeName(),
        adapterId);
    serializations();
  }

  // Builds a single-point test feature with the given name at the given coordinate.
  private SimpleFeature createTestFeature(final String name, final Coordinate coord) {
    return AnalyticFeature.createGeometryFeature(
        ftype,
        "b1",
        name,
        name,
        "NA",
        20.30203,
        factory.createPoint(coord),
        new String[] {"extra1"},
        new double[] {0.022},
        1,
        1,
        0);
  }

  // Appends SimpleFeatureImplSerialization to Hadoop's io.serializations on both drivers.
  private void serializations() {
    final String[] strings = reduceDriver.getConfiguration().getStrings("io.serializations");
    final String[] newStrings = new String[strings.length + 1];
    System.arraycopy(strings, 0, newStrings, 0, strings.length);
    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();
    reduceDriver.getConfiguration().setStrings("io.serializations", newStrings);
    mapDriver.getConfiguration().setStrings("io.serializations", newStrings);
  }

  @Test
  public void testReducer() throws IOException {
    // Two tight groups (around 30,30 and 50.001,50.001) plus one outlier at 40,40.
    final SimpleFeature feature1 = createTestFeature("f1", new Coordinate(30.0, 30.00000001));
    final SimpleFeature feature2 = createTestFeature("f2", new Coordinate(50.001, 50.001));
    final SimpleFeature feature3 =
        createTestFeature("f3", new Coordinate(30.00000001, 30.00000001));
    final SimpleFeature feature4 = createTestFeature("f4", new Coordinate(50.0011, 50.00105));
    final SimpleFeature feature5 = createTestFeature("f5", new Coordinate(50.00112, 50.00111));
    final SimpleFeature feature6 =
        createTestFeature("f6", new Coordinate(30.00000001, 30.00000002));
    final SimpleFeature feature7 = createTestFeature("f7", new Coordinate(50.00113, 50.00114));
    final SimpleFeature feature8 =
        createTestFeature("f8", new Coordinate(40.00000001, 40.000000002));
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature1.getID())), feature1);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature2.getID())), feature2);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature3.getID())), feature3);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature4.getID())), feature4);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature5.getID())), feature5);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature6.getID())), feature6);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature7.getID())), feature7);
    mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature8.getID())), feature8);
    final List> mapperResults = mapDriver.run();
    assertNotNull(getPartitionDataFor(mapperResults, feature1.getID(), true));
    assertNotNull(getPartitionDataFor(mapperResults, feature2.getID(), true));
    assertNotNull(getPartitionDataFor(mapperResults, feature2.getID(), true));
    assertNotNull(getPartitionDataFor(mapperResults, feature3.getID(), true));
    // Features in the same tight group must land in the same partition.
    assertEquals(
        getPartitionDataFor(mapperResults, feature1.getID(), true).getCompositeKey(),
        getPartitionDataFor(mapperResults, feature3.getID(), true).getCompositeKey());
    assertEquals(
        getPartitionDataFor(mapperResults, feature6.getID(), true).getCompositeKey(),
        getPartitionDataFor(mapperResults, feature3.getID(), true).getCompositeKey());
    assertEquals(
        getPartitionDataFor(mapperResults, feature5.getID(), true).getCompositeKey(),
        getPartitionDataFor(mapperResults, feature7.getID(), true).getCompositeKey());
    assertEquals(
        getPartitionDataFor(mapperResults, feature5.getID(), true).getCompositeKey(),
        getPartitionDataFor(mapperResults, feature4.getID(), true).getCompositeKey());
    final List>> partitions = getReducerDataFromMapperInput(mapperResults);
    reduceDriver.addAll(partitions);
    reduceDriver.getConfiguration().setInt(
        GeoWaveConfiguratorBase.enumToConfKey(
            NNMapReduce.class,
            ClusteringParameters.Clustering.MINIMUM_SIZE),
        2);
    final List> reduceResults = reduceDriver.run();
    // With minimum cluster size 2, the outlier is dropped and two clusters remain.
    assertEquals(2, reduceResults.size());
    /*
     * assertEquals( feature3.getID(), find( reduceResults, feature1.getID()).toString());
     *
     * assertEquals( feature1.getID(), find( reduceResults, feature3.getID()).toString());
     *
     * assertEquals( feature4.getID(), find( reduceResults, feature2.getID()).toString());
     *
     * assertEquals( feature2.getID(), find( reduceResults, feature4.getID()).toString());
     */
  }

  // Regroups the mapper's flat output pairs into per-partition input lists for the reducer.
  private List>> getReducerDataFromMapperInput(
      final List> mapperResults) {
    final List>> reducerInputSet = new ArrayList<>();
    for (final Pair pair : mapperResults) {
      getListFor(pair.getFirst(), reducerInputSet).add(pair.getSecond());
    }
    return reducerInputSet;
  }

  // Finds (or creates) the value list associated with the given partition key.
  private List getListFor(
      final PartitionDataWritable pd,
      final List>> reducerInputSet) {
    for (final Pair> pair : reducerInputSet) {
      if (pair.getFirst().compareTo(pd) == 0) {
        return pair.getSecond();
      }
    }
    final List newPairList = new ArrayList<>();
    reducerInputSet.add(new Pair(pd, newPairList));
    return newPairList;
  }

  // Looks up the partition data emitted by the mapper for a feature ID and primary flag.
  private PartitionData getPartitionDataFor(
      final List> mapperResults,
      final String id,
      final boolean primary) {
    for (final Pair pair : mapperResults) {
      if (((FeatureWritable) pair.getSecond().getObjectWritable().get()).getFeature().getID().equals(
          id) && (pair.getFirst().getPartitionData().isPrimary() == primary)) {
        return pair.getFirst().getPartitionData();
      }
    }
    return null;
  }

  // Rounds to six decimal places, matching the factory's precision model.
  private double round(final double value) {
    return (double) Math.round(value * 1000000) / 1000000;
  }

  @Test
  public void test8With4() throws IOException {
    // Eight near-coincident points with a seeded RNG; minimum size 4 yields one cluster.
    final Random r = new Random(3434);
    for (int i = 0; i < 8; i++) {
      final SimpleFeature feature =
          createTestFeature(
              "f" + i,
              new Coordinate(
                  round(30.0 + (r.nextGaussian() * 0.00001)),
                  round(30.0 + (r.nextGaussian() * 0.00001))));
      mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature.getID())), feature);
    }
    final List> mapperResults = mapDriver.run();
    final List>> partitions = getReducerDataFromMapperInput(mapperResults);
    reduceDriver.addAll(partitions);
    reduceDriver.getConfiguration().setInt(
        GeoWaveConfiguratorBase.enumToConfKey(
            NNMapReduce.class,
            ClusteringParameters.Clustering.MINIMUM_SIZE),
        4);
    final List> reduceResults = reduceDriver.run();
    assertEquals(1, reduceResults.size());
  }

  @Test
  public void testScale() throws IOException {
    // 10000 gaussian-distributed points; only checks that at least one cluster is produced.
    final Random r = new Random(3434);
    for (int i = 0; i < 10000; i++) {
      final SimpleFeature feature =
          createTestFeature(
              "f" + i,
              new Coordinate(
                  round(30.0 + (r.nextGaussian() * 0.0001)),
                  round(30.0 + (r.nextGaussian() * 0.0001))));
      mapDriver.addInput(new GeoWaveInputKey(adapterId, new ByteArray(feature.getID())), feature);
    }
    final List> mapperResults = mapDriver.run();
    final List>> partitions = getReducerDataFromMapperInput(mapperResults);
    reduceDriver.addAll(partitions);
    reduceDriver.getConfiguration().setInt(
        GeoWaveConfiguratorBase.enumToConfKey(
            NNMapReduce.class,
            ClusteringParameters.Clustering.MINIMUM_SIZE),
        10);
    final List> reduceResults = reduceDriver.run();
    assertTrue(reduceResults.size() > 0);
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KMeansDistortionMapReduceTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.apache.hadoop.mrunit.types.Pair;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.adapter.vector.FeatureWritable;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory;
import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.clustering.DistortionGroupManagement.DistortionEntry;
import org.locationtech.geowave.analytic.clustering.NestedGroupCentroidAssignment;
import org.locationtech.geowave.analytic.distance.DistanceFn;
import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;
import org.locationtech.geowave.analytic.extract.SimpleFeatureCentroidExtractor;
import org.locationtech.geowave.analytic.mapreduce.CountofDoubleWritable;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.Writer;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;
import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;
import org.locationtech.geowave.mapreduce.JobContextAdapterStore;
import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * MRUnit-based unit tests for the KMeansDistortionMapReduce mapper and reducer. The mapper is fed
 * a single SimpleFeature and is expected to emit the feature's group id with a distortion value;
 * the reducer aggregates per-group distortion contributions into DistortionEntry records. An
 * in-memory GeoWave data store (the "memory" plugin) backs the centroid lookup.
 */
public class KMeansDistortionMapReduceTest {
  // MRUnit harnesses for the mapper and reducer under test (configured in setUp).
  MapDriver mapDriver;
  ReduceDriver reduceDriver;

  // Provides the current test-method name, used to build a unique store namespace per test.
  @Rule
  public TestName name = new TestName();

  final String batchId = "b1";

  // Feature type with one extra attribute ("extra1") in the clustering CRS.
  final SimpleFeatureType ftype =
      AnalyticFeature.createGeometryFeatureAdapter(
          "centroid",
          new String[] {"extra1"},
          "http://geowave.test.net",
          ClusteringUtils.CLUSTERING_CRS).getFeatureType();
  final FeatureDataAdapter testObjectAdapter = new FeatureDataAdapter(ftype);
  // Internal (short) adapter id registered for the feature adapter in both driver configurations.
  short adapterId = 1234;
  private static final List capturedObjects = new ArrayList<>();
  final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
  final GeometryFactory factory = new GeometryFactory();
  final String grp1 = "g1";

  /**
   * Wires up both MRUnit drivers: distance function, adapter/type-name registrations, centroid
   * parameters, an in-memory data store (namespaced per test method), custom Hadoop
   * serializations, and a single pre-ingested centroid feature for group {@code grp1}.
   */
  @Before
  public void setUp() throws IOException {
    final KMeansDistortionMapReduce.KMeansDistortionMapper mapper =
        new KMeansDistortionMapReduce.KMeansDistortionMapper();
    final KMeansDistortionMapReduce.KMeansDistortionReduce reducer =
        new KMeansDistortionMapReduce.KMeansDistortionReduce();
    mapDriver = MapDriver.newMapDriver(mapper);
    reduceDriver = ReduceDriver.newReduceDriver(reducer);
    // The mapper measures distortion with a feature-centroid distance function.
    mapDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KMeansDistortionMapReduce.class,
            CommonParameters.Common.DISTANCE_FUNCTION_CLASS),
        FeatureCentroidDistanceFn.class,
        DistanceFn.class);
    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), testObjectAdapter);
    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), testObjectAdapter);
    JobContextInternalAdapterStore.addTypeName(
        mapDriver.getConfiguration(),
        testObjectAdapter.getTypeName(),
        adapterId);
    JobContextInternalAdapterStore.addTypeName(
        reduceDriver.getConfiguration(),
        testObjectAdapter.getTypeName(),
        adapterId);
    final PropertyManagement propManagement = new PropertyManagement();
    propManagement.store(
        CentroidParameters.Centroid.INDEX_NAME,
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());
    propManagement.store(CentroidParameters.Centroid.DATA_TYPE_ID, ftype.getTypeName());
    propManagement.store(
        CentroidParameters.Centroid.DATA_NAMESPACE_URI,
        ftype.getName().getNamespaceURI());
    propManagement.store(GlobalParameters.Global.BATCH_ID, batchId);
    propManagement.store(
        CentroidParameters.Centroid.EXTRACTOR_CLASS,
        SimpleFeatureCentroidExtractor.class);
    propManagement.store(
        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,
        SimpleFeatureItemWrapperFactory.class);
    // Register and select the in-memory store plugin; namespace is unique per test method so
    // tests do not share state.
    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();
    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(
        "memory",
        new MemoryStoreFactoryFamily());
    pluginOptions.selectPlugin("memory");
    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();
    final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName();
    opts.setGeoWaveNamespace(namespace);
    final PersistableStore store = new PersistableStore(pluginOptions);
    propManagement.store(StoreParam.INPUT_STORE, store);
    NestedGroupCentroidAssignment.setParameters(
        mapDriver.getConfiguration(),
        KMeansDistortionMapReduce.class,
        propManagement);
    serializations();
    capturedObjects.clear();
    // Ingest one centroid feature for group grp1 so the mapper has a centroid to measure against.
    final SimpleFeature feature =
        AnalyticFeature.createGeometryFeature(
            ftype,
            batchId,
            "123",
            "fred",
            grp1,
            20.30203,
            factory.createPoint(new Coordinate(02.33, 0.23)),
            new String[] {"extra1"},
            new double[] {0.022},
            1,
            1,
            0);
    propManagement.store(CentroidParameters.Centroid.ZOOM_LEVEL, 1);
    ingest(pluginOptions.createDataStore(), testObjectAdapter, index, feature);
    CentroidManagerGeoWave.setParameters(
        reduceDriver.getConfiguration(),
        KMeansDistortionMapReduce.class,
        propManagement);
  }

  // Writes a single feature into the given store/index under the adapter's type.
  private void ingest(
      final DataStore dataStore,
      final FeatureDataAdapter adapter,
      final Index index,
      final SimpleFeature feature) throws IOException {
    dataStore.addType(adapter, index);
    try (Writer writer = dataStore.createWriter(adapter.getTypeName())) {
      writer.write(feature);
      // NOTE(review): close() is redundant inside try-with-resources.
      writer.close();
    }
  }

  // Appends the SimpleFeatureImpl serialization to Hadoop's "io.serializations" list so MRUnit
  // can copy SimpleFeature values between map/reduce phases.
  private void serializations() {
    final String[] strings = reduceDriver.getConfiguration().getStrings("io.serializations");
    final String[] newStrings = new String[strings.length + 1];
    System.arraycopy(strings, 0, newStrings, 0, strings.length);
    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();
    reduceDriver.getConfiguration().setStrings("io.serializations", newStrings);
    mapDriver.getConfiguration().setStrings("io.serializations", newStrings);
  }

  /** Mapper emits (group id, distortion) for a feature located at its own centroid. */
  @Test
  public void testMapper() throws IOException {
    final GeoWaveInputKey inputKey = new GeoWaveInputKey();
    inputKey.setInternalAdapterId(adapterId);
    inputKey.setDataId(new ByteArray("abc".getBytes()));
    final ObjectWritable ow = new ObjectWritable();
    ow.set(
        new FeatureWritable(
            ftype,
            AnalyticFeature.createGeometryFeature(
                ftype,
                batchId,
                "123",
                "fred",
                grp1,
                20.30203,
                factory.createPoint(new Coordinate(02.33, 0.23)),
                new String[] {"extra1"},
                new double[] {0.022},
                1,
                1,
                0)));
    mapDriver.withInput(inputKey, ow);
    final List> results = mapDriver.run();
    // output key has the dataID adjusted to contain the rank
    assertEquals(results.get(0).getFirst().toString(), grp1);
    // output value is the same as input value
    assertEquals(results.get(0).getSecond().getValue(), 0.0, 0.0001);
  }

  /**
   * Reducer emits a single DistortionEntry; only group "g1" has a matching centroid in the store,
   * so "g2" input is dropped.
   */
  @Test
  public void testReducer() throws IOException {
    reduceDriver.addInput(
        new Text("g1"),
        Arrays.asList(new CountofDoubleWritable(0.34, 1), new CountofDoubleWritable(0.75, 1)));
    reduceDriver.addInput(
        new Text("g2"),
        Arrays.asList(new CountofDoubleWritable(0.34, 1), new CountofDoubleWritable(0.25, 1)));
    final List> results = reduceDriver.run();
    assertEquals(1, results.size());
    assertTrue(results.get(0).getSecond().getGroupId().equals("g1"));
    assertTrue(results.get(0).getSecond().getClusterCount().equals(1));
    // TODO: floating point error?
    assertTrue(results.get(0).getSecond().getDistortionValue().equals(3.6697247706422016));
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/KSamplerMapReduceTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.apache.hadoop.mrunit.types.Pair;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.locationtech.geowave.analytic.AnalyticFeature;
import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.clustering.CentroidManagerGeoWave;
import org.locationtech.geowave.analytic.clustering.ClusteringUtils;
import org.locationtech.geowave.analytic.extract.CentroidExtractor;
import org.locationtech.geowave.analytic.param.CentroidParameters;
import org.locationtech.geowave.analytic.param.GlobalParameters;
import org.locationtech.geowave.analytic.param.SampleParameters;
import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;
import org.locationtech.geowave.analytic.sample.function.SamplingRankFunction;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;
import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;
import org.locationtech.geowave.mapreduce.JobContextAdapterStore;
import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.locationtech.jts.geom.Coordinate;
import org.slf4j.Logger;

/**
 * MRUnit-based unit tests for the KSamplerMapReduce sampling mapper and reducer, using
 * {@link TestObject} values and stub {@link SamplingRankFunction} implementations that record the
 * objects they were asked to rank.
 */
public class KSamplerMapReduceTest {
  // MRUnit harnesses for the sampling mapper and reducer (configured in setUp).
  MapDriver mapDriver;
  ReduceDriver reduceDriver;
  // Internal adapter ids assigned by the in-memory store: one for the TestObject adapter, one for
  // the "altoids" centroid feature adapter.
  short internalAdapterId;
  short other;
  final TestObjectDataAdapter testObjectAdapter = new TestObjectDataAdapter();

  // Provides the current test-method name, used to build a unique store namespace per test.
  @Rule
  public TestName name = new TestName();

  // Objects observed by the stub rank functions, inspected by the mapper tests.
  private static final List capturedObjects = new ArrayList<>();

  public KSamplerMapReduceTest() {}

  /** Stub rank function returning a constant mid rank (0.5) and capturing each ranked object. */
  public static class TestSamplingMidRankFunction implements SamplingRankFunction {
    @Override
    public double rank(final int sampleSize, final Object value) {
      capturedObjects.add(value);
      return 0.5;
    }

    @Override
    public void initialize(final JobContext context, final Class scope, final Logger logger)
        throws IOException {}
  }

  /** Stub rank function returning rank 0.0 (never sampled) while still capturing each object. */
  public static class TestSamplingNoRankFunction implements SamplingRankFunction {
    @Override
    public void initialize(final JobContext context, final Class scope, final Logger logger)
        throws IOException {}

    @Override
    public double rank(final int sampleSize, final Object value) {
      capturedObjects.add(value);
      return 0.0;
    }
  }

  /**
   * Wires up both drivers: an in-memory data store (namespaced per test method), the "altoids"
   * centroid adapter, adapter/type-name registrations for both drivers, a sample size of 2, the
   * extractor/wrapper-factory classes, and custom Hadoop serializations.
   */
  @Before
  public void setUp() throws IOException {
    final KSamplerMapReduce.SampleMap mapper = new KSamplerMapReduce.SampleMap<>();
    final KSamplerMapReduce.SampleReducer reducer = new KSamplerMapReduce.SampleReducer<>();
    mapDriver = MapDriver.newMapDriver(mapper);
    reduceDriver = ReduceDriver.newReduceDriver(reducer);
    final DataTypeAdapter adapter =
        AnalyticFeature.createGeometryFeatureAdapter(
            "altoids",
            new String[] {},
            "http://geowave.test.net",
            ClusteringUtils.CLUSTERING_CRS);
    final PropertyManagement propManagement = new PropertyManagement();
    // Register and select the in-memory store plugin; namespace is unique per test method.
    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();
    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(
        "memory",
        new MemoryStoreFactoryFamily());
    pluginOptions.selectPlugin("memory");
    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();
    final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName();
    opts.setGeoWaveNamespace(namespace);
    final PersistableStore store = new PersistableStore(pluginOptions);
    propManagement.store(StoreParam.INPUT_STORE, store);
    propManagement.store(
        CentroidParameters.Centroid.INDEX_NAME,
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName());
    propManagement.store(CentroidParameters.Centroid.DATA_TYPE_ID, "altoids");
    propManagement.store(CentroidParameters.Centroid.DATA_NAMESPACE_URI, "http://geowave.test.net");
    propManagement.store(GlobalParameters.Global.BATCH_ID, "b1");
    propManagement.store(CentroidParameters.Centroid.EXTRACTOR_CLASS, TestObjectExtractor.class);
    propManagement.store(
        CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,
        TestObjectItemWrapperFactory.class);
    CentroidManagerGeoWave.setParameters(
        reduceDriver.getConfiguration(),
        KSamplerMapReduce.class,
        propManagement);
    CentroidManagerGeoWave.setParameters(
        mapDriver.getConfiguration(),
        KSamplerMapReduce.class,
        propManagement);
    // TODO it seems the centroid adapter is required to have been written,
    // should this initialization be handled by the runner class rather than
    // externally such as in the test?
    final DataStore dataStore = store.getDataStoreOptions().createDataStore();
    final InternalAdapterStore internalAdapterStore =
        store.getDataStoreOptions().createInternalAdapterStore();
    dataStore.addType(
        adapter,
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()));
    mapDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            SampleParameters.Sample.SAMPLE_RANK_FUNCTION),
        TestSamplingMidRankFunction.class,
        SamplingRankFunction.class);
    internalAdapterId = internalAdapterStore.getInitialAdapterId(testObjectAdapter.getTypeName());
    other = internalAdapterStore.getInitialAdapterId(adapter.getTypeName());
    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), testObjectAdapter);
    JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), adapter);
    JobContextInternalAdapterStore.addTypeName(
        mapDriver.getConfiguration(),
        testObjectAdapter.getTypeName(),
        internalAdapterId);
    JobContextInternalAdapterStore.addTypeName(
        mapDriver.getConfiguration(),
        adapter.getTypeName(),
        other);
    mapDriver.getConfiguration().setInt(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            SampleParameters.Sample.SAMPLE_SIZE),
        2);
    reduceDriver.getConfiguration().setInt(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            SampleParameters.Sample.SAMPLE_SIZE),
        2);
    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), adapter);
    JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), testObjectAdapter);
    JobContextInternalAdapterStore.addTypeName(
        reduceDriver.getConfiguration(),
        adapter.getTypeName(),
        other);
    JobContextInternalAdapterStore.addTypeName(
        reduceDriver.getConfiguration(),
        testObjectAdapter.getTypeName(),
        internalAdapterId);
    reduceDriver.getConfiguration().set(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            SampleParameters.Sample.DATA_TYPE_NAME),
        "altoids");
    reduceDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            CentroidParameters.Centroid.EXTRACTOR_CLASS),
        TestObjectExtractor.class,
        CentroidExtractor.class);
    mapDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS),
        TestObjectItemWrapperFactory.class,
        AnalyticItemWrapperFactory.class);
    reduceDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS),
        TestObjectItemWrapperFactory.class,
        AnalyticItemWrapperFactory.class);
    serializations();
  }

  // Appends the TestObject and SimpleFeatureImpl serializations to Hadoop's "io.serializations"
  // list. NOTE(review): only the reduce driver's configuration is updated here — presumably the
  // map driver does not need them; verify if mapper-side copies ever fail.
  private void serializations() {
    final String[] strings = reduceDriver.getConfiguration().getStrings("io.serializations");
    final String[] newStrings = new String[strings.length + 2];
    System.arraycopy(strings, 0, newStrings, 0, strings.length);
    newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName();
    newStrings[newStrings.length - 2] = TestObjectSerialization.class.getName();
    reduceDriver.getConfiguration().setStrings("io.serializations", newStrings);
  }

  /**
   * With a mid (0.5) rank the mapper emits the record, re-keyed so the data id embeds the rank,
   * and the rank function sees the unwrapped TestObject.
   */
  @Test
  public void testMapperWithMidRankedKey() throws IOException {
    capturedObjects.clear();
    mapDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            SampleParameters.Sample.SAMPLE_RANK_FUNCTION),
        TestSamplingMidRankFunction.class,
        SamplingRankFunction.class);
    final GeoWaveInputKey inputKey = new GeoWaveInputKey();
    inputKey.setInternalAdapterId(internalAdapterId);
    inputKey.setDataId(new ByteArray("abc".getBytes()));
    final ObjectWritable ow = new ObjectWritable();
    ow.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), "abc")));
    // Expected output key: rank (double), zoom level, group id bytes, data-id length, data id.
    final GeoWaveInputKey outputKey = new GeoWaveInputKey();
    outputKey.setInternalAdapterId(internalAdapterId);
    final ByteBuffer keyBuf = ByteBuffer.allocate(64);
    keyBuf.putDouble(0.5);
    keyBuf.putInt(1);
    keyBuf.put("1".getBytes());
    keyBuf.putInt(3);
    keyBuf.put(inputKey.getDataId().getBytes());
    outputKey.setDataId(new ByteArray(keyBuf.array()));
    mapDriver.withInput(inputKey, ow);
    final List> results = mapDriver.run();
    // output key has the dataID adjusted to contain the rank
    assertEquals(results.get(0).getFirst(), outputKey);
    // output value is the same as input value
    assertEquals(results.get(0).getSecond().get(), ow.get());
    // results from sample rank function to make sure it was provided the
    // correct object
    assertEquals(1, capturedObjects.size());
    assertEquals("abc", ((TestObject) capturedObjects.get(0)).id);
  }

  /** With rank 0.0 the mapper drops the record entirely but the rank function still sees it. */
  @Test
  public void testMapperWithZeroRank() throws IOException {
    capturedObjects.clear();
    mapDriver.getConfiguration().setClass(
        GeoWaveConfiguratorBase.enumToConfKey(
            KSamplerMapReduce.class,
            SampleParameters.Sample.SAMPLE_RANK_FUNCTION),
        TestSamplingNoRankFunction.class,
        SamplingRankFunction.class);
    final GeoWaveInputKey inputKey = new GeoWaveInputKey();
    inputKey.setInternalAdapterId(internalAdapterId);
    inputKey.setDataId(new ByteArray("abc".getBytes()));
    final ObjectWritable ow = new ObjectWritable();
    ow.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), "abc")));
    final GeoWaveInputKey outputKey = new GeoWaveInputKey();
    outputKey.setInternalAdapterId(internalAdapterId);
    final ByteBuffer keyBuf = ByteBuffer.allocate(64);
    keyBuf.putDouble(0.0);
    keyBuf.putInt(3);
    keyBuf.put(inputKey.getDataId().getBytes());
    outputKey.setDataId(new ByteArray(keyBuf.array()));
    mapDriver.withInput(inputKey, ow);
    final List> results = mapDriver.run();
    assertEquals(0, results.size());
    // results from sample rank function to make sure it was provided the
    // correct object
    assertEquals(1, capturedObjects.size());
    assertEquals("abc", ((TestObject) capturedObjects.get(0)).id);
  }

  /**
   * Reducer keeps only the configured sample size (2) of the three ranked inputs, emitting them
   * under the "altoids" type in rank order.
   */
  @Test
  public void testReducer() throws IOException {
    final ObjectWritable ow1 = new ObjectWritable();
    ow1.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), "abc")));
    final ObjectWritable ow2 = new ObjectWritable();
    ow2.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), "def")));
    final ObjectWritable ow3 = new ObjectWritable();
    ow3.set(new TestObjectWritable(new TestObject(new Coordinate(25.4, 25.6), "ghi")));
    final GeoWaveInputKey inputKey1 = new GeoWaveInputKey();
    inputKey1.setInternalAdapterId(internalAdapterId);
    ByteBuffer keyBuf = ByteBuffer.allocate(64);
    keyBuf.putDouble(0.5);
    keyBuf.putInt(3);
    keyBuf.put("111".getBytes());
    inputKey1.setDataId(new ByteArray(keyBuf.array()));
    keyBuf = ByteBuffer.allocate(64);
    final GeoWaveInputKey inputKey2 = new GeoWaveInputKey();
    inputKey2.setInternalAdapterId(internalAdapterId);
    keyBuf.putDouble(0.6);
    keyBuf.putInt(3);
    keyBuf.put("111".getBytes());
    inputKey2.setDataId(new ByteArray(keyBuf.array()));
    keyBuf = ByteBuffer.allocate(64);
    final GeoWaveInputKey inputKey3 = new GeoWaveInputKey();
    inputKey3.setInternalAdapterId(internalAdapterId);
    keyBuf.putDouble(0.7);
    keyBuf.putInt(3);
    keyBuf.put("111".getBytes());
    inputKey3.setDataId(new ByteArray(keyBuf.array()));
    reduceDriver.addInput(inputKey1, Arrays.asList(ow1));
    reduceDriver.addInput(inputKey2, Arrays.asList(ow2));
    reduceDriver.addInput(inputKey3, Arrays.asList(ow3));
    final List> results = reduceDriver.run();
    assertEquals(2, results.size());
    assertEquals(results.get(0).getFirst().getTypeName(), "altoids");
    assertEquals(results.get(1).getFirst().getTypeName(), "altoids");
    assertEquals("abc", results.get(0).getSecond().getName());
    assertEquals("def", results.get(1).getSecond().getName());
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/SimpleFeatureImplSerialization.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
import org.geotools.feature.simple.SimpleFeatureImpl;
import org.locationtech.geowave.adapter.vector.FeatureWritable;

/**
 * Hadoop {@link Serialization} for GeoTools {@link SimpleFeatureImpl} values, delegating the
 * actual wire format to GeoWave's {@link FeatureWritable}. Registered under the
 * "io.serializations" configuration key by the MRUnit tests in this package.
 */
public class SimpleFeatureImplSerialization implements Serialization<SimpleFeatureImpl> {
  @Override
  public boolean accept(final Class<?> c) {
    // Handle SimpleFeatureImpl and any subclass of it.
    return SimpleFeatureImpl.class.isAssignableFrom(c);
  }

  @Override
  public Deserializer<SimpleFeatureImpl> getDeserializer(final Class<SimpleFeatureImpl> c) {
    return new SFDeserializer();
  }

  @Override
  public Serializer<SimpleFeatureImpl> getSerializer(final Class<SimpleFeatureImpl> c) {
    return new SFSerializer();
  }

  /** Reads a feature by replaying {@link FeatureWritable#readFields}. */
  // static: holds no reference to the enclosing serialization instance (matches SFSerializer).
  public static class SFDeserializer implements Deserializer<SimpleFeatureImpl> {
    private InputStream in;
    private DataInputStream dataInput;

    @Override
    public void open(final InputStream in) throws IOException {
      this.in = in;
      dataInput = new DataInputStream(in);
    }

    @Override
    public SimpleFeatureImpl deserialize(final SimpleFeatureImpl t) throws IOException {
      // The passed-in instance is ignored; FeatureWritable reconstructs the feature (including
      // its type) from the stream, per the Deserializer contract's "may create a new object".
      final FeatureWritable fw = new FeatureWritable();
      fw.readFields(dataInput);
      return (SimpleFeatureImpl) fw.getFeature();
    }

    @Override
    public void close() throws IOException {
      in.close();
    }
  }

  /** Writes a feature via {@link FeatureWritable#write}. */
  private static class SFSerializer implements Serializer<SimpleFeatureImpl> {
    private OutputStream out;
    private DataOutput dataOutput;

    @Override
    public void open(final OutputStream out) throws IOException {
      this.out = out;
      dataOutput = new DataOutputStream(out);
    }

    @Override
    public void serialize(final SimpleFeatureImpl t) throws IOException {
      final FeatureWritable fw = new FeatureWritable(t.getFeatureType(), t);
      fw.write(dataOutput);
    }

    @Override
    public void close() throws IOException {
      out.close();
    }
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObject.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import java.io.Serializable;
import java.util.Objects;
import java.util.UUID;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;

/**
 * Simple mutable value object used by the k-means/k-sampler MapReduce tests. Identity (equals and
 * hashCode) is defined by the {@code geo} and {@code id} fields only; {@code groupID},
 * {@code name} and {@code level} are deliberately excluded.
 */
public class TestObject implements Serializable {
  private static final long serialVersionUID = 1L;
  private static final GeometryFactory factory = new GeometryFactory();

  public Geometry geo;
  public String id;
  public String groupID = "1";
  public String name;
  public int level = 1;

  /** Creates an object with a random UUID id; {@code geo} and {@code name} remain null. */
  public TestObject() {
    id = UUID.randomUUID().toString();
  }

  /** Creates an object with explicit geometry, id and group; {@code name} mirrors {@code id}. */
  public TestObject(final Geometry geo, final String id, final String groupID) {
    super();
    this.geo = geo;
    this.id = id;
    this.groupID = groupID;
    name = id;
  }

  /** Creates a point object at {@code coor}; {@code name} mirrors {@code id}. */
  public TestObject(final Coordinate coor, final String id) {
    geo = factory.createPoint(coor);
    // Fixed SRID used consistently by these tests — presumably a projected CRS; see callers.
    geo.setSRID(2029);
    this.id = id;
    name = id;
  }

  public int getLevel() {
    return level;
  }

  public void setLevel(final int level) {
    this.level = level;
  }

  public String getName() {
    return name;
  }

  public void setName(final String name) {
    this.name = name;
  }

  public String getGroupID() {
    return groupID;
  }

  @Override
  public int hashCode() {
    // Objects.hash computes the same 31-based combination as the previous manual loop.
    return Objects.hash(geo, id);
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if ((obj == null) || (getClass() != obj.getClass())) {
      return false;
    }
    final TestObject other = (TestObject) obj;
    // Null-safe comparison of the two identity fields.
    return Objects.equals(geo, other.geo) && Objects.equals(id, other.id);
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectDataAdapter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;
import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;
import org.locationtech.geowave.core.store.api.RowBuilder;
import org.locationtech.geowave.mapreduce.HadoopDataAdapter;
import org.locationtech.geowave.mapreduce.HadoopWritableSerializer;
import org.locationtech.jts.geom.Geometry;
import com.google.common.base.Functions;

/**
 * Minimal {@link HadoopDataAdapter} for {@link TestObject}, exposing three fields: the geometry
 * (spatial-index hinted), the id, and the group id. Used by the k-sampler MapReduce tests; the
 * type name is fixed to "test" and binary state is empty (the adapter is stateless).
 */
public class TestObjectDataAdapter implements HadoopDataAdapter<TestObject, TestObjectWritable> {
  private static final String GEOM = "myGeo";
  private static final String ID = "myId";
  private static final String GROUP_ID = "myGroupId";

  // The geometry field carries the spatial-index hint so it drives spatial indexing.
  private static final FieldDescriptor<Geometry> GEO_FIELD =
      new SpatialFieldDescriptorBuilder<>(Geometry.class).fieldName(
          GEOM).spatialIndexHint().build();
  private static final FieldDescriptor<String> ID_FIELD =
      new FieldDescriptorBuilder<>(String.class).fieldName(ID).build();
  private static final FieldDescriptor<String> GROUP_ID_FIELD =
      new FieldDescriptorBuilder<>(String.class).fieldName(GROUP_ID).build();
  private static final FieldDescriptor<?>[] DESCRIPTORS =
      new FieldDescriptor[] {GEO_FIELD, ID_FIELD, GROUP_ID_FIELD};
  // Lookup by field name, built once from the descriptor array.
  private static final Map<String, FieldDescriptor<?>> DESCRIPTOR_MAP =
      Arrays.stream(DESCRIPTORS).collect(
          Collectors.toMap(FieldDescriptor::fieldName, Functions.identity()));

  public TestObjectDataAdapter() {
    super();
  }

  @Override
  public String getTypeName() {
    return "test";
  }

  @Override
  public byte[] getDataId(final TestObject entry) {
    return StringUtils.stringToBinary(entry.id);
  }

  /** Builds a TestObject back up from its three fields; the data id itself is not used. */
  @Override
  public RowBuilder<TestObject> newRowBuilder(
      final FieldDescriptor<?>[] outputFieldDescriptors) {
    return new RowBuilder<TestObject>() {
      private String id;
      private String groupID;
      private Geometry geom;

      @Override
      public void setField(final String fieldName, final Object fieldValue) {
        if (fieldName.equals(GEOM)) {
          geom = (Geometry) fieldValue;
        } else if (fieldName.equals(ID)) {
          id = (String) fieldValue;
        } else if (fieldName.equals(GROUP_ID)) {
          groupID = (String) fieldValue;
        }
      }

      @Override
      public void setFields(final Map<String, Object> values) {
        if (values.containsKey(GEOM)) {
          geom = (Geometry) values.get(GEOM);
        }
        if (values.containsKey(ID)) {
          id = (String) values.get(ID);
        }
        if (values.containsKey(GROUP_ID)) {
          groupID = (String) values.get(GROUP_ID);
        }
      }

      @Override
      public TestObject buildRow(final byte[] dataId) {
        return new TestObject(geom, id, groupID);
      }
    };
  }

  @Override
  public HadoopWritableSerializer<TestObject, TestObjectWritable> createWritableSerializer() {
    return new TestObjectHadoopSerializer();
  }

  /** Wraps/unwraps TestObject in its Writable for Hadoop shuffle transport. */
  // static: needs no state from the enclosing adapter instance.
  private static class TestObjectHadoopSerializer implements
      HadoopWritableSerializer<TestObject, TestObjectWritable> {
    @Override
    public TestObjectWritable toWritable(final TestObject entry) {
      return new TestObjectWritable(entry);
    }

    @Override
    public TestObject fromWritable(final TestObjectWritable writable) {
      return writable.getObj();
    }
  }

  @Override
  public Object getFieldValue(final TestObject entry, final String fieldName) {
    switch (fieldName) {
      case GEOM:
        return entry.geo;
      case ID:
        return entry.id;
      case GROUP_ID:
        return entry.groupID;
    }
    // Unknown field names yield null rather than throwing.
    return null;
  }

  @Override
  public Class<TestObject> getDataClass() {
    return TestObject.class;
  }

  @Override
  public FieldDescriptor<?>[] getFieldDescriptors() {
    return DESCRIPTORS;
  }

  @Override
  public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {
    return DESCRIPTOR_MAP.get(fieldName);
  }

  @Override
  public byte[] toBinary() {
    // Stateless adapter: nothing to persist.
    return new byte[0];
  }

  @Override
  public void fromBinary(final byte[] bytes) {}
}

================================================
FILE:
analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectDimExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import org.locationtech.geowave.analytic.extract.DimensionExtractor; import org.locationtech.geowave.analytic.extract.EmptyDimensionExtractor; import org.locationtech.jts.geom.Geometry; public class TestObjectDimExtractor extends EmptyDimensionExtractor implements DimensionExtractor { /** * */ private static final long serialVersionUID = 1L; @Override public String getGroupID(final TestObject anObject) { return anObject.getGroupID(); } @Override public Geometry getGeometry(final TestObject anObject) { return anObject.geo; } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectDistanceFn.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import org.locationtech.geowave.analytic.distance.CoordinateEuclideanDistanceFn; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; public class TestObjectDistanceFn implements DistanceFn { /** * */ private static final long serialVersionUID = 1L; private final DistanceFn coordinateDistanceFunction = new CoordinateEuclideanDistanceFn(); private Geometry getGeometry(final TestObject x) { return x.geo; } @Override public double measure(final TestObject x, final TestObject y) { return coordinateDistanceFunction.measure( getGeometry(x).getCentroid().getCoordinate(), getGeometry(y).getCentroid().getCoordinate()); } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectExtractor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import org.locationtech.geowave.analytic.extract.CentroidExtractor; import org.locationtech.jts.geom.Point; public class TestObjectExtractor implements CentroidExtractor { @Override public Point getCentroid(final TestObject anObject) { return anObject.geo.getCentroid(); } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectItemWrapperFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans; import java.io.IOException; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.AnalyticItemWrapperFactory; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.slf4j.Logger; public class TestObjectItemWrapperFactory implements AnalyticItemWrapperFactory { @Override public AnalyticItemWrapper create(final TestObject item) { return new TestObjectItemWrapper(item); } @Override public void initialize(final JobContext context, final Class scope, final Logger logger) throws IOException {} @Override public AnalyticItemWrapper createNextItem( final TestObject feature, final String groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues) { final TestObject obj = new TestObject(); obj.groupID = groupID; obj.geo = feature.geo.getFactory().createPoint(coordinate); obj.name = feature.name; return new TestObjectItemWrapper(obj); } static class TestObjectItemWrapper implements AnalyticItemWrapper { private final TestObject item; public TestObjectItemWrapper(final TestObject item) { super(); this.item = item; } @Override public String getID() { return item.id; } @Override public String getGroupID() { return item.groupID; } @Override public TestObject getWrappedItem() { return item; } @Override public long getAssociationCount() { // TODO Auto-generated method stub return 0; } @Override public void resetAssociatonCount() { // TODO Auto-generated method stub } @Override public void 
incrementAssociationCount(final long increment) { // TODO Auto-generated method stub } @Override public int getIterationID() { // TODO Auto-generated method stub return 0; } @Override public String getName() { return item.id; } @Override public String[] getExtraDimensions() { return new String[] {}; } @Override public double[] getDimensionValues() { return new double[0]; } @Override public Geometry getGeometry() { return item.geo; } @Override public double getCost() { return 0; } @Override public void setCost(final double cost) { // TODO Auto-generated method stub } @Override public void setZoomLevel(final int level) { item.setLevel(level); } @Override public int getZoomLevel() { return item.getLevel(); } @Override public void setBatchID(final String batchID) { // TODO Auto-generated method stub } @Override public String getBatchID() { // TODO Auto-generated method stub return null; } @Override public void setGroupID(final String groupID) { item.groupID = groupID; } } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectSerialization.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;

/**
 * Hadoop {@link Serialization} for {@link TestObject}, delegating the wire format to
 * {@link TestObjectWritable}.
 */
public class TestObjectSerialization implements Serialization<TestObject> {

  /** Accepts {@link TestObject} and any subclass. */
  @Override
  public boolean accept(final Class<?> c) {
    return TestObject.class.isAssignableFrom(c);
  }

  @Override
  public Deserializer<TestObject> getDeserializer(final Class<TestObject> arg0) {
    return new TODeserializer();
  }

  @Override
  public Serializer<TestObject> getSerializer(final Class<TestObject> arg0) {
    return new TOSerializer();
  }

  /**
   * Reads objects via {@link TestObjectWritable#readFields}. Static for consistency with
   * {@link TOSerializer} — it uses no enclosing-instance state.
   */
  public static class TODeserializer implements Deserializer<TestObject> {

    private InputStream in;
    private DataInputStream dataInput;

    @Override
    public void open(final InputStream in) throws IOException {
      this.in = in;
      dataInput = new DataInputStream(in);
    }

    /** Ignores the passed-in instance and always materializes a fresh object from the stream. */
    @Override
    public TestObject deserialize(final TestObject t) throws IOException {
      final TestObjectWritable fw = new TestObjectWritable();
      fw.readFields(dataInput);
      return fw.getObj();
    }

    @Override
    public void close() throws IOException {
      in.close();
    }
  }

  /** Writes objects via {@link TestObjectWritable#write}. */
  private static class TOSerializer implements Serializer<TestObject> {

    private OutputStream out;
    private DataOutput dataOutput;

    @Override
    public void open(final OutputStream out) throws IOException {
      this.out = out;
      dataOutput = new DataOutputStream(out);
    }

    @Override
    public void serialize(final TestObject t) throws IOException {
      final TestObjectWritable fw = new TestObjectWritable(t);
      fw.write(dataOutput);
    }

    @Override
    public void close() throws IOException {
      out.close();
    }
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/TestObjectWritable.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.kmeans;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.locationtech.jts.geom.Coordinate;

/**
 * Hadoop {@link Writable} wrapper around a {@link TestObject}. Wire format: id, name, groupID as
 * UTF strings followed by the geometry's x and y coordinates as doubles.
 */
public class TestObjectWritable implements Writable {

  private TestObject obj;

  public TestObjectWritable() {}

  public TestObjectWritable(final TestObject obj) {
    super();
    this.obj = obj;
  }

  public TestObject getObj() {
    return obj;
  }

  public void setObj(final TestObject obj) {
    this.obj = obj;
  }

  /** Rebuilds the wrapped object from the stream in the order written by {@link #write}. */
  @Override
  public void readFields(final DataInput arg0) throws IOException {
    final String id = arg0.readUTF();
    final String name = arg0.readUTF();
    final String gid = arg0.readUTF();
    final double x = arg0.readDouble();
    final double y = arg0.readDouble();
    obj = new TestObject(new Coordinate(x, y), id);
    obj.setName(name);
    obj.groupID = gid;
  }

  /** Serializes the wrapped object; assumes {@code obj} and its geometry are non-null. */
  @Override
  public void write(final DataOutput arg0) throws IOException {
    arg0.writeUTF(obj.id);
    arg0.writeUTF(obj.name);
    arg0.writeUTF(obj.groupID);
    final Coordinate c = obj.geo.getCoordinate();
    arg0.writeDouble(c.x);
    arg0.writeDouble(c.y);
  }

  @Override
  public int hashCode() {
    // 31 * 1 + wrapped hash, matching the conventional generated form.
    return 31 + ((obj == null) ? 0 : obj.hashCode());
  }

  @Override
  public boolean equals(final Object other) {
    if (this == other) {
      return true;
    }
    if ((other == null) || (getClass() != other.getClass())) {
      return false;
    }
    final TestObjectWritable that = (TestObjectWritable) other;
    return (obj == null) ? (that.obj == null) : obj.equals(that.obj);
  }
}

================================================
FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/KMeansIterationsJobRunnerTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; import org.apache.hadoop.conf.Configuration; import org.geotools.feature.type.BasicFeatureTypes; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.SimpleFeatureItemWrapperFactory; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException; import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn; import org.locationtech.geowave.analytic.param.CentroidParameters; import org.locationtech.geowave.analytic.param.ClusteringParameters; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.GlobalParameters; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.jts.geom.Coordinate; import 
org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeature; public class KMeansIterationsJobRunnerTest { private final KMeansIterationsJobRunnerForTest jobRunner = new KMeansIterationsJobRunnerForTest(); private static final String[] grps = new String[] {"g1", "g2"}; private static final FeatureDataAdapter adapter = AnalyticFeature.createGeometryFeatureAdapter( "centroid", new String[] {}, BasicFeatureTypes.DEFAULT_NAMESPACE, ClusteringUtils.CLUSTERING_CRS); PropertyManagement propertyMgt = new PropertyManagement(); @Before public void setup() { propertyMgt.store(GlobalParameters.Global.BATCH_ID, "b1"); propertyMgt.store(CentroidParameters.Centroid.DATA_TYPE_ID, "centroid"); propertyMgt.store( CentroidParameters.Centroid.INDEX_NAME, SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()).getName()); propertyMgt.store(ClusteringParameters.Clustering.CONVERGANCE_TOLERANCE, new Double(0.0001)); propertyMgt.store( CommonParameters.Common.DISTANCE_FUNCTION_CLASS, FeatureCentroidDistanceFn.class); propertyMgt.store( CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS, SimpleFeatureItemWrapperFactory.class); } @Test public void testRun() throws Exception { // seed jobRunner.runJob(new Configuration(), propertyMgt); // then test jobRunner.run(new Configuration(), propertyMgt); for (final Map.Entry>> e : KMeansIterationsJobRunnerForTest.groups.entrySet()) { assertEquals(3, e.getValue().size()); for (final AnalyticItemWrapper newCentroid : e.getValue()) { assertEquals(2, newCentroid.getIterationID()); // check to make sure there is no overlap of old and new IDs boolean b = false; for (final AnalyticItemWrapper oldCentroid : KMeansIterationsJobRunnerForTest.deletedSet.get( e.getKey())) { b |= oldCentroid.getID().equals(newCentroid.getID()); } assertFalse(b); } } for (final Map.Entry>> e : KMeansIterationsJobRunnerForTest.deletedSet.entrySet()) { assertEquals(3, e.getValue().size()); 
for (final AnalyticItemWrapper oldCentroid : e.getValue()) { assertEquals(1, oldCentroid.getIterationID()); } } } public static class KMeansIterationsJobRunnerForTest extends KMeansIterationsJobRunner { private int iteration = 1; protected static Map>> groups = new HashMap<>(); protected static Map>> deletedSet = new HashMap<>(); private static SimpleFeatureItemWrapperFactory factory = new SimpleFeatureItemWrapperFactory(); private static final GeometryFactory geoFactory = new GeometryFactory(); private static Point[] points = new Point[] { geoFactory.createPoint(new Coordinate(2.3, 2.3)), geoFactory.createPoint(new Coordinate(2.31, 2.31)), geoFactory.createPoint(new Coordinate(2.32, 2.31)), geoFactory.createPoint(new Coordinate(2.31, 2.33)), geoFactory.createPoint(new Coordinate(2.29, 2.31)), geoFactory.createPoint(new Coordinate(2.3, 2.32)), geoFactory.createPoint(new Coordinate(2.28, 2.3)), geoFactory.createPoint(new Coordinate(2.28, 2.27)), geoFactory.createPoint(new Coordinate(2.27, 2.31)), geoFactory.createPoint(new Coordinate(2.33, 2.3)), geoFactory.createPoint(new Coordinate(2.31, 2.35))}; @Override protected CentroidManager constructCentroidManager( final Configuration config, final PropertyManagement runTimeProperties) throws IOException { return new CentroidManager() { @Override public void clear() {} @Override public AnalyticItemWrapper createNextCentroid( final SimpleFeature feature, final String groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues) { return factory.createNextItem(feature, groupID, coordinate, extraNames, extraValues); } @Override public void delete(final String[] dataIds) throws IOException { final List grps = Arrays.asList(dataIds); for (final Map.Entry>> entry : groups.entrySet()) { final Iterator> it = entry.getValue().iterator(); while (it.hasNext()) { final AnalyticItemWrapper next = it.next(); if (grps.contains(next.getID())) { deletedSet.get(entry.getKey()).add(next); it.remove(); } } } } 
@Override public List getAllCentroidGroups() throws IOException { final List ll = new ArrayList<>(); for (final String g : groups.keySet()) { ll.add(g); } return ll; } @Override public List> getCentroidsForGroup(final String groupID) throws IOException { return groups.get(groupID); } @Override public List> getCentroidsForGroup( final String batchID, final String groupID) throws IOException { return groups.get(groupID); } @Override public int processForAllGroups( final org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn fn) throws IOException { for (final Map.Entry>> entry : groups.entrySet()) { final int status = fn.processGroup(entry.getKey(), entry.getValue()); if (status < 0) { return status; } } return 0; } @Override public AnalyticItemWrapper getCentroid(final String id) { // TODO Auto-generated method stub return null; } @Override public String getDataTypeName() { return "centroid"; } @Override public String getIndexName() { return SpatialDimensionalityTypeProvider.createIndexFromOptions( new SpatialOptions()).getName(); } @Override public AnalyticItemWrapper getCentroidById( final String id, final String groupID) throws IOException, MatchingCentroidNotFoundException { final Iterator> it = this.getCentroidsForGroup(groupID).iterator(); while (it.hasNext()) { final AnalyticItemWrapper feature = (it.next()); if (feature.getID().equals(id)) { return feature; } } throw new MatchingCentroidNotFoundException(id); } }; } @Override protected int runJob(final Configuration config, final PropertyManagement runTimeProperties) throws Exception { int j = 0; for (final String grpID : grps) { if (!groups.containsKey(grpID)) { groups.put(grpID, new ArrayList>()); deletedSet.put(grpID, new ArrayList>()); } for (int i = 0; i < 3; i++) { final SimpleFeature nextFeature = AnalyticFeature.createGeometryFeature( adapter.getFeatureType(), "b1", UUID.randomUUID().toString(), "nn" + i, grpID, 0.1, points[j++], new String[0], new double[0], 1, iteration, 
0); groups.get(grpID).add(factory.create(nextFeature)); } } iteration++; return 0; } } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/kmeans/runner/StripWeakCentroidsRunnerTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.kmeans.runner; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.analytic.AnalyticItemWrapper; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.clustering.CentroidManager; import org.locationtech.geowave.analytic.clustering.LongCentroid; import org.locationtech.geowave.analytic.clustering.exception.MatchingCentroidNotFoundException; import org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.MaxChangeBreakStrategy; import org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.StableChangeBreakStrategy; import org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.TailMaxBreakStrategy; import org.locationtech.geowave.analytic.mapreduce.kmeans.runner.StripWeakCentroidsRunner.TailStableChangeBreakStrategy; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.jts.geom.Coordinate; public class StripWeakCentroidsRunnerTest { @Test public void testStable() throws Exception { final StripWeakCentroidsRunnerForTest testObj = new StripWeakCentroidsRunnerForTest(60, 62); testObj.setBreakStrategy(new StableChangeBreakStrategy()); testObj.run(new Configuration(), new 
PropertyManagement()); } @Test public void testStable1() throws Exception { final List> list = new ArrayList<>(); final int cnts[] = new int[] {1000, 851, 750, 650, 525, 200, 100, 90, 70}; for (int i = 0; i < cnts.length; i++) { list.add(new LongCentroid(i, "", cnts[i])); } final StableChangeBreakStrategy breakS = new StableChangeBreakStrategy<>(); assertEquals(5, breakS.getBreakPoint(list)); } @Test public void testStableUniform() throws Exception { final List> list = new ArrayList<>(); final int cnts[] = new int[] {1000, 851, 750, 650, 525, 200, 100, 90, 70}; for (int i = 0; i < cnts.length; i++) { list.add(new LongCentroid(i, "", cnts[i])); } final TailStableChangeBreakStrategy breakS = new TailStableChangeBreakStrategy<>(); assertEquals(5, breakS.getBreakPoint(list)); } @Test public void testMaxDense() throws Exception { final List> list = new ArrayList<>(); final int cnts[] = new int[] {900, 600, 800,}; for (int i = 0; i < cnts.length; i++) { list.add(new LongCentroid(i, "", cnts[i])); } final TailMaxBreakStrategy breakS = new TailMaxBreakStrategy<>(); assertEquals(3, breakS.getBreakPoint(list)); } @Test public void testMaxUniform() throws Exception { final List> list = new ArrayList<>(); final int cnts[] = new int[] {1000, 851, 750, 650, 525, 200, 90, 70}; for (int i = 0; i < cnts.length; i++) { list.add(new LongCentroid(i, "", cnts[i])); } final TailMaxBreakStrategy breakS = new TailMaxBreakStrategy<>(); assertEquals(5, breakS.getBreakPoint(list)); } @Test public void testCliffMean() throws Exception { final StripWeakCentroidsRunnerForTest testObj = new StripWeakCentroidsRunnerForTest(79, 81); testObj.setBreakStrategy(new MaxChangeBreakStrategy()); testObj.run(new Configuration(), new PropertyManagement()); } @Test public void testCliff() throws Exception { final StripWeakCentroidsRunnerForTestOne testObj = new StripWeakCentroidsRunnerForTestOne(); testObj.run(new Configuration(), new PropertyManagement()); } private static class 
StripWeakCentroidsRunnerForTest extends StripWeakCentroidsRunner { private final List> testSet; private final int min; private final int max; StripWeakCentroidsRunnerForTest(final int min, final int max) { super(); this.min = min; this.max = max; testSet = load(); } @Override protected CentroidManager constructCentroidManager( final Configuration config, final PropertyManagement runTimeProperties) throws IOException { return new CentroidManager() { @Override public AnalyticItemWrapper createNextCentroid( final Long feature, final String groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues) { return new LongCentroid(feature, groupID, 1); } @Override public void clear() {} @Override public void delete(final String[] dataIds) throws IOException { Assert.assertTrue(dataIds.length + "<=" + max, dataIds.length <= max); Assert.assertTrue(dataIds.length + ">=" + min, dataIds.length >= min); } @Override public List getAllCentroidGroups() throws IOException { return Arrays.asList("1"); } @Override public List> getCentroidsForGroup(final String groupID) throws IOException { Assert.assertEquals("1", groupID); return testSet; } @Override public List> getCentroidsForGroup( final String batchID, final String groupID) throws IOException { Assert.assertEquals("1", groupID); return testSet; } @Override public int processForAllGroups( final org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn fn) throws IOException { return fn.processGroup("1", testSet); } @Override public AnalyticItemWrapper getCentroid(final String id) { // TODO Auto-generated method stub return null; } @Override public String getDataTypeName() { return "centroid"; } @Override public String getIndexName() { return SpatialDimensionalityTypeProvider.createIndexFromOptions( new SpatialOptions()).getName(); } @Override public AnalyticItemWrapper getCentroidById(final String id, final String groupID) throws IOException, MatchingCentroidNotFoundException 
{ Assert.assertEquals("1", groupID); throw new MatchingCentroidNotFoundException(id); } }; } private List> load() { final Random rand = new Random(2331); int begin = 100000000; final List> centroids = new ArrayList<>(); for (int i = 0; i <= 100; i++) { if ((i > 0) && ((i % 20) == 0)) { begin /= (Math.pow(100, i / 20)); } centroids.add(new LongCentroid(i, "", (int) (Math.abs(rand.nextDouble() * 10000) + begin))); } return centroids; } } private static class StripWeakCentroidsRunnerForTestOne extends StripWeakCentroidsRunner { private final List> testSet = Arrays.asList((AnalyticItemWrapper) new LongCentroid(1L, "", 22)); StripWeakCentroidsRunnerForTestOne() { super(); } @Override protected CentroidManager constructCentroidManager( final Configuration config, final PropertyManagement runTimeProperties) throws IOException { return new CentroidManager() { @Override public AnalyticItemWrapper createNextCentroid( final Long feature, final String groupID, final Coordinate coordinate, final String[] extraNames, final double[] extraValues) { return new LongCentroid(feature, groupID, 1); } @Override public void clear() {} @Override public void delete(final String[] dataIds) throws IOException { Assert.assertFalse(true); } @Override public List getAllCentroidGroups() throws IOException { return Arrays.asList("1"); } @Override public List> getCentroidsForGroup(final String groupID) throws IOException { Assert.assertEquals("1", groupID); return testSet; } @Override public List> getCentroidsForGroup( final String batchID, final String groupID) throws IOException { Assert.assertEquals("1", groupID); return testSet; } @Override public int processForAllGroups( final org.locationtech.geowave.analytic.clustering.CentroidManager.CentroidProcessingFn fn) throws IOException { return fn.processGroup("1", testSet); } @Override public AnalyticItemWrapper getCentroid(final String id) { // TODO Auto-generated method stub return null; } @Override public String getDataTypeName() { return 
"centroid"; } @Override public String getIndexName() { return SpatialDimensionalityTypeProvider.createIndexFromOptions( new SpatialOptions()).getName(); } @Override public AnalyticItemWrapper getCentroidById(final String id, final String groupID) throws IOException, MatchingCentroidNotFoundException { Assert.assertEquals("1", groupID); throw new MatchingCentroidNotFoundException(id); } }; } } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/nn/NNJobRunnerTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.mapreduce.nn;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.locationtech.geowave.analytic.PropertyManagement;
import org.locationtech.geowave.analytic.ScopedJobConfiguration;
import org.locationtech.geowave.analytic.distance.DistanceFn;
import org.locationtech.geowave.analytic.distance.FeatureCentroidDistanceFn;
import org.locationtech.geowave.analytic.distance.GeometryCentroidDistanceFn;
import org.locationtech.geowave.analytic.mapreduce.GeoWaveAnalyticJobRunner;
import org.locationtech.geowave.analytic.mapreduce.MapReduceIntegration;
import org.locationtech.geowave.analytic.mapreduce.SequenceFileInputFormatConfiguration;
import org.locationtech.geowave.analytic.param.CommonParameters;
import org.locationtech.geowave.analytic.param.MapReduceParameters.MRConfig;
import org.locationtech.geowave.analytic.param.PartitionParameters.Partition;
import org.locationtech.geowave.analytic.param.StoreParameters.StoreParam;
import org.locationtech.geowave.analytic.partitioner.OrthodromicDistancePartitioner;
import org.locationtech.geowave.analytic.partitioner.Partitioner;
import org.locationtech.geowave.analytic.store.PersistableStore;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;

/**
 * Exercises NNJobRunner configuration plumbing: the Hadoop job is never executed; instead a stub
 * MapReduceIntegration intercepts the job and asserts that every property stored in
 * PropertyManagement during init() arrived in the job configuration.
 */
public class NNJobRunnerTest {
  final NNJobRunner jjJobRunner = new NNJobRunner();
  final PropertyManagement runTimeProperties = new PropertyManagement();

  // Used to derive a per-test-method memory-store namespace.
  @Rule
  public TestName name = new TestName();

  @Before
  public void init() {
    jjJobRunner.setMapReduceIntegrater(new MapReduceIntegration() {
      @Override
      public int submit(
          final Configuration configuration,
          final PropertyManagement runTimeProperties,
          final GeoWaveAnalyticJobRunner tool) throws Exception {
        tool.setConf(configuration);
        return ToolRunner.run(configuration, tool, new String[] {});
      }

      // The heart of the test: instead of running the job, verify its configuration.
      @Override
      public Counters waitForCompletion(final Job job)
          throws ClassNotFoundException, IOException, InterruptedException {
        Assert.assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
        Assert.assertEquals(10, job.getNumReduceTasks());
        final ScopedJobConfiguration configWrapper =
            new ScopedJobConfiguration(job.getConfiguration(), NNMapReduce.class);
        Assert.assertEquals("file://foo/bin", job.getConfiguration().get("mapred.input.dir"));
        Assert.assertEquals(0.4, configWrapper.getDouble(Partition.MAX_DISTANCE, 0.0), 0.001);
        Assert.assertEquals(100, configWrapper.getInt(Partition.MAX_MEMBER_SELECTION, 1));
        try {
          // Partitioner classes must round-trip through the configuration.
          final Partitioner wrapper =
              configWrapper.getInstance(Partition.PARTITIONER_CLASS, Partitioner.class, null);
          Assert.assertEquals(OrthodromicDistancePartitioner.class, wrapper.getClass());
          final Partitioner secondary =
              configWrapper.getInstance(
                  Partition.SECONDARY_PARTITIONER_CLASS,
                  Partitioner.class,
                  null);
          Assert.assertEquals(OrthodromicDistancePartitioner.class, secondary.getClass());
          // Stored distance function must win over the supplied default.
          final DistanceFn distancFn =
              configWrapper.getInstance(
                  CommonParameters.Common.DISTANCE_FUNCTION_CLASS,
                  DistanceFn.class,
                  GeometryCentroidDistanceFn.class);
          Assert.assertEquals(FeatureCentroidDistanceFn.class, distancFn.getClass());
        } catch (final InstantiationException e) {
          throw new IOException("Unable to configure system", e);
        } catch (final IllegalAccessException e) {
          throw new IOException("Unable to configure system", e);
        }
        Assert.assertEquals(10, job.getNumReduceTasks());
        return new Counters();
      }

      @Override
      public Job getJob(final Tool tool) throws IOException {
        return new Job(tool.getConf());
      }

      @Override
      public Configuration getConfiguration(final PropertyManagement runTimeProperties)
          throws IOException {
        return new Configuration();
      }
    });
    jjJobRunner.setInputFormatConfiguration(
        new SequenceFileInputFormatConfiguration(new Path("file://foo/bin")));
    jjJobRunner.setReducerCount(10);
    runTimeProperties.store(MRConfig.HDFS_BASE_DIR, "/");
    // Back the runner with an isolated in-memory store named after the test method.
    final DataStorePluginOptions pluginOptions = new DataStorePluginOptions();
    GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put(
        "memory",
        new MemoryStoreFactoryFamily());
    pluginOptions.selectPlugin("memory");
    final MemoryRequiredOptions opts = (MemoryRequiredOptions) pluginOptions.getFactoryOptions();
    final String namespace = "test_" + getClass().getName() + "_" + name.getMethodName();
    opts.setGeoWaveNamespace(namespace);
    final PersistableStore store = new PersistableStore(pluginOptions);
    runTimeProperties.store(StoreParam.INPUT_STORE, store);
    // These values are the ones waitForCompletion() above asserts on.
    runTimeProperties.store(
        CommonParameters.Common.DISTANCE_FUNCTION_CLASS,
        FeatureCentroidDistanceFn.class);
    runTimeProperties.store(Partition.PARTITIONER_CLASS, OrthodromicDistancePartitioner.class);
    runTimeProperties.store(
        Partition.SECONDARY_PARTITIONER_CLASS,
        OrthodromicDistancePartitioner.class);
    runTimeProperties.store(Partition.MAX_DISTANCE, Double.valueOf(0.4));
    runTimeProperties.store(Partition.MAX_MEMBER_SELECTION, Integer.valueOf(100));
  }

  @Test
  public void test() throws Exception {
    jjJobRunner.run(runTimeProperties);
  }
}

================================================
FILE:
analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/nn/NNMapReduceTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.nn; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.apache.hadoop.io.DataInputByteBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.Text; import org.apache.hadoop.mrunit.mapreduce.MapDriver; import org.apache.hadoop.mrunit.mapreduce.ReduceDriver; import org.apache.hadoop.mrunit.types.Pair; import org.geotools.feature.type.BasicFeatureTypes; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.FeatureWritable; import org.locationtech.geowave.analytic.AdapterWithObjectWritable; import org.locationtech.geowave.analytic.AnalyticFeature; import org.locationtech.geowave.analytic.clustering.ClusteringUtils; import org.locationtech.geowave.analytic.distance.DistanceFn; import org.locationtech.geowave.analytic.distance.FeatureCentroidOrthodromicDistanceFn; import org.locationtech.geowave.analytic.mapreduce.kmeans.SimpleFeatureImplSerialization; import org.locationtech.geowave.analytic.mapreduce.nn.NNMapReduce.PartitionDataWritable; import org.locationtech.geowave.analytic.param.CommonParameters; import org.locationtech.geowave.analytic.param.PartitionParameters; import org.locationtech.geowave.analytic.partitioner.Partitioner.PartitionData; import org.locationtech.geowave.core.index.ByteArray; import 
org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.geowave.mapreduce.JobContextAdapterStore; import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; /* MRUnit-based unit tests for NNMapReduce: drives the NN mapper and reducer with in-memory fixtures and also round-trips PartitionDataWritable. */ public class NNMapReduceTest { MapDriver mapDriver; ReduceDriver reduceDriver; SimpleFeatureType ftype; short internalAdapterId; final GeometryFactory factory = new GeometryFactory(); /* Configures both MRUnit drivers: registers the in-memory store family, sets distance thresholds, the orthodromic distance function and max distance, and registers the test feature adapter under a lazily-assigned internal adapter id. */ @Before public void setUp() throws IOException { GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put( "memory", new MemoryStoreFactoryFamily()); final NNMapReduce.NNMapper nnMapper = new NNMapReduce.NNMapper<>(); final NNMapReduce.NNReducer nnReducer = new NNMapReduce.NNSimpleFeatureIDOutputReducer(); mapDriver = MapDriver.newMapDriver(nnMapper); reduceDriver = ReduceDriver.newReduceDriver(nnReducer); mapDriver.getConfiguration().set( GeoWaveConfiguratorBase.enumToConfKey( NNMapReduce.class, PartitionParameters.Partition.DISTANCE_THRESHOLDS), "0.0002,0.0002"); reduceDriver.getConfiguration().setClass( GeoWaveConfiguratorBase.enumToConfKey( NNMapReduce.class, CommonParameters.Common.DISTANCE_FUNCTION_CLASS), FeatureCentroidOrthodromicDistanceFn.class, DistanceFn.class); reduceDriver.getConfiguration().setDouble( GeoWaveConfiguratorBase.enumToConfKey( NNMapReduce.class, PartitionParameters.Partition.MAX_DISTANCE), 0.001); ftype = AnalyticFeature.createGeometryFeatureAdapter( "centroid", new String[] {"extra1"}, BasicFeatureTypes.DEFAULT_NAMESPACE, ClusteringUtils.CLUSTERING_CRS).getFeatureType(); final
FeatureDataAdapter adapter = new FeatureDataAdapter(ftype); JobContextAdapterStore.addDataAdapter(mapDriver.getConfiguration(), adapter); internalAdapterId = InternalAdapterStoreImpl.getLazyInitialAdapterId(adapter.getTypeName()); JobContextAdapterStore.addDataAdapter(reduceDriver.getConfiguration(), adapter); JobContextInternalAdapterStore.addTypeName( mapDriver.getConfiguration(), adapter.getTypeName(), internalAdapterId); JobContextInternalAdapterStore.addTypeName( reduceDriver.getConfiguration(), adapter.getTypeName(), internalAdapterId); serializations(); } /* Builds a single-point test feature of type ftype at the given coordinate. */ private SimpleFeature createTestFeature(final Coordinate coord) { return AnalyticFeature.createGeometryFeature( ftype, "b1", UUID.randomUUID().toString(), "fred", "NA", 20.30203, factory.createPoint(coord), new String[] {"extra1"}, new double[] {0.022}, 1, 1, 0); } /* Appends SimpleFeatureImplSerialization to Hadoop's io.serializations on both drivers. */ private void serializations() { final String[] strings = reduceDriver.getConfiguration().getStrings("io.serializations"); final String[] newStrings = new String[strings.length + 1]; System.arraycopy(strings, 0, newStrings, 0, strings.length); newStrings[newStrings.length - 1] = SimpleFeatureImplSerialization.class.getName(); reduceDriver.getConfiguration().setStrings("io.serializations", newStrings); mapDriver.getConfiguration().setStrings("io.serializations", newStrings); } /* Drives the mapper with four features (two near each other, two straddling the antimeridian), then feeds the grouped output to the reducer and checks nearest-neighbor pairings. */ @Test public void testMapper() throws IOException { final SimpleFeature feature1 = createTestFeature(new Coordinate(30.0, 30.00000001)); final SimpleFeature feature2 = createTestFeature(new Coordinate(179.9999999999, 30.0000001)); final SimpleFeature feature3 = createTestFeature(new Coordinate(30.00000001, 30.00000001)); final SimpleFeature feature4 = createTestFeature(new Coordinate(-179.9999999999, 30.0000001)); final GeoWaveInputKey inputKey1 = new GeoWaveInputKey(); inputKey1.setInternalAdapterId(internalAdapterId); inputKey1.setDataId(new ByteArray(feature1.getID())); final GeoWaveInputKey inputKey2 = new GeoWaveInputKey();
inputKey2.setInternalAdapterId(internalAdapterId); inputKey2.setDataId(new ByteArray(feature2.getID())); final GeoWaveInputKey inputKey3 = new GeoWaveInputKey(); inputKey3.setInternalAdapterId(internalAdapterId); inputKey3.setDataId(new ByteArray(feature4.getID())); /* NOTE(review): inputKey3 is paired with feature3 in addInput below, yet its dataId is set from feature4.getID() — looks like a copy-paste slip; confirm whether the mapper ignores dataId before relying on it. */ final GeoWaveInputKey inputKey4 = new GeoWaveInputKey(); inputKey4.setInternalAdapterId(internalAdapterId); inputKey4.setDataId(new ByteArray(feature4.getID())); mapDriver.addInput(inputKey1, feature1); mapDriver.addInput(inputKey2, feature2); mapDriver.addInput(inputKey3, feature3); mapDriver.addInput(inputKey4, feature4); final List> mapperResults = mapDriver.run(); assertEquals( 10, // includes overlap
mapperResults.size()); assertFalse(getPartitionDataFor(mapperResults, feature1.getID(), true).isEmpty()); assertFalse(getPartitionDataFor(mapperResults, feature2.getID(), true).isEmpty()); assertFalse(getPartitionDataFor(mapperResults, feature2.getID(), false).isEmpty()); assertFalse(getPartitionDataFor(mapperResults, feature3.getID(), true).isEmpty()); assertTrue( intersects( getPartitionDataFor(mapperResults, feature1.getID(), true), getPartitionDataFor(mapperResults, feature3.getID(), true))); assertTrue( intersects( getPartitionDataFor(mapperResults, feature2.getID(), false), getPartitionDataFor(mapperResults, feature4.getID(), false))); final List>> partitions = getReducerDataFromMapperInput(mapperResults); assertEquals(3, partitions.size()); reduceDriver.addAll(partitions); final List> reduceResults = reduceDriver.run(); assertEquals(4, reduceResults.size()); assertEquals(feature3.getID(), find(reduceResults, feature1.getID()).toString()); assertEquals(feature1.getID(), find(reduceResults, feature3.getID()).toString()); assertEquals(feature4.getID(), find(reduceResults, feature2.getID()).toString()); assertEquals(feature2.getID(), find(reduceResults, feature4.getID()).toString()); } /* Verifies PartitionDataWritable ordering (the primary flag is excluded from compareTo: equal keys with different primary flags compare equal) and the write/readFields round-trip. */ @Test public void testWritable() throws IOException { final PartitionDataWritable writable1 = new
PartitionDataWritable(); final PartitionDataWritable writable2 = new PartitionDataWritable(); writable1.setPartitionData( new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abc"), true)); writable2.setPartitionData( new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abc"), false)); assertTrue(writable1.compareTo(writable2) == 0); writable2.setPartitionData( new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abd"), false)); assertTrue(writable1.compareTo(writable2) < 0); writable2.setPartitionData( new PartitionData(new ByteArray(new byte[] {}), new ByteArray("abd"), true)); assertTrue(writable1.compareTo(writable2) < 0); final DataOutputBuffer output = new DataOutputBuffer(); writable1.write(output); output.flush(); final DataInputByteBuffer input = new DataInputByteBuffer(); input.reset(ByteBuffer.wrap(output.getData())); writable2.readFields(input); assertTrue(writable1.compareTo(writable2) == 0); } /* Returns the reducer output value whose key matches the given feature id, or null. */ private Text find(final List> outputSet, final String key) { for (final Pair item : outputSet) { if (key.equals(item.getFirst().toString())) { return item.getSecond(); } } return null; } /* Groups mapper output pairs by partition key into reducer-style (key, values) inputs. */ private List>> getReducerDataFromMapperInput( final List> mapperResults) { final List>> reducerInputSet = new ArrayList<>(); for (final Pair pair : mapperResults) { getListFor(pair.getFirst(), reducerInputSet).add(pair.getSecond()); } return reducerInputSet; } /* Finds (or creates) the value list for the given partition key within the reducer input set. */ private List getListFor( final PartitionDataWritable pd, final List>> reducerInputSet) { for (final Pair> pair : reducerInputSet) { if (pair.getFirst().compareTo(pd) == 0) { return pair.getSecond(); } } final List newPairList = new ArrayList<>(); reducerInputSet.add(new Pair(pd, newPairList)); return newPairList; } /* True when any partition in setOne shares a composite key with a partition in setTwo. */ private boolean intersects(final List setOne, final List setTwo) { for (final PartitionData pdOne : setOne) { for (final PartitionData pdTwo : setTwo) { if (pdOne.getCompositeKey().equals(pdTwo.getCompositeKey())) { return true; } } } return false; } /* Collects the mapper-emitted PartitionData entries for the given feature id and primary flag. */ private List
getPartitionDataFor( final List> mapperResults, final String id, final boolean primary) { final ArrayList results = new ArrayList<>(); for (final Pair pair : mapperResults) { if (((FeatureWritable) pair.getSecond().getObjectWritable().get()).getFeature().getID().equals( id) && (pair.getFirst().partitionData.isPrimary() == primary)) { results.add(pair.getFirst().partitionData); } } return results; } } ================================================ FILE: analytics/mapreduce/src/test/java/org/locationtech/geowave/analytic/mapreduce/operations/options/PropertyManagementConverterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.mapreduce.operations.options; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.param.GlobalParameters; /* Verifies that PropertyManagementConverter copies option values (here the global batch id) from a DBScanOptions instance into PropertyManagement under the matching ParameterEnum. */ public class PropertyManagementConverterTest { /* Round-trip: set the value on the options object, convert, then read it back via GlobalParameters.Global.BATCH_ID. */ @Test public void testConverter() throws Exception { final PropertyManagement propMgmt = new PropertyManagement(); final PropertyManagementConverter conv = new PropertyManagementConverter(propMgmt); final DBScanOptions opts = new DBScanOptions(); opts.setGlobalBatchId("some-value"); conv.readProperties(opts); Assert.assertEquals("some-value", propMgmt.getProperty(GlobalParameters.Global.BATCH_ID)); } } ================================================ FILE: analytics/mapreduce/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.analytic.mapreduce.TestMapReducePersistableRegistry ================================================ FILE: analytics/mapreduce/src/test/resources/log4j.properties ================================================ log4j.rootLogger=INFO, stdout # Direct log messages to stdout log4j.appender.stdout=org.apache.logging.log4j.core.appender.ConsoleAppender log4j.appender.stdout.Target=System.out log4j.appender.stdout.layout=org.apache.logging.log4j.core.layout.PatternLayout log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n ================================================ FILE: analytics/pom.xml ================================================ 4.0.0
geowave-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-analytic-parent GeoWave Analytics Parent POM The set of analytics provided for GeoWave Datasets pom api spark mapreduce pyspark ================================================ FILE: analytics/pyspark/.gitignore ================================================ __pycache__ ================================================ FILE: analytics/pyspark/pom.xml ================================================ geowave-analytic-parent org.locationtech.geowave 2.0.2-SNAPSHOT 4.0.0 python GeoWave pyspark geowave-analytic-pyspark pom org.apache.maven.plugins maven-resources-plugin 3.1.0 copy-resources process-resources copy-resources ${project.build.directory}/python src/main/python ** **/*.pyc true python org.codehaus.mojo exec-maven-plugin 1.6.0 ${python.executable} setuptools package package exec ${project.build.directory}/python setup.py sdist --dist-dir=${project.build.directory} org.codehaus.mojo build-helper-maven-plugin attach-artifacts package attach-artifact ${project.build.directory}/geowave_pyspark-${project.version}.tar.gz tar.gz ================================================ FILE: analytics/pyspark/src/main/python/geowave_pyspark/__init__.py ================================================ ############################################################################### # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt ############################################################################## import types ================================================ FILE: analytics/pyspark/src/main/python/geowave_pyspark/types.py ================================================ ############################################################################### # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt ############################################################################## from shapely import wkb from shapely.geometry import LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon from shapely.geometry.base import BaseGeometry from pyspark.sql.types import UserDefinedType, StructField, BinaryType, StructType class AbstractGeometryUDT(UserDefinedType): @classmethod def sqlType(cls): return StructType([StructField("wkb", BinaryType(), True)]) @classmethod def module(cls): return 'geowave_pyspark.types' @classmethod def scalaUDT(cls): return 'org.locationtech.geowave.analytic.spark.sparksql.udt.' 
+ cls.__name__ def serialize(self, obj): return _serialize_to_wkb(obj) def deserialize(self, datum): return _deserialize_from_wkb(datum[0]) class PointUDT(AbstractGeometryUDT): pass class LineStringUDT(AbstractGeometryUDT): pass class PolygonUDT(AbstractGeometryUDT): pass class MultiPointUDT(AbstractGeometryUDT): pass class MultiLineStringUDT(AbstractGeometryUDT): pass class MultiPolygonUDT(AbstractGeometryUDT): pass class GeometryUDT(AbstractGeometryUDT): pass def _serialize_to_wkb(data): if isinstance(data, BaseGeometry): return bytearray(data.wkb) return None def _deserialize_from_wkb(data): if data is None: return None return wkb.loads(bytes(data)) _deserialize_from_wkb.__safe_for_unpickling__ = True # Spark expects a private link to the UDT representation of the class Point.__UDT__ = PointUDT() MultiPoint.__UDT__ = MultiPointUDT() LineString.__UDT__ = LineStringUDT() MultiLineString.__UDT__ = MultiLineStringUDT() Polygon.__UDT__ = PolygonUDT() MultiPolygon.__UDT__ = MultiPolygonUDT() BaseGeometry.__UDT__ = GeometryUDT() # make Geometry dumps a little cleaner BaseGeometry.__repr__ = BaseGeometry.__str__ ================================================ FILE: analytics/pyspark/src/main/python/setup.py ================================================ ############################################################################### # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt ############################################################################## from setuptools import setup, find_packages setup( name='geowave_pyspark', version='${project.version}', url='https://locationtech.github.io/geowave/', packages=find_packages(), install_requires=['pytz', 'shapely', 'pyspark>=2.1.1,<2.3.1'] ) ================================================ FILE: analytics/spark/pom.xml ================================================ 4.0.0 geowave-analytic-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT 1.8 2.11.8 geowave-analytic-spark GeoWave Spark Analytics org.apache.spark spark-core_2.12 compile io.netty netty org.apache.spark spark-mllib_2.12 ${spark.version} compile org.locationtech.geowave geowave-analytic-mapreduce ${project.version} provided org.locationtech.geowave geowave-analytic-api org.scalatest scalatest_2.12 ${spark.version} test net.alchim31.maven scala-maven-plugin 3.2.0 com.github.spotbugs spotbugs-maven-plugin true build-installer-plugin maven-assembly-plugin ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/AnalyticOperationCLIProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark; import org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection; import org.locationtech.geowave.analytic.spark.kde.operations.KDESparkCommand; import org.locationtech.geowave.analytic.spark.kmeans.operations.KmeansSparkCommand; import org.locationtech.geowave.analytic.spark.resize.ResizeSparkCommand; import org.locationtech.geowave.analytic.spark.sparksql.operations.SparkSqlCommand; import org.locationtech.geowave.analytic.spark.spatial.operations.SpatialJoinCommand; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class AnalyticOperationCLIProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { AnalyticSection.class, KmeansSparkCommand.class, KDESparkCommand.class, SparkSqlCommand.class, SpatialJoinCommand.class, ResizeSparkCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveIndexedRDD.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.apache.spark.broadcast.Broadcast; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import scala.Tuple2; public class GeoWaveIndexedRDD implements Serializable { /** * */ private static final long serialVersionUID = 1L; private static Logger LOGGER = LoggerFactory.getLogger(GeoWaveIndexedRDD.class); private final GeoWaveRDD geowaveRDD; private JavaPairRDD> rawFeatureRDD = null; private JavaPairRDD> rawGeometryRDD = null; // Because it can be expensive to serialize IndexStrategy for every record. // Index strategy must be able to be broadcast. 
private Broadcast indexStrategy = null; public GeoWaveIndexedRDD( final GeoWaveRDD geowaveRDD, final Broadcast indexStrategy) { this.geowaveRDD = geowaveRDD; this.indexStrategy = indexStrategy; } public void reset() { rawFeatureRDD = null; rawGeometryRDD = null; } public void reindex(final Broadcast newIndexStrategy) { // Remove original indexing strategy if (indexStrategy != null) { indexStrategy.unpersist(); } indexStrategy = (Broadcast) newIndexStrategy; reset(); } public JavaPairRDD> getIndexedFeatureRDD() { return this.getIndexedFeatureRDD(0.0); } public JavaPairRDD> getIndexedFeatureRDD( final double bufferAmount) { verifyParameters(); if (!geowaveRDD.isLoaded()) { LOGGER.error("Must provide a loaded RDD."); return null; } if (rawFeatureRDD == null) { final JavaPairRDD> indexedData = geowaveRDD.getRawRDD().flatMapToPair( new PairFlatMapFunction, ByteArray, Tuple2>() { /** * */ private static final long serialVersionUID = 1L; @Override public Iterator>> call( final Tuple2 t) throws Exception { // Flattened output array. final List>> result = new ArrayList<>(); // Pull feature to index from tuple final SimpleFeature inputFeature = t._2; // If we are dealing with null or empty // geometry we can't properly compare this // feature. 
final Geometry geom = (Geometry) inputFeature.getDefaultGeometry(); if (geom == null) { return Collections.emptyIterator(); } final Envelope internalEnvelope = geom.getEnvelopeInternal(); if (internalEnvelope.isNull()) { return Collections.emptyIterator(); } // If we have to buffer geometry for // predicate expand bounds internalEnvelope.expandBy(bufferAmount); // Get data range from expanded envelope final MultiDimensionalNumericData boundsRange = GeometryUtils.getBoundsFromEnvelope(internalEnvelope); final NumericIndexStrategy index = indexStrategy.value(); InsertionIds insertIds = index.getInsertionIds(boundsRange, 80); // If we didnt expand the envelope for // buffering we can trim the indexIds by the // geometry if (bufferAmount == 0.0) { insertIds = RDDUtils.trimIndexIds(insertIds, geom, index); } for (final Iterator iter = insertIds.getCompositeInsertionIds().iterator(); iter.hasNext();) { final byte[] id = iter.next(); final Tuple2 valuePair = new Tuple2<>(t._1, inputFeature); final Tuple2> indexPair = new Tuple2<>(new ByteArray(id), valuePair); result.add(indexPair); } return result.iterator(); } }); rawFeatureRDD = indexedData; } return rawFeatureRDD; } public JavaPairRDD> getIndexedGeometryRDD() { return this.getIndexedGeometryRDD(0.0, false); } public JavaPairRDD> getIndexedGeometryRDD( final double bufferAmount, final boolean recalculate) { verifyParameters(); if (!geowaveRDD.isLoaded()) { LOGGER.error("Must provide a loaded RDD."); return null; } if ((rawGeometryRDD == null) || recalculate) { rawGeometryRDD = geowaveRDD.getRawRDD().filter( t -> ((t._2.getDefaultGeometry() != null) && !((Geometry) t._2.getDefaultGeometry()).getEnvelopeInternal().isNull())).flatMapToPair( new PairFlatMapFunction, ByteArray, Tuple2>() { /** * */ private static final long serialVersionUID = 1L; @Override public Iterator>> call( final Tuple2 t) throws Exception { // Pull feature to index from tuple final SimpleFeature inputFeature = t._2; // If we are dealing with null or 
empty // geometry we can't properly compare this // feature. final Geometry geom = (Geometry) inputFeature.getDefaultGeometry(); final Envelope internalEnvelope = geom.getEnvelopeInternal(); // If we have to buffer geometry for // predicate expand bounds internalEnvelope.expandBy(bufferAmount); // Get data range from expanded envelope final MultiDimensionalNumericData boundsRange = GeometryUtils.getBoundsFromEnvelope(internalEnvelope); final NumericIndexStrategy index = indexStrategy.value(); InsertionIds insertIds = index.getInsertionIds(boundsRange, 80); // If we didnt expand the envelope for // buffering we can trim the indexIds by the // geometry if (bufferAmount == 0.0) { insertIds = RDDUtils.trimIndexIds(insertIds, geom, index); } // Flattened output array. final List>> result = Lists.newArrayListWithCapacity(insertIds.getSize()); for (final Iterator iter = insertIds.getCompositeInsertionIds().iterator(); iter.hasNext();) { final byte[] id = iter.next(); final Tuple2 valuePair = new Tuple2<>(t._1, geom); final Tuple2> indexPair = new Tuple2<>(new ByteArray(id), valuePair); result.add(indexPair); } return result.iterator(); } }); } return rawGeometryRDD; } public Broadcast getIndexStrategy() { return indexStrategy; } public GeoWaveRDD getGeoWaveRDD() { return geowaveRDD; } private boolean verifyParameters() { if (geowaveRDD == null) { LOGGER.error("Must supply a input rdd to index. Please set one and try again."); return false; } if (indexStrategy == null) { LOGGER.error("Broadcasted strategy must be set before features can be indexed."); return false; } return true; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRDD.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark; import java.io.Serializable; import org.apache.spark.api.java.JavaPairRDD; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.opengis.feature.simple.SimpleFeature; public class GeoWaveRDD implements Serializable { /** * */ private static final long serialVersionUID = 1L; private JavaPairRDD rawRDD = null; public GeoWaveRDD() {} public GeoWaveRDD(final JavaPairRDD rawRDD) { this.rawRDD = rawRDD; } public JavaPairRDD getRawRDD() { return rawRDD; } public void setRawRDD(final JavaPairRDD rawRDD) { this.rawRDD = rawRDD; } public boolean isLoaded() { return (getRawRDD() != null); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRDDLoader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.spark.SparkContext; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.broadcast.Broadcast; import org.apache.spark.rdd.RDD; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.opengis.coverage.grid.GridCoverage; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import scala.Tuple2; public class GeoWaveRDDLoader { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRDDLoader.class); public static GeoWaveRDD loadRDD(final SparkContext sc, final DataStorePluginOptions storeOptions) throws IOException { final RDDOptions defaultOptions = new RDDOptions(); return GeoWaveRDDLoader.loadRDD(sc, storeOptions, defaultOptions); } public static GeoWaveRDD loadRDD( final SparkContext sc, final DataStorePluginOptions storeOptions, final RDDOptions rddOpts) throws IOException { final JavaPairRDD rawRDD = GeoWaveRDDLoader.loadRawRDD(sc, storeOptions, rddOpts); return new GeoWaveRDD(rawRDD); } public static GeoWaveIndexedRDD loadIndexedRDD( final SparkContext sc, final DataStorePluginOptions storeOptions, final RDDOptions rddOpts, final NumericIndexStrategy indexStrategy) throws IOException { final GeoWaveRDD wrappedRDD 
= GeoWaveRDDLoader.loadRDD(sc, storeOptions, rddOpts); // Index strategy can be expensive so we will broadcast it and store it Broadcast broadcastStrategy = null; if (indexStrategy != null) { broadcastStrategy = (Broadcast) RDDUtils.broadcastIndexStrategy(sc, indexStrategy); } final GeoWaveIndexedRDD returnRDD = new GeoWaveIndexedRDD(wrappedRDD, broadcastStrategy); return returnRDD; } public static GeoWaveIndexedRDD loadIndexedRDD( final SparkContext sc, final GeoWaveRDD inputRDD, final NumericIndexStrategy indexStrategy) throws IOException { if ((inputRDD == null) || !inputRDD.isLoaded()) { return null; } // Index strategy can be expensive so we will broadcast it and store it Broadcast broadcastStrategy = null; if (indexStrategy != null) { broadcastStrategy = (Broadcast) RDDUtils.broadcastIndexStrategy(sc, indexStrategy); } final GeoWaveIndexedRDD returnRDD = new GeoWaveIndexedRDD(inputRDD, broadcastStrategy); return returnRDD; } public static JavaPairRDD loadRawRDD( final SparkContext sc, final DataStorePluginOptions storeOptions, final RDDOptions rddOpts) throws IOException { if (sc == null) { LOGGER.error("Must supply a valid Spark Context. Please set SparkContext and try again."); return null; } if (storeOptions == null) { LOGGER.error("Must supply input store to load. 
Please set storeOptions and try again."); return null; } if (rddOpts == null) { LOGGER.error("Must supply valid RDDOptions to load a rdd."); return null; } final Configuration conf = new Configuration(sc.hadoopConfiguration()); GeoWaveInputFormat.setStoreOptions(conf, storeOptions); if (rddOpts.getQuery() != null) { GeoWaveInputFormat.setQuery( conf, rddOpts.getQuery(), storeOptions.createAdapterStore(), storeOptions.createInternalAdapterStore(), storeOptions.createIndexStore()); } if ((rddOpts.getMinSplits() > -1) || (rddOpts.getMaxSplits() > -1)) { GeoWaveInputFormat.setMinimumSplitCount(conf, rddOpts.getMinSplits()); GeoWaveInputFormat.setMaximumSplitCount(conf, rddOpts.getMaxSplits()); } else { final int defaultSplitsSpark = sc.getConf().getInt("spark.default.parallelism", -1); // Attempt to grab default partition count for spark and split data // along that. // Otherwise just fallback to default according to index strategy if (defaultSplitsSpark != -1) { GeoWaveInputFormat.setMinimumSplitCount(conf, defaultSplitsSpark); GeoWaveInputFormat.setMaximumSplitCount(conf, defaultSplitsSpark); } } final RDD> rdd = sc.newAPIHadoopRDD( conf, GeoWaveInputFormat.class, GeoWaveInputKey.class, SimpleFeature.class); final JavaPairRDD javaRdd = JavaPairRDD.fromJavaRDD(rdd.toJavaRDD()); return javaRdd; } public static JavaPairRDD loadRawRasterRDD( final SparkContext sc, final DataStorePluginOptions storeOptions, final String indexName, final Integer minSplits, final Integer maxSplits) throws IOException { if (sc == null) { LOGGER.error("Must supply a valid Spark Context. Please set SparkContext and try again."); return null; } if (storeOptions == null) { LOGGER.error("Must supply input store to load. 
Please set storeOptions and try again."); return null; } final Configuration conf = new Configuration(sc.hadoopConfiguration()); GeoWaveInputFormat.setStoreOptions(conf, storeOptions); if (indexName != null) { GeoWaveInputFormat.setQuery( conf, QueryBuilder.newBuilder().indexName(indexName).build(), storeOptions.createAdapterStore(), storeOptions.createInternalAdapterStore(), storeOptions.createIndexStore()); } if (((minSplits != null) && (minSplits > -1)) || ((maxSplits != null) && (maxSplits > -1))) { GeoWaveInputFormat.setMinimumSplitCount(conf, minSplits); GeoWaveInputFormat.setMaximumSplitCount(conf, maxSplits); } else { final int defaultSplitsSpark = sc.getConf().getInt("spark.default.parallelism", -1); // Attempt to grab default partition count for spark and split data // along that. // Otherwise just fallback to default according to index strategy if (defaultSplitsSpark != -1) { GeoWaveInputFormat.setMinimumSplitCount(conf, defaultSplitsSpark); GeoWaveInputFormat.setMaximumSplitCount(conf, defaultSplitsSpark); } } final RDD> rdd = sc.newAPIHadoopRDD( conf, GeoWaveInputFormat.class, GeoWaveInputKey.class, GridCoverage.class); final JavaPairRDD javaRdd = JavaPairRDD.fromJavaRDD(rdd.toJavaRDD()); return javaRdd; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRasterRDD.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark; import java.io.Serializable; import org.apache.spark.api.java.JavaPairRDD; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.opengis.coverage.grid.GridCoverage; public class GeoWaveRasterRDD implements Serializable { /** * */ private static final long serialVersionUID = 1L; private JavaPairRDD rawRDD = null; public GeoWaveRasterRDD() {} public GeoWaveRasterRDD(final JavaPairRDD rawRDD) { this.rawRDD = rawRDD; } public JavaPairRDD getRawRDD() { return rawRDD; } public void setRawRDD(final JavaPairRDD rawRDD) { this.rawRDD = rawRDD; } public boolean isLoaded() { return (getRawRDD() != null); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveRegistrator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark;

import org.apache.spark.serializer.KryoRegistrator;
import org.geotools.feature.simple.SimpleFeatureImpl;
import org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;
import org.locationtech.geowave.analytic.kryo.FeatureSerializer;
import org.locationtech.geowave.analytic.kryo.GridCoverageWritableSerializer;
import org.locationtech.geowave.analytic.kryo.PersistableSerializer;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.persist.PersistableFactory;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.prep.PreparedGeometry;
import com.esotericsoftware.kryo.Kryo;

/** Registers GeoWave, JTS, and GeoTools types with Kryo so Spark can serialize them. */
public class GeoWaveRegistrator implements KryoRegistrator {
  @Override
  public void registerClasses(final Kryo kryo) {
    // Reuse the existing custom serializers for feature, raster, and persistable types
    final FeatureSerializer featureSerializer = new FeatureSerializer();
    final GridCoverageWritableSerializer coverageSerializer =
        new GridCoverageWritableSerializer();
    final PersistableSerializer persistableSerializer = new PersistableSerializer();
    // Every class known to the PersistableFactory is serialized via PersistableSerializer
    PersistableFactory.getInstance().getClassIdMapping().keySet().forEach(
        persistableClass -> kryo.register(persistableClass, persistableSerializer));
    kryo.register(GeoWaveRDD.class);
    kryo.register(GeoWaveIndexedRDD.class);
    kryo.register(Geometry.class);
    kryo.register(PreparedGeometry.class);
    kryo.register(ByteArray.class);
    kryo.register(GeoWaveInputKey.class);
    kryo.register(SimpleFeatureImpl.class, featureSerializer);
    kryo.register(GridCoverageWritable.class, coverageSerializer);
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/GeoWaveSparkConf.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark;

import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.SparkSession.Builder;
import org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// This class is used to create SparkConf and SparkSessions that will be compatible with GeoWave.
public class GeoWaveSparkConf implements Serializable {
  private static final long serialVersionUID = 1L;
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveSparkConf.class);

  /**
   * @return a SparkConf with just the basic settings necessary for spark to work with GeoWave
   *         (yarn master, Kryo serializer, GeoWave Kryo registrator)
   */
  public static SparkConf getDefaultConfig() {
    SparkConf defaultConfig = new SparkConf();
    defaultConfig = defaultConfig.setMaster("yarn");
    defaultConfig =
        defaultConfig.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    defaultConfig =
        defaultConfig.set(
            "spark.kryo.registrator",
            "org.locationtech.geowave.analytic.spark.GeoWaveRegistrator");
    return defaultConfig;
  }

  /**
   * Returns a *NEW* SparkConf with GeoWave default settings applied using {@code userConf} as
   * base; the user's config is never mutated.
   */
  public static SparkConf applyDefaultsToConfig(final SparkConf userConf) {
    SparkConf newConf = userConf.clone();
    newConf = newConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    newConf =
        newConf.set(
            "spark.kryo.registrator",
            "org.locationtech.geowave.analytic.spark.GeoWaveRegistrator");
    return newConf;
  }

  /** Create a default SparkSession with GeoWave settings applied to config. */
  public static SparkSession createDefaultSession() {
    final SparkConf defaultConfig = GeoWaveSparkConf.getDefaultConfig();
    return GeoWaveSparkConf.internalCreateSession(defaultConfig, null);
  }

  /**
   * Create a SparkSession with GeoWave settings and then user configuration options added on top
   * of defaults.
   */
  public static SparkSession createDefaultSession(final SparkConf addonOptions) {
    final SparkConf defaultConfig = GeoWaveSparkConf.getDefaultConfig();
    return GeoWaveSparkConf.internalCreateSession(defaultConfig, addonOptions);
  }

  /**
   * Create a SparkSession from default config with additional options, if set. Mainly used from
   * command line runners.
   *
   * @param appName application name, or null to keep the default
   * @param master spark master designation, or null for "yarn"
   * @param host driver host; ignored (with a warning) when master is "yarn"
   * @param jars comma-separated jars for "spark.jars", or null for none
   */
  public static SparkSession createSessionFromParams(
      final String appName,
      String master,
      final String host,
      final String jars) {
    // Grab default config for GeoWave
    SparkConf defaultConfig = GeoWaveSparkConf.getDefaultConfig();
    // Apply master from default
    if (master == null) {
      master = "yarn";
    }
    // Apply user options if set, correctly handling host for yarn.
    if (appName != null) {
      defaultConfig = defaultConfig.setAppName(appName);
    }
    defaultConfig = defaultConfig.setMaster(master);
    if (host != null) {
      // BUG FIX: original compared `master != "yarn"` — a String reference comparison that only
      // worked by interning luck; compare content instead.
      if (!"yarn".equals(master)) {
        defaultConfig = defaultConfig.set("spark.driver.host", host);
      } else {
        LOGGER.warn(
            "Attempting to set spark driver host for yarn master. Normally this is handled via hadoop configuration. Remove host or set another master designation and try again.");
      }
    }
    if (jars != null) {
      defaultConfig = defaultConfig.set("spark.jars", jars);
    }
    // Finally return the session from builder
    return GeoWaveSparkConf.internalCreateSession(defaultConfig, null);
  }

  private static SparkSession internalCreateSession(
      final SparkConf conf,
      final SparkConf addonOptions) {
    // Create initial SessionBuilder from default Configuration.
    Builder builder = SparkSession.builder().config(conf);
    // Ensure SpatialEncoders and UDTs are registered at each session creation.
    GeoWaveSpatialEncoders.registerUDTs();
    if (addonOptions != null) {
      builder = builder.config(addonOptions);
    }
    return builder.getOrCreate();
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/RDDOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark; import org.locationtech.geowave.core.store.api.Query; public class RDDOptions { private Query query = null; private int minSplits = -1; private int maxSplits = -1; public RDDOptions() {} public Query getQuery() { return query; } public void setQuery(final Query query) { this.query = query; } public int getMinSplits() { return minSplits; } public void setMinSplits(final int minSplits) { this.minSplits = minSplits; } public int getMaxSplits() { return maxSplits; } public void setMaxSplits(final int maxSplits) { this.maxSplits = maxSplits; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/RDDUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark;

import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.linalg.Vectors;
import org.geotools.geometry.jts.JTS;
import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;
import org.locationtech.geowave.core.geotime.store.query.ScaledTemporalRange;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.SinglePartitionInsertionIds;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.operation.predicate.RectangleIntersects;
import org.opengis.coverage.grid.GridCoverage;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;
import scala.reflect.ClassTag;

/** Helper routines for writing GeoWave RDDs back to a store and deriving feature RDDs. */
public class RDDUtils {
  private static final Logger LOGGER = LoggerFactory.getLogger(RDDUtils.class);

  /**
   * Translate a set of objects in a JavaRDD to SimpleFeatures and push to GeoWave
   *
   * @throws IOException
   */
  public static void writeRDDToGeoWave(
      final SparkContext sc,
      final Index index,
      final DataStorePluginOptions outputStoreOptions,
      final DataTypeAdapter adapter,
      final GeoWaveRDD inputRDD) throws IOException {
    if (!inputRDD.isLoaded()) {
      LOGGER.error("Must provide a loaded RDD.");
      return;
    }
    writeToGeoWave(sc, index, outputStoreOptions, adapter, inputRDD.getRawRDD().values());
  }

  /**
   * Writes the RDD once per supplied index.
   *
   * @throws IOException
   */
  public static void writeRDDToGeoWave(
      final SparkContext sc,
      final Index[] indices,
      final DataStorePluginOptions outputStoreOptions,
      final DataTypeAdapter adapter,
      final GeoWaveRDD inputRDD) throws IOException {
    if (!inputRDD.isLoaded()) {
      LOGGER.error("Must provide a loaded RDD.");
      return;
    }
    for (int iStrategy = 0; iStrategy < indices.length; iStrategy += 1) {
      writeToGeoWave(
          sc,
          indices[iStrategy],
          outputStoreOptions,
          adapter,
          inputRDD.getRawRDD().values());
    }
  }

  /** @return an RDD of the centroid of each feature's default geometry, or null if not loaded */
  public static JavaRDD<Point> rddFeatureCentroids(final GeoWaveRDD inputRDD) {
    if (!inputRDD.isLoaded()) {
      LOGGER.error("Must provide a loaded RDD.");
      return null;
    }
    final JavaRDD<Point> centroids = inputRDD.getRawRDD().values().map(feature -> {
      final Geometry geom = (Geometry) feature.getDefaultGeometry();
      return geom.getCentroid();
    });
    return centroids;
  }

  public static JavaRDD<Vector> rddFeatureVectors(final GeoWaveRDD inputRDD) {
    return rddFeatureVectors(inputRDD, null, null);
  }

  /**
   * Maps each feature to a dense vector of (x, y[, scaled time]). A ranged time schema is given
   * as "startField|endField" and the midpoint of the range is used.
   *
   * @param inputRDD a loaded RDD
   * @param timeField attribute name (or "start|end" pair) holding the time, or null for 2D only
   * @param scaledRange used to scale the time value; must be non-null when a time is resolved
   * @return the vector RDD, or null if {@code inputRDD} is not loaded
   */
  public static JavaRDD<Vector> rddFeatureVectors(
      final GeoWaveRDD inputRDD,
      final String timeField,
      final ScaledTemporalRange scaledRange) {
    if (!inputRDD.isLoaded()) {
      LOGGER.error("Must provide a loaded RDD.");
      return null;
    }
    final JavaRDD<Vector> vectorRDD = inputRDD.getRawRDD().values().map(feature -> {
      final Point centroid = ((Geometry) feature.getDefaultGeometry()).getCentroid();
      int numValues = 2;
      Date time = null;
      if (timeField != null) {
        // if this is a ranged schema, we have to take the midpoint
        if (timeField.contains("|")) {
          final int pipeIndex = timeField.indexOf("|");
          final String startField = timeField.substring(0, pipeIndex);
          final String endField = timeField.substring(pipeIndex + 1);
          final Date start = (Date) feature.getAttribute(startField);
          final Date end = (Date) feature.getAttribute(endField);
          final long halfDur = (end.getTime() - start.getTime()) / 2;
          time = new Date(start.getTime() + halfDur);
        } else {
          time = (Date) feature.getAttribute(timeField);
        }
        if (time != null) {
          numValues++;
        }
      }
      final double[] values = new double[numValues];
      values[0] = centroid.getX();
      values[1] = centroid.getY();
      if (time != null) {
        values[2] = scaledRange.timeToValue(time);
      }
      return Vectors.dense(values);
    });
    return vectorRDD;
  }

  /**
   * Removes sort keys whose 2D range tile does not intersect the geometry; mutates and returns
   * {@code rawIds}. Only partitions with more than 3 sort keys are trimmed (cheaper to keep a
   * few extras than to test every tile).
   */
  public static InsertionIds trimIndexIds(
      final InsertionIds rawIds,
      final Geometry geom,
      final NumericIndexStrategy index) {
    for (final SinglePartitionInsertionIds insertionId : rawIds.getPartitionKeys()) {
      final byte[] partitionKey = insertionId.getPartitionKey();
      final int size = insertionId.getSortKeys().size();
      if (size > 3) {
        final Iterator<byte[]> it = insertionId.getSortKeys().iterator();
        while (it.hasNext()) {
          final byte[] sortKey = it.next();
          final MultiDimensionalNumericData keyTile = index.getRangeForId(partitionKey, sortKey);
          final Envelope other = new Envelope();
          other.init(
              keyTile.getMinValuesPerDimension()[0],
              keyTile.getMaxValuesPerDimension()[0],
              keyTile.getMinValuesPerDimension()[1],
              keyTile.getMaxValuesPerDimension()[1]);
          final Polygon rect = JTS.toGeometry(other);
          if (!RectangleIntersects.intersects(rect, geom)) {
            it.remove();
          }
        }
      }
    }
    return rawIds;
  }

  /**
   * Translate a set of objects in a JavaRDD to a provided type and push to GeoWave
   *
   * @throws IOException
   */
  private static void writeToGeoWave(
      final SparkContext sc,
      final Index index,
      final DataStorePluginOptions outputStoreOptions,
      final DataTypeAdapter adapter,
      final JavaRDD<SimpleFeature> inputRDD) throws IOException {
    // setup the configuration and the output format
    final Configuration conf = new org.apache.hadoop.conf.Configuration(sc.hadoopConfiguration());
    GeoWaveOutputFormat.setStoreOptions(conf, outputStoreOptions);
    GeoWaveOutputFormat.addIndex(conf, index);
    GeoWaveOutputFormat.addDataAdapter(conf, adapter);
    // create the job
    final Job job = new Job(conf);
    job.setOutputKeyClass(GeoWaveOutputKey.class);
    job.setOutputValueClass(SimpleFeature.class);
    job.setOutputFormatClass(GeoWaveOutputFormat.class);
    // broadcast string names so the lambda doesn't capture the adapter/index
    final ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
    final Broadcast<String> typeName = sc.broadcast(adapter.getTypeName(), stringTag);
    final Broadcast<String> indexName = sc.broadcast(index.getName(), stringTag);
    // map to a pair containing the output key and the output value
    inputRDD.mapToPair(
        feat -> new Tuple2<>(
            new GeoWaveOutputKey(typeName.value(), indexName.value()),
            feat)).saveAsNewAPIHadoopDataset(job.getConfiguration());
  }

  /**
   * Writes a raster (GridCoverage) RDD to GeoWave via the output format.
   *
   * @throws IOException
   */
  public static void writeRasterToGeoWave(
      final SparkContext sc,
      final Index index,
      final DataStorePluginOptions outputStoreOptions,
      final RasterDataAdapter adapter,
      final JavaRDD<GridCoverage> inputRDD) throws IOException {
    // setup the configuration and the output format
    final Configuration conf = new org.apache.hadoop.conf.Configuration(sc.hadoopConfiguration());
    GeoWaveOutputFormat.setStoreOptions(conf, outputStoreOptions);
    GeoWaveOutputFormat.addIndex(conf, index);
    GeoWaveOutputFormat.addDataAdapter(conf, adapter);
    // create the job
    final Job job = new Job(conf);
    job.setOutputKeyClass(GeoWaveOutputKey.class);
    job.setOutputValueClass(GridCoverage.class);
    job.setOutputFormatClass(GeoWaveOutputFormat.class);
    // broadcast string names
    final ClassTag<String> stringTag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
    final Broadcast<String> typeName = sc.broadcast(adapter.getTypeName(), stringTag);
    final Broadcast<String> indexName = sc.broadcast(index.getName(), stringTag);
    // map to a pair containing the output key and the output value
    inputRDD.mapToPair(
        gridCoverage -> new Tuple2<>(
            new GeoWaveOutputKey(typeName.value(), indexName.value()),
            gridCoverage)).saveAsNewAPIHadoopDataset(job.getConfiguration());
  }

  /** Broadcasts the (potentially expensive to serialize) index strategy to the cluster. */
  public static Broadcast<NumericIndexStrategy> broadcastIndexStrategy(
      final SparkContext sc,
      final NumericIndexStrategy indexStrategy) {
    final ClassTag<NumericIndexStrategy> indexClassTag =
        scala.reflect.ClassTag$.MODULE$.apply(indexStrategy.getClass());
    final Broadcast<NumericIndexStrategy> broadcastStrategy =
        sc.broadcast(indexStrategy, indexClassTag);
    return broadcastStrategy;
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kde/KDERunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.kde; import java.awt.image.WritableRaster; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import org.apache.commons.io.FilenameUtils; import org.apache.spark.RangePartitioner; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.Function; import org.apache.spark.api.java.function.Function2; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.apache.spark.sql.SparkSession; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import
org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.adapter.raster.adapter.ClientMergeableRasterTile; import org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.analytic.mapreduce.kde.CellCounter; import org.locationtech.geowave.analytic.mapreduce.kde.GaussianFilter; import org.locationtech.geowave.analytic.mapreduce.kde.KDEReducer; import org.locationtech.geowave.analytic.spark.GeoWaveRDD; import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader; import org.locationtech.geowave.analytic.spark.GeoWaveSparkConf; import org.locationtech.geowave.analytic.spark.RDDOptions; import org.locationtech.geowave.analytic.spark.RDDUtils; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.HadoopWritableSerializer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.opengis.coverage.grid.GridCoverage; import org.opengis.feature.simple.SimpleFeature; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import 
org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Iterators; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import scala.Tuple2; public class KDERunner { private static final Logger LOGGER = LoggerFactory.getLogger(KDERunner.class); public static final int NUM_BANDS = 3; protected static final String[] NAME_PER_BAND = new String[] {"Weight", "Normalized", "Percentile"}; protected static final double[] MINS_PER_BAND = new double[] {0, 0, 0}; protected static final double[] MAXES_PER_BAND = new double[] {Double.MAX_VALUE, 1, 1}; private String appName = "KDERunner"; private String master = "yarn"; private String host = "localhost"; private JavaSparkContext jsc = null; private SparkSession session = null; private DataStorePluginOptions inputDataStore = null; private DataStorePluginOptions outputDataStore = null; private String cqlFilter = null; private String typeName = null; private String indexName = null; private int minLevel = 5; private int maxLevel = 20; private int tileSize = 1; private String coverageName = "kde"; private Index outputIndex; private int minSplits = -1; private int maxSplits = -1; public KDERunner() {} private void initContext() { if (session == null) { String jar = ""; try { jar = KDERunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) { jar = ""; } } catch (final URISyntaxException e) { LOGGER.error("Unable to set jar location in spark configuration", e); } session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar); jsc = JavaSparkContext.fromSparkContext(session.sparkContext()); } } public void close() { if 
(session != null) { session.close(); session = null; } } public void setTileSize(final int tileSize) { this.tileSize = tileSize; } public void run() throws IOException { initContext(); // Validate inputs if (inputDataStore == null) { LOGGER.error("You must supply an input datastore!"); throw new IOException("You must supply an input datastore!"); } // Retrieve the feature adapters final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); List featureTypeNames; // If provided, just use the one if (typeName != null) { featureTypeNames = new ArrayList<>(); featureTypeNames.add(typeName); } else { // otherwise, grab all the feature adapters featureTypeNames = FeatureDataUtils.getFeatureTypeNames(inputDataStore); } bldr.setTypeNames(featureTypeNames.toArray(new String[0])); if (indexName != null) { bldr.indexName(indexName); } Index inputPrimaryIndex = null; final Index[] idxArray = inputDataStore.createDataStore().getIndices(); for (final Index idx : idxArray) { if ((idx != null) && ((indexName == null) || indexName.equals(idx.getName()))) { inputPrimaryIndex = idx; break; } } final CoordinateReferenceSystem inputIndexCrs = GeometryUtils.getIndexCrs(inputPrimaryIndex); final String inputCrsCode = GeometryUtils.getCrsCode(inputIndexCrs); Index outputPrimaryIndex = outputIndex; CoordinateReferenceSystem outputIndexCrs = null; final String outputCrsCode; if (outputPrimaryIndex != null) { outputIndexCrs = GeometryUtils.getIndexCrs(outputPrimaryIndex); outputCrsCode = GeometryUtils.getCrsCode(outputIndexCrs); } else { final SpatialDimensionalityTypeProvider sdp = new SpatialDimensionalityTypeProvider(); final SpatialOptions so = sdp.createOptions(); so.setCrs(inputCrsCode); outputPrimaryIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(so); outputIndexCrs = inputIndexCrs; outputCrsCode = inputCrsCode; } final CoordinateSystem cs = outputIndexCrs.getCoordinateSystem(); final CoordinateSystemAxis csx = cs.getAxis(0); final CoordinateSystemAxis csy = 
cs.getAxis(1); final double xMax = csx.getMaximumValue(); final double xMin = csx.getMinimumValue(); final double yMax = csy.getMaximumValue(); final double yMin = csy.getMinimumValue(); if ((xMax == Double.POSITIVE_INFINITY) || (xMin == Double.NEGATIVE_INFINITY) || (yMax == Double.POSITIVE_INFINITY) || (yMin == Double.NEGATIVE_INFINITY)) { LOGGER.error( "Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported"); throw new RuntimeException( "Raster KDE resize with raster primary index CRS dimensions min/max equal to positive infinity or negative infinity is not supported"); } if (cqlFilter != null) { bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlFilter)); } // Load RDD from datastore final RDDOptions kdeOpts = new RDDOptions(); kdeOpts.setMinSplits(minSplits); kdeOpts.setMaxSplits(maxSplits); kdeOpts.setQuery(bldr.build()); final Function identity = x -> x; final Function2 sum = (final Double x, final Double y) -> { return x + y; }; final RasterDataAdapter adapter = RasterUtils.createDataAdapterTypeDouble( coverageName, KDEReducer.NUM_BANDS, tileSize, MINS_PER_BAND, MAXES_PER_BAND, NAME_PER_BAND, new NoDataMergeStrategy()); outputDataStore.createDataStore().addType(adapter, outputPrimaryIndex); // The following "inner" variables are created to give access to member // variables within lambda // expressions final int innerTileSize = 1;// tileSize; final String innerCoverageName = coverageName; for (int level = minLevel; level <= maxLevel; level++) { final int numXTiles = (int) Math.pow(2, level + 1); final int numYTiles = (int) Math.pow(2, level); final int numXPosts = numXTiles; // * tileSize; final int numYPosts = numYTiles; // * tileSize; final GeoWaveRDD kdeRDD = GeoWaveRDDLoader.loadRDD(session.sparkContext(), inputDataStore, kdeOpts); JavaPairRDD cells = kdeRDD.getRawRDD().flatMapToPair( new GeoWaveCellMapper( numXPosts, numYPosts, xMin, xMax, yMin, yMax, inputCrsCode, 
outputCrsCode)).combineByKey(identity, sum, sum).mapToPair(item -> item.swap());

      // Range-partition and sort descending by density value so the first element is the global
      // max and each cell's position in the RDD yields its rank (used for the percentile band).
      cells =
          cells.partitionBy(
              new RangePartitioner(
                  cells.getNumPartitions(),
                  cells.rdd(),
                  true,
                  scala.math.Ordering.Double$.MODULE$,
                  scala.reflect.ClassTag$.MODULE$.apply(Double.class))).sortByKey(false).cache();
      final long count = cells.count();
      if (count == 0) {
        LOGGER.warn("No cells produced by KDE");
        continue;
      }
      // sorted descending, so the first key is the maximum density
      final double max = cells.first()._1;

      // Emit one coverage per cell with three bands: raw density, density normalized by the
      // max, and percentile rank derived from the cell's position in the sorted RDD.
      JavaRDD rdd = cells.zipWithIndex().map(t -> {
        final TileInfo tileInfo =
            fromCellIndexToTileInfo(
                t._1._2,
                numXPosts,
                numYPosts,
                numXTiles,
                numYTiles,
                xMin,
                xMax,
                yMin,
                yMax,
                innerTileSize);
        final WritableRaster raster = RasterUtils.createRasterTypeDouble(NUM_BANDS, innerTileSize);
        final double normalizedValue = t._1._1 / max;
        // because we are using a Double as the key, the ordering
        // isn't always completely reproducible as Double equals does not
        // take into account an epsilon
        final double percentile = (count - t._2) / ((double) count);
        raster.setSample(tileInfo.x, tileInfo.y, 0, t._1._1);
        raster.setSample(tileInfo.x, tileInfo.y, 1, normalizedValue);
        raster.setSample(tileInfo.x, tileInfo.y, 2, percentile);
        return RasterUtils.createCoverageTypeDouble(
            innerCoverageName,
            tileInfo.tileWestLon,
            tileInfo.tileEastLon,
            tileInfo.tileSouthLat,
            tileInfo.tileNorthLat,
            MINS_PER_BAND,
            MAXES_PER_BAND,
            NAME_PER_BAND,
            raster,
            GeometryUtils.DEFAULT_CRS_STR);
      });
      LOGGER.debug("Writing results to output store...");
      if (tileSize > 1) {
        // Re-key each single-post coverage by its index partition/sort key, then merge
        // coverages that fall on the same output tile into one.
        // byte[] adapterBytes = PersistenceUtils.toBinary(adapter);
        // byte[] indexBytes = PersistenceUtils.toBinary(outputPrimaryIndex);
        rdd =
            rdd.flatMapToPair(new TransformTileSize(adapter, outputPrimaryIndex)).groupByKey().map(
                new MergeOverlappingTiles(adapter, outputPrimaryIndex));
      }
      RDDUtils.writeRasterToGeoWave(jsc.sc(), outputPrimaryIndex, outputDataStore, adapter, rdd);
      LOGGER.debug("Results successfully written!");
    }
  }

  /**
   * Composite key pairing a GeoWave index partition key with a sort key. Used to group coverages
   * that map to the same position in the output index so overlapping tiles can be merged.
   */
  private static class PartitionAndSortKey implements Serializable {
    private static final long serialVersionUID = 1L;
    byte[] partitionKey;
    byte[] sortKey;

    public PartitionAndSortKey(final byte[] partitionKey, final byte[] sortKey) {
      super();
      this.partitionKey = partitionKey;
      this.sortKey = sortKey;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + Arrays.hashCode(partitionKey);
      result = (prime * result) + Arrays.hashCode(sortKey);
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final PartitionAndSortKey other = (PartitionAndSortKey) obj;
      if (!Arrays.equals(partitionKey, other.partitionKey)) {
        return false;
      }
      if (!Arrays.equals(sortKey, other.sortKey)) {
        return false;
      }
      return true;
    }
  }

  /**
   * Converts a flat cell index (index = xPost * numYPosts + yPost, per the division/modulus
   * below) into the tile it belongs to, the raster position within that tile, and the tile's
   * geographic bounds.
   */
  @SuppressFBWarnings(
      value = "INT_BAD_REM_BY_1",
      justification = "The calculation is appropriate if we ever want to vary to tile size.")
  private static TileInfo fromCellIndexToTileInfo(
      final long index,
      final int numXPosts,
      final int numYPosts,
      final int numXTiles,
      final int numYTiles,
      final double xMin,
      final double xMax,
      final double yMin,
      final double yMax,
      final int tileSize) {
    final int xPost = (int) (index / numYPosts);
    final int yPost = (int) (index % numYPosts);
    final int xTile = xPost / tileSize;
    final int yTile = yPost / tileSize;
    final int x = (xPost % tileSize);
    final int y = (yPost % tileSize);
    final double crsWidth = xMax - xMin;
    final double crsHeight = yMax - yMin;
    final double tileWestLon = ((xTile * crsWidth) / numXTiles) + xMin;
    final double tileSouthLat = ((yTile * crsHeight) / numYTiles) + yMin;
    final double tileEastLon = tileWestLon + (crsWidth / numXTiles);
    final double tileNorthLat = tileSouthLat + (crsHeight / numYTiles);
    // remember java rasters go from 0 at the top to (height-1) at the bottom, so we
    // have to inverse
    // the y here which goes from bottom to top
    return new TileInfo(tileWestLon, tileEastLon, tileSouthLat, tileNorthLat, x, tileSize - y - 1);
  }

  public DataStorePluginOptions getInputDataStore() {
    return inputDataStore;
  }

  public void setInputDataStore(final DataStorePluginOptions inputDataStore) {
    this.inputDataStore = inputDataStore;
  }

  public DataStorePluginOptions getOutputDataStore() {
    return outputDataStore;
  }

  public void setOutputIndex(final Index outputIndex) {
    this.outputIndex = outputIndex;
  }

  public void setOutputDataStore(final DataStorePluginOptions outputDataStore) {
    this.outputDataStore = outputDataStore;
  }

  public void setSparkSession(final SparkSession ss) {
    session = ss;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  public void setIndexName(final String indexName) {
    this.indexName = indexName;
  }

  public void setMinLevel(final int minLevel) {
    this.minLevel = minLevel;
  }

  public void setMaxLevel(final int maxLevel) {
    this.maxLevel = maxLevel;
  }

  public void setMaster(final String master) {
    this.master = master;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  public void setCqlFilter(final String cqlFilter) {
    this.cqlFilter = cqlFilter;
  }

  public void setTypeName(final String typeName) {
    this.typeName = typeName;
  }

  public void setCoverageName(final String coverageName) {
    this.coverageName = coverageName;
  }

  /** Sets the min and max number of input partitions used when loading the input data. */
  public void setSplits(final int min, final int max) {
    minSplits = min;
    maxSplits = max;
  }

  /**
   * Maps each input feature to the KDE grid cells it contributes to, weighting cells via
   * GaussianFilter.incrementPtFast centered on the feature's centroid. Reprojects the centroid
   * when the input and output CRS codes differ.
   */
  // NOTE(review): generic type parameters in this copy appear garbled (likely lost in
  // extraction) — confirm against the upstream source.
  protected static class GeoWaveCellMapper implements PairFlatMapFunction, Long, Double> {
    private static final long serialVersionUID = 1L;
    private final int numXPosts;
    private final int numYPosts;
    private final double minX;
    private final double maxX;
    private final double minY;
    private final double maxY;
    private final String inputCrsCode;
    private final String outputCrsCode;
    // lazily initialized on first use per executor; rebuilt after deserialization
    private MathTransform transform = null;

    protected GeoWaveCellMapper(
        final int numXPosts,
        final int numYPosts,
        final double minX,
        final double maxX,
        final double minY,
        final double maxY,
        final String inputCrsCode,
        final String outputCrsCode) {
      this.numXPosts = numXPosts;
      this.numYPosts = numYPosts;
      this.minX = minX;
      this.maxX = maxX;
      this.minY = minY;
      this.maxY = maxY;
      this.inputCrsCode = inputCrsCode;
      this.outputCrsCode = outputCrsCode;
    }

    @Override
    public Iterator> call(final Tuple2 t) throws Exception {
      final List> cells = new ArrayList<>();
      Point pt = null;
      if ((t != null) && (t._2 != null)) {
        final Object geomObj = t._2.getDefaultGeometry();
        if ((geomObj != null) && (geomObj instanceof Geometry)) {
          if (inputCrsCode.equals(outputCrsCode)) {
            // no reprojection needed
            pt = ((Geometry) geomObj).getCentroid();
          } else {
            if (transform == null) {
              try {
                transform =
                    CRS.findMathTransform(
                        CRS.decode(inputCrsCode, true),
                        CRS.decode(outputCrsCode, true),
                        true);
              } catch (final FactoryException e) {
                LOGGER.error("Unable to decode " + inputCrsCode + " CRS", e);
                throw new RuntimeException("Unable to initialize " + inputCrsCode + " object", e);
              }
            }
            try {
              final Geometry transformedGeometry = JTS.transform((Geometry) geomObj, transform);
              pt = transformedGeometry.getCentroid();
            } catch (MismatchedDimensionException | TransformException e) {
              LOGGER.warn(
                  "Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS",
                  e);
            }
          }
          // NOTE(review): if JTS.transform above throws, pt is still null here and
          // pt.getX() would NPE — confirm intended handling upstream.
          GaussianFilter.incrementPtFast(
              pt.getX(),
              pt.getY(),
              minX,
              maxX,
              minY,
              maxY,
              new CellCounter() {
                @Override
                public void increment(final long cellId, final double weight) {
                  cells.add(new Tuple2<>(cellId, weight));
                }
              },
              numXPosts,
              numYPosts);
        }
      }
      return cells.iterator();
    }
  }

  /**
   * Merges all coverages grouped under the same partition/sort key into a single coverage by
   * merging their raster tiles. Uses custom Java serialization that round-trips the adapter and
   * index through PersistenceUtils binary.
   */
  // NOTE(review): generic type parameters in this copy appear garbled (likely lost in
  // extraction) — confirm against the upstream source.
  private static class MergeOverlappingTiles implements Function>, GridCoverage> {
    private static final long serialVersionUID = 1L;
    private Index index;
    private RasterDataAdapter newAdapter;
    private HadoopWritableSerializer writableSerializer;

    public MergeOverlappingTiles(final RasterDataAdapter newAdapter, final Index index) {
      super();
      this.index = index;
      this.newAdapter = newAdapter;
      writableSerializer = newAdapter.createWritableSerializer();
    }

    // Reads the adapter and index back from the short-length-prefixed PersistenceUtils
    // binary written by writeObject, then rebuilds the (non-serializable) serializer.
    private void readObject(final ObjectInputStream aInputStream)
        throws ClassNotFoundException, IOException {
      final byte[] adapterBytes = new byte[aInputStream.readShort()];
      aInputStream.readFully(adapterBytes);
      final byte[] indexBytes = new byte[aInputStream.readShort()];
      aInputStream.readFully(indexBytes);
      newAdapter = (RasterDataAdapter) PersistenceUtils.fromBinary(adapterBytes);
      index = (Index) PersistenceUtils.fromBinary(indexBytes);
      writableSerializer = newAdapter.createWritableSerializer();
    }

    // NOTE(review): writeShort silently truncates lengths above Short.MAX_VALUE — confirm the
    // serialized adapter/index always stay below that size.
    private void writeObject(final ObjectOutputStream aOutputStream) throws IOException {
      final byte[] adapterBytes = PersistenceUtils.toBinary(newAdapter);
      final byte[] indexBytes = PersistenceUtils.toBinary(index);
      aOutputStream.writeShort(adapterBytes.length);
      aOutputStream.write(adapterBytes);
      aOutputStream.writeShort(indexBytes.length);
      aOutputStream.write(indexBytes);
    }

    @Override
    public GridCoverage call(final Tuple2> v) throws Exception {
      GridCoverage mergedCoverage = null;
      ClientMergeableRasterTile mergedTile = null;
      boolean needsMerge = false;
      final Iterator it = v._2.iterator();
      while (it.hasNext()) {
        final GridCoverageWritable value = it.next();
        if (mergedCoverage == null) {
          // the first coverage becomes the merge target
          mergedCoverage = writableSerializer.fromWritable(value);
        } else {
          if (!needsMerge) {
            // lazily convert the target to a mergeable tile only when a second value exists
            mergedTile = newAdapter.getRasterTileFromCoverage(mergedCoverage);
            needsMerge = true;
          }
          final ClientMergeableRasterTile thisTile =
              newAdapter.getRasterTileFromCoverage(writableSerializer.fromWritable(value));
          if (mergedTile != null) {
            mergedTile.merge(thisTile);
          }
        }
      }
      if (needsMerge) {
        mergedCoverage =
            newAdapter.getCoverageFromRasterTile(
                mergedTile,
                v._1.partitionKey,
                v._1.sortKey,
                index);
      }
      return mergedCoverage;
    }
  }

  /**
   * Fits an existing coverage to the output index and re-keys each resulting coverage by its
   * partition/sort key so coverages landing on the same tile can be grouped and merged
   * downstream. Uses the same custom serialization scheme as MergeOverlappingTiles.
   */
  // NOTE(review): generic type parameters in this copy appear garbled (likely lost in
  // extraction) — confirm against the upstream source.
  private static class TransformTileSize implements PairFlatMapFunction {
    private static final long serialVersionUID = 1L;
    private RasterDataAdapter newAdapter;
    private Index index;
    private HadoopWritableSerializer writableSerializer;

    public TransformTileSize(final RasterDataAdapter newAdapter, final Index index) {
      super();
      this.newAdapter = newAdapter;
      this.index = index;
      writableSerializer = newAdapter.createWritableSerializer();
    }

    // Reads adapter/index from short-length-prefixed PersistenceUtils binary and rebuilds the
    // serializer (mirrors writeObject below).
    private void readObject(final ObjectInputStream aInputStream)
        throws ClassNotFoundException, IOException {
      final byte[] adapterBytes = new byte[aInputStream.readShort()];
      aInputStream.readFully(adapterBytes);
      final byte[] indexBytes = new byte[aInputStream.readShort()];
      aInputStream.readFully(indexBytes);
      newAdapter = (RasterDataAdapter) PersistenceUtils.fromBinary(adapterBytes);
      index = (Index) PersistenceUtils.fromBinary(indexBytes);
      writableSerializer = newAdapter.createWritableSerializer();
    }

    private void writeObject(final ObjectOutputStream aOutputStream) throws IOException {
      final byte[] adapterBytes = PersistenceUtils.toBinary(newAdapter);
      final byte[] indexBytes = PersistenceUtils.toBinary(index);
      aOutputStream.writeShort(adapterBytes.length);
      aOutputStream.write(adapterBytes);
      aOutputStream.writeShort(indexBytes.length);
      aOutputStream.write(indexBytes);
    }

    @Override
    public Iterator> call(final GridCoverage existingCoverage) throws Exception {
      final Iterator it = newAdapter.convertToIndex(index, existingCoverage);
      return Iterators.transform(
          it,
          g -> new Tuple2<>(
              new PartitionAndSortKey(
                  ((FitToIndexGridCoverage) g).getPartitionKey(),
                  ((FitToIndexGridCoverage) g).getSortKey()),
              writableSerializer.toWritable(((FitToIndexGridCoverage) g).getOriginalCoverage())));
    }
  }

  /**
   * Value object describing a tile's geographic bounds and a raster cell position (x, y) within
   * that tile.
   */
  private static final class TileInfo {
    private final double tileWestLon;
    private final double tileEastLon;
    private final double tileSouthLat;
    private final double tileNorthLat;
    private final int x;
    private final int y;

    public TileInfo(
        final double tileWestLon,
        final double tileEastLon,
        final double tileSouthLat,
        final double tileNorthLat,
        final int x,
        final int y) {
      this.tileWestLon = tileWestLon;
      this.tileEastLon = tileEastLon;
      this.tileSouthLat = tileSouthLat;
      this.tileNorthLat = tileNorthLat;
      this.x = x;
      this.y = y;
    }

    // NOTE(review): hashCode/equals cover only the geographic bounds, not x/y — confirm that is
    // the intended identity (two cells in the same tile compare equal).
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      long temp;
      temp = Double.doubleToLongBits(tileEastLon);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(tileNorthLat);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(tileSouthLat);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(tileWestLon);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final TileInfo other = (TileInfo) obj;
      if (Double.doubleToLongBits(tileEastLon) != Double.doubleToLongBits(other.tileEastLon)) {
        return false;
      }
      if (Double.doubleToLongBits(tileNorthLat) != Double.doubleToLongBits(other.tileNorthLat)) {
        return false;
      }
      if (Double.doubleToLongBits(tileSouthLat) != Double.doubleToLongBits(other.tileSouthLat)) {
        return false;
      }
      if (Double.doubleToLongBits(tileWestLon) != Double.doubleToLongBits(other.tileWestLon)) {
        return false;
      }
      return true;
    }
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kde/operations/KDESparkCommand.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.kde.operations;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection;
import org.locationtech.geowave.analytic.spark.kde.KDERunner;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/**
 * CLI command ({@code kdespark}) that runs a kernel density estimate over an input store using
 * Spark via {@link KDERunner} and writes the resulting raster coverages to an output store.
 */
// NOTE(review): generic type parameters and angle-bracketed placeholder text in this copy appear
// garbled (likely lost in extraction) — confirm against the upstream source.
@GeowaveOperation(name = "kdespark", parentOperation = AnalyticSection.class)
@Parameters(commandDescription = "Kernel density estimate using Spark")
public class KDESparkCommand extends ServiceEnabledCommand implements Command {
  private static final Logger LOGGER = LoggerFactory.getLogger(KDESparkCommand.class);

  // positional arguments: input store name followed by output store name
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  @ParametersDelegate
  private KDESparkOptions kdeSparkOptions = new KDESparkOptions();

  // populated by computeResults from the positional store names
  private DataStorePluginOptions inputDataStore = null;
  private DataStorePluginOptions outputDataStore = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      // NOTE(review): the argument names appear to be missing from this message in this copy
      throw new ParameterException("Requires arguments: ");
    }
    computeResults(params);
  }

  @Override
  public Void computeResults(final OperationParams params) throws Exception {
    final String inputStoreName = parameters.get(0);
    final String outputStoreName = parameters.get(1);

    // Config file
    final File configFile = getGeoWaveConfigFile(params);

    // Attempt to load input store.
    inputDataStore = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());

    // Attempt to load output store.
    outputDataStore = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());

    final KDERunner runner = new KDERunner();
    runner.setAppName(kdeSparkOptions.getAppName());
    runner.setMaster(kdeSparkOptions.getMaster());
    runner.setHost(kdeSparkOptions.getHost());
    runner.setSplits(kdeSparkOptions.getMinSplits(), kdeSparkOptions.getMaxSplits());
    runner.setInputDataStore(inputDataStore);
    runner.setTypeName(kdeSparkOptions.getTypeName());
    runner.setOutputDataStore(outputDataStore);
    runner.setCoverageName(kdeSparkOptions.getCoverageName());
    runner.setIndexName(kdeSparkOptions.getIndexName());
    runner.setMinLevel(kdeSparkOptions.getMinLevel());
    runner.setMaxLevel(kdeSparkOptions.getMaxLevel());
    // NOTE(review): the option value is square-rooted before being handed to the runner —
    // presumably the CLI option expresses total cells while the runner wants posts per side;
    // confirm against the KDERunner contract.
    runner.setTileSize((int) Math.sqrt(kdeSparkOptions.getTileSize()));
    if ((kdeSparkOptions.getOutputIndex() != null)
        && !kdeSparkOptions.getOutputIndex().trim().isEmpty()) {
      final String outputIndex = kdeSparkOptions.getOutputIndex();
      // Load the Indices
      final List outputIndices =
          DataStoreUtils.loadIndices(outputDataStore.createIndexStore(), outputIndex);
      for (final Index primaryIndex : outputIndices) {
        if (SpatialDimensionalityTypeProvider.isSpatial(primaryIndex)) {
          runner.setOutputIndex(primaryIndex);
        } else {
          // only purely spatial output indices are accepted
          LOGGER.error(
              "spatial temporal is not supported for output index. Only spatial index is supported.");
          throw new IOException(
              "spatial temporal is not supported for output index. Only spatial index is supported.");
        }
      }
    }
    if (kdeSparkOptions.getCqlFilter() != null) {
      runner.setCqlFilter(kdeSparkOptions.getCqlFilter());
    }
    runner.setOutputDataStore(outputDataStore);
    try {
      runner.run();
    } catch (final IOException e) {
      // NOTE(review): the original cause is dropped here; consider chaining it
      // (new RuntimeException(..., e)) so the stack trace is preserved.
      throw new RuntimeException("Failed to execute: " + e.getMessage());
    } finally {
      runner.close();
    }
    return null;
  }

  public List getParameters() {
    return parameters;
  }

  /** Replaces the positional arguments with the given input and output store names. */
  public void setParameters(final String inputStoreName, final String outputStoreName) {
    parameters = new ArrayList<>();
    parameters.add(inputStoreName);
    parameters.add(outputStoreName);
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputDataStore;
  }

  public DataStorePluginOptions getOutputStoreOptions() {
    return outputDataStore;
  }

  public KDESparkOptions getKDESparkOptions() {
    return kdeSparkOptions;
  }

  public void setKDESparkOptions(final KDESparkOptions kdeSparkOptions) {
    this.kdeSparkOptions = kdeSparkOptions;
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kde/operations/KDESparkOptions.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.kde.operations;

import com.beust.jcommander.Parameter;

/** JCommander options bean for the {@code kdespark} command. */
public class KDESparkOptions {
  @Parameter(names = {"-n", "--name"}, description = "The spark application name")
  private String appName = "KDE Spark";

  @Parameter(names = "--tileSize", description = "The tile size")
  private Integer tileSize = 1;

  @Parameter(names = "--indexName", description = "An optional index name to filter the input data")
  private String indexName;

  @Parameter(names = "--minLevel", required = true, description = "The min level to run a KDE at")
  private Integer minLevel;

  @Parameter(names = "--maxLevel", required = true, description = "The max level to run a KDE at")
  private Integer maxLevel;

  @Parameter(names = {"-ho", "--host"}, description = "The spark driver host")
  private String host = "localhost";

  @Parameter(names = {"-m", "--master"}, description = "The spark master designation")
  private String master = "yarn";

  @Parameter(
      names = "--cqlFilter",
      description = "An optional CQL filter applied to the input data")
  private String cqlFilter = null;

  @Parameter(names = {"-f", "--featureType"}, description = "Feature type name to query")
  private String typeName = null;

  // -1 means "let the loader decide" for both split bounds
  @Parameter(names = "--minSplits", description = "The min partitions for the input data")
  private Integer minSplits = -1;

  @Parameter(names = "--maxSplits", description = "The max partitions for the input data")
  private Integer maxSplits = -1;

  @Parameter(names = "--coverageName", required = true, description = "The coverage name")
  private String coverageName;

  @Parameter(
      names = "--outputIndex",
      description = "An optional index for output datastore. Only spatial index type is supported")
  private String outputIndex;

  public String getOutputIndex() {
    return outputIndex;
  }

  public void setOutputIndex(final String outputIndex) {
    this.outputIndex = outputIndex;
  }

  public String getAppName() {
    return appName;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  public Integer getTileSize() {
    return tileSize;
  }

  public void setTileSize(final Integer tileSize) {
    this.tileSize = tileSize;
  }

  public String getIndexName() {
    return indexName;
  }

  public void setIndexName(final String indexName) {
    this.indexName = indexName;
  }

  public Integer getMinLevel() {
    return minLevel;
  }

  public void setMinLevel(final Integer minLevel) {
    this.minLevel = minLevel;
  }

  public Integer getMaxLevel() {
    return maxLevel;
  }

  public void setMaxLevel(final Integer maxLevel) {
    this.maxLevel = maxLevel;
  }

  public String getHost() {
    return host;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  public String getMaster() {
    return master;
  }

  public void setMaster(final String master) {
    this.master = master;
  }

  public String getCqlFilter() {
    return cqlFilter;
  }

  public void setCqlFilter(final String cqlFilter) {
    this.cqlFilter = cqlFilter;
  }

  public String getTypeName() {
    return typeName;
  }

  public void setTypeName(final String typeName) {
    this.typeName = typeName;
  }

  public Integer getMinSplits() {
    return minSplits;
  }

  public void setMinSplits(final Integer minSplits) {
    this.minSplits = minSplits;
  }

  public Integer getMaxSplits() {
    return maxSplits;
  }

  public void setMaxSplits(final Integer maxSplits) {
    this.maxSplits = maxSplits;
  }

  public String getCoverageName() {
    return coverageName;
  }

  public void setCoverageName(final String coverageName) {
    this.coverageName = coverageName;
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/KMeansHullGenerator.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.kmeans;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.mllib.clustering.KMeansModel;
import org.apache.spark.mllib.linalg.Vector;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.jts.algorithm.ConvexHull;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;

/**
 * Utilities for deriving convex hulls from k-means results: points are grouped by their
 * predicted centroid index and each group is reduced to the convex hull of its points.
 */
// NOTE(review): generic type parameters in this copy appear garbled (likely lost in extraction)
// — confirm against the upstream source.
public class KMeansHullGenerator {
  private static final Logger LOGGER = LoggerFactory.getLogger(KMeansHullGenerator.class);

  /** Groups each input point under the centroid index the cluster model predicts for it. */
  public static JavaPairRDD> groupByIndex(
      final JavaRDD inputPoints,
      final KMeansModel clusterModel) {
    // Group the input points by their kmeans centroid index
    return inputPoints.groupBy(point -> {
      return clusterModel.predict(point);
    });
  }

  /**
   * Builds the convex hull geometry for each centroid's group of points. The first two vector
   * components are used as the x/y coordinates; null vectors degrade to an empty Coordinate.
   */
  public static JavaPairRDD generateHullsRDD(
      final JavaPairRDD> groupedPoints) {
    // Create the convex hull for each kmeans centroid
    final JavaPairRDD hullRDD = groupedPoints.mapValues(point -> {
      final Iterable coordIt =
          Iterables.transform(point, new com.google.common.base.Function() {
            @Override
            public Coordinate apply(final Vector input) {
              if (input != null) {
                return new Coordinate(input.apply(0), input.apply(1));
              }
              return new Coordinate();
            }
          });
      final Coordinate[] coordArray = Iterables.toArray(coordIt, Coordinate.class);
      return new ConvexHull(coordArray, GeometryUtils.GEOMETRY_FACTORY).getConvexHull();
    });
    return hullRDD;
  }
}
================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/KMeansRunner.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.kmeans;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FilenameUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.clustering.KMeans;
import org.apache.spark.mllib.clustering.KMeansModel;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.sql.SparkSession;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.filter.text.ecql.ECQL;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;
import org.locationtech.geowave.analytic.spark.GeoWaveRDD;
import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;
import org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;
import org.locationtech.geowave.analytic.spark.RDDOptions;
import org.locationtech.geowave.analytic.spark.RDDUtils;
import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;
import org.locationtech.geowave.core.geotime.store.query.ScaledTemporalRange;
import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;
import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;
import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.jts.geom.Geometry;
import org.opengis.filter.Filter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.ParameterException;

/**
 * Runs Spark MLlib k-means over features loaded from a GeoWave store, optionally constrained by
 * a CQL filter and a scaled time dimension, and writes centroids (and optionally hulls) to an
 * output store.
 */
// NOTE(review): generic type parameters in this copy appear garbled (likely lost in extraction)
// — confirm against the upstream source.
public class KMeansRunner {
  private static final Logger LOGGER = LoggerFactory.getLogger(KMeansRunner.class);

  private String appName = "KMeansRunner";
  private String master = "yarn";
  private String host = "localhost";

  // lazily created by initContext()
  private JavaSparkContext jsc = null;
  private SparkSession session = null;
  private DataStorePluginOptions inputDataStore = null;
  private DataStorePluginOptions outputDataStore = null;
  private String centroidTypeName = "kmeans_centroids";
  private String hullTypeName = "kmeans_hulls";
  // cached feature vectors fed to MLlib k-means
  private JavaRDD centroidVectors;
  private KMeansModel outputModel;

  private int numClusters = 8;
  private int numIterations = 20;
  // negative value means "use the MLlib default convergence epsilon"
  private double epsilon = -1.0;
  private String cqlFilter = null;
  private String typeName = null;
  private String timeField = null;
  private ScaledTemporalRange scaledTimeRange = null;
  private ScaledTemporalRange scaledRange = null;
  private int minSplits = -1;
  private int maxSplits = -1;
  private Boolean useTime = false;
  private Boolean generateHulls = false;
  private Boolean computeHullData = false;

  public KMeansRunner() {}

  // Creates the Spark session/context on first use; includes this class's jar (when running
  // from a jar) so executors can load it.
  private void initContext() {
    if (session == null) {
      String jar = "";
      try {
        jar =
            KMeansRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
        if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) {
          jar = "";
        }
      } catch (final URISyntaxException e) {
        LOGGER.error("Unable to set jar location in spark configuration", e);
      }
      session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);
      jsc = JavaSparkContext.fromSparkContext(session.sparkContext());
    }
  }

  /** Closes the Spark session if one was created. */
  public void close() {
    if (session != null) {
      session.close();
      session = null;
    }
  }

  /**
   * Loads the input features, runs k-means (kmeans|| initialization), and writes the results.
   *
   * @throws IOException if no input datastore was supplied
   */
  public void run() throws IOException {
    initContext();
    // Validate inputs
    if (inputDataStore == null) {
      LOGGER.error("You must supply an input datastore!");
      throw new IOException("You must supply an input datastore!");
    }
    if (isUseTime()) {
      scaledRange = KMeansUtils.setRunnerTimeParams(this, inputDataStore, typeName);
      if (scaledRange == null) {
        LOGGER.error("Failed to set time params for kmeans. Please specify a valid feature type.");
        throw new ParameterException("--useTime option: Failed to set time params");
      }
    }

    // Retrieve the feature adapters
    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
    List featureTypeNames;

    // If provided, just use the one
    if (typeName != null) {
      featureTypeNames = new ArrayList<>();
      featureTypeNames.add(typeName);
    } else {
      // otherwise, grab all the feature adapters
      featureTypeNames = FeatureDataUtils.getFeatureTypeNames(inputDataStore);
    }
    bldr.setTypeNames(featureTypeNames.toArray(new String[0]));

    // This is required due to some funkiness in GeoWaveInputFormat
    final PersistentAdapterStore adapterStore = inputDataStore.createAdapterStore();
    final InternalAdapterStore internalAdapterStore = inputDataStore.createInternalAdapterStore();

    // TODO remove this, but in case there is trouble this is here for
    // reference temporarily
    // queryOptions.getAdaptersArray(adapterStore);

    // Add a spatial filter if requested
    try {
      if (cqlFilter != null) {
        Geometry bbox = null;
        String cqlTypeName;
        if (typeName == null) {
          cqlTypeName = featureTypeNames.get(0);
        } else {
          cqlTypeName = typeName;
        }
        final short adapterId = internalAdapterStore.getAdapterId(cqlTypeName);
        final DataTypeAdapter adapter = adapterStore.getAdapter(adapterId).getAdapter();
        if (adapter instanceof GeotoolsFeatureDataAdapter) {
          final String geometryAttribute =
              ((GeotoolsFeatureDataAdapter) adapter).getFeatureType().getGeometryDescriptor().getLocalName();
          Filter filter;
          filter = ECQL.toFilter(cqlFilter);
          // extract the bounding geometry implied by the CQL filter
          final ExtractGeometryFilterVisitorResult geoAndCompareOpData =
              (ExtractGeometryFilterVisitorResult) filter.accept(
                  new ExtractGeometryFilterVisitor(
                      GeometryUtils.getDefaultCRS(),
                      geometryAttribute),
                  null);
          bbox = geoAndCompareOpData.getGeometry();
        }
        // only constrain the query when the filter yields a finite geometry
        if ((bbox != null) && !bbox.equals(GeometryUtils.infinity())) {
          bldr.constraints(
              bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints(
                  bbox).build());
        }
      }
    } catch (final CQLException e) {
      // best-effort: an unparsable filter is logged and the query proceeds unconstrained
      LOGGER.error("Unable to parse CQL: " + cqlFilter);
    }

    // Load RDD from datastore
    final RDDOptions kmeansOpts = new RDDOptions();
    kmeansOpts.setMinSplits(minSplits);
    kmeansOpts.setMaxSplits(maxSplits);
    kmeansOpts.setQuery(bldr.build());
    final GeoWaveRDD kmeansRDD =
        GeoWaveRDDLoader.loadRDD(session.sparkContext(), inputDataStore, kmeansOpts);

    // Retrieve the input centroids
    LOGGER.debug("Retrieving input centroids from RDD...");
    centroidVectors = RDDUtils.rddFeatureVectors(kmeansRDD, timeField, scaledTimeRange);
    centroidVectors.cache();

    // Init the algorithm
    final KMeans kmeans = new KMeans();
    kmeans.setInitializationMode("kmeans||");
    kmeans.setK(numClusters);
    kmeans.setMaxIterations(numIterations);
    if (epsilon > -1.0) {
      kmeans.setEpsilon(epsilon);
    }

    // Run KMeans
    LOGGER.debug("Running KMeans algorithm...");
    outputModel = kmeans.run(centroidVectors.rdd());

    LOGGER.debug("Writing results to output store...");
    writeToOutputStore();
    LOGGER.debug("Results successfully written!");
  }

  /** Writes cluster centroids (and, when enabled, hulls) to the output store, if one is set. */
  public void writeToOutputStore() {
    if (outputDataStore != null) {
      // output cluster centroids (and hulls) to output datastore
      KMeansUtils.writeClusterCentroids(
          outputModel,
          outputDataStore,
          centroidTypeName,
          scaledRange);
      if (isGenerateHulls()) {
        KMeansUtils.writeClusterHulls(
            centroidVectors,
            outputModel,
            outputDataStore,
            hullTypeName,
            isComputeHullData());
      }
    }
  }

  public Boolean isUseTime() {
    return useTime;
  }

  public void setUseTime(final Boolean useTime) {
    this.useTime = useTime;
  }

  public String getCentroidTypeName() {
    return centroidTypeName;
  }

  public void setCentroidTypeName(final String centroidTypeName) {
    this.centroidTypeName = centroidTypeName;
  }

  public String getHullTypeName() {
    return hullTypeName;
  }

  public void setHullTypeName(final String hullTypeName) {
    this.hullTypeName = hullTypeName;
  }

  public Boolean isGenerateHulls() {
    return generateHulls;
  }

  public void setGenerateHulls(final Boolean generateHulls) {
    this.generateHulls = generateHulls;
  }

  public Boolean isComputeHullData() {
    return computeHullData;
  }

  public void setComputeHullData(final Boolean computeHullData) {
    this.computeHullData = computeHullData;
  }

  public JavaRDD getInputCentroids() {
    return centroidVectors;
  }

  public DataStorePluginOptions getInputDataStore() {
    return inputDataStore;
  }

  public void setInputDataStore(final DataStorePluginOptions inputDataStore) {
    this.inputDataStore = inputDataStore;
  }

  public DataStorePluginOptions getOutputDataStore() {
    return outputDataStore;
  }

  public void setOutputDataStore(final DataStorePluginOptions outputDataStore) {
    this.outputDataStore = outputDataStore;
  }

  public void setSparkSession(final SparkSession ss) {
    session = ss;
  }

  public void setNumClusters(final int numClusters) {
    this.numClusters = numClusters;
  }

  public void setNumIterations(final int numIterations) {
    this.numIterations = numIterations;
  }

  public void setEpsilon(final Double epsilon) {
    this.epsilon = epsilon;
  }

  public KMeansModel getOutputModel() {
    return outputModel;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  public void setMaster(final String master) {
    this.master = master;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  public void setCqlFilter(final String cqlFilter) {
    this.cqlFilter = cqlFilter;
  }

  public void setTypeName(final String typeName) {
    this.typeName = typeName;
  }

  public void setTimeParams(final String timeField, final ScaledTemporalRange timeRange) {
    this.timeField = timeField;
    scaledTimeRange = timeRange;
  }

  public void setSplits(final int min, final int max) {
    minSplits =
min; maxSplits = max; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/KMeansUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.kmeans; import java.util.Date; import java.util.List; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.mllib.clustering.KMeansModel; import org.apache.spark.mllib.linalg.Vector; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.feature.type.BasicFeatureTypes; import org.geotools.referencing.CRS; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.adapter.vector.util.PolygonAreaCalculator; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.query.ScaledTemporalRange; import org.locationtech.geowave.core.geotime.store.query.TemporalRange; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import 
org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.referencing.FactoryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import scala.Tuple2;

/**
 * Helper utilities for the Spark KMeans runner: writing centroid and hull results back to a
 * GeoWave datastore, and deriving time parameters for a feature type.
 */
public class KMeansUtils {
  private static final Logger LOGGER = LoggerFactory.getLogger(KMeansUtils.class);

  /**
   * Writes the KMeans cluster centers as point features (type name {@code centroidAdapterName})
   * to the output datastore under a spatial index. When {@code scaledRange} is non-null and a
   * center vector carries a third component, that component is decoded back into a "Time"
   * attribute via {@code scaledRange.valueToTime}.
   *
   * @return the feature adapter the centroids were written with
   */
  public static DataTypeAdapter writeClusterCentroids(
      final KMeansModel clusterModel,
      final DataStorePluginOptions outputDataStore,
      final String centroidAdapterName,
      final ScaledTemporalRange scaledRange) {
    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
    typeBuilder.setName(centroidAdapterName);
    typeBuilder.setNamespaceURI(BasicFeatureTypes.DEFAULT_NAMESPACE);
    try {
      typeBuilder.setCRS(CRS.decode("EPSG:4326", true));
    } catch (final FactoryException fex) {
      LOGGER.error(fex.getMessage(), fex);
    }
    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();
    // the geometry attribute is named after the Geometry class itself
    typeBuilder.add(
        attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor(
            Geometry.class.getName().toString()));
    if (scaledRange != null) {
      typeBuilder.add(attrBuilder.binding(Date.class).nillable(false).buildDescriptor("Time"));
    }
    typeBuilder.add(
        attrBuilder.binding(Integer.class).nillable(false).buildDescriptor("ClusterIndex"));
    final SimpleFeatureType sfType = typeBuilder.buildFeatureType();
    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);
    final FeatureDataAdapter featureAdapter = new FeatureDataAdapter(sfType);
    final DataStore featureStore = outputDataStore.createDataStore();
    final Index featureIndex =
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
    featureStore.addType(featureAdapter, featureIndex);
    try (Writer writer = featureStore.createWriter(featureAdapter.getTypeName())) {
      for (final Vector center : clusterModel.clusterCenters()) {
        final int index = clusterModel.predict(center);
        // vector layout used here: [0] = lon, [1] = lat, optional [2] = scaled time
        final double lon = center.apply(0);
        final double lat = center.apply(1);
        sfBuilder.set(
            Geometry.class.getName(),
            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(lon, lat)));
        if ((scaledRange != null) && (center.size() > 2)) {
          final double timeVal = center.apply(2);
          final Date time = scaledRange.valueToTime(timeVal);
          sfBuilder.set("Time", time);
          LOGGER.warn("Write time: " + time);
        }
        sfBuilder.set("ClusterIndex", index);
        final SimpleFeature sf = sfBuilder.buildFeature("Centroid-" + index);
        writer.write(sf);
      }
    }
    return featureAdapter;
  }

  /**
   * Generates a hull per cluster and writes each as a polygon feature (type name
   * {@code hullAdapterName}). When {@code computeMetadata} is true, also computes per-hull
   * point count, densified area, and density (count / area).
   *
   * @return the feature adapter the hulls were written with
   */
  public static DataTypeAdapter writeClusterHulls(
      final JavaRDD inputCentroids,
      final KMeansModel clusterModel,
      final DataStorePluginOptions outputDataStore,
      final String hullAdapterName,
      final boolean computeMetadata) {
    final JavaPairRDD> groupByRdd =
        KMeansHullGenerator.groupByIndex(inputCentroids, clusterModel);
    final JavaPairRDD hullRdd = KMeansHullGenerator.generateHullsRDD(groupByRdd);
    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
    typeBuilder.setName(hullAdapterName);
    typeBuilder.setNamespaceURI(BasicFeatureTypes.DEFAULT_NAMESPACE);
    try {
      typeBuilder.setCRS(CRS.decode("EPSG:4326", true));
    } catch (final FactoryException e) {
      LOGGER.error(e.getMessage(), e);
    }
    final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder();
    typeBuilder.add(
        attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor(
            Geometry.class.getName().toString()));
    typeBuilder.add(
        attrBuilder.binding(Integer.class).nillable(false).buildDescriptor("ClusterIndex"));
    typeBuilder.add(attrBuilder.binding(Integer.class).nillable(false).buildDescriptor("Count"));
    typeBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor("Area"));
    typeBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor("Density"));
    final SimpleFeatureType sfType = typeBuilder.buildFeatureType();
    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType);
    final FeatureDataAdapter featureAdapter = new FeatureDataAdapter(sfType);
    final DataStore featureStore = outputDataStore.createDataStore();
    final Index featureIndex =
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
    // polyCalc is only needed (and only constructed) when metadata is requested
    final PolygonAreaCalculator polyCalc = (computeMetadata ? new PolygonAreaCalculator() : null);
    featureStore.addType(featureAdapter, featureIndex);
    try (Writer writer = featureStore.createWriter(featureAdapter.getTypeName())) {
      // NOTE(review): collect() pulls all hulls to the driver — presumably the hull count is
      // small (one per cluster); confirm for very large k
      for (final Tuple2 hull : hullRdd.collect()) {
        final Integer index = hull._1;
        final Geometry geom = hull._2;
        sfBuilder.set(Geometry.class.getName(), geom);
        sfBuilder.set("ClusterIndex", index);
        int count = 0;
        double area = 0.0;
        double density = 0.0;
        if (computeMetadata) {
          for (final Iterable points : groupByRdd.lookup(index)) {
            final Vector[] pointVec = Iterables.toArray(points, Vector.class);
            count += pointVec.length;
          }
          try {
            // HP Fortify "NULL Pointer Dereference" false positive
            // Exception handling will catch if polyCalc is null
            area = polyCalc.getAreaDensify(geom);
            density = count / area;
          } catch (final Exception e) {
            LOGGER.error("Problem computing polygon area: " + e.getMessage());
          }
        }
        sfBuilder.set("Count", count);
        sfBuilder.set("Area", area);
        sfBuilder.set("Density", density);
        final SimpleFeature sf = sfBuilder.buildFeature("Hull-" + index);
        writer.write(sf);
      }
    }
    return featureAdapter;
  }

  /**
   * Derives time parameters (time field plus a scaled temporal/value range) for the given type
   * and installs them on the runner via {@code runner.setTimeParams}. Returns null (after
   * logging) when no single feature type or no time field can be determined.
   */
  public static ScaledTemporalRange setRunnerTimeParams(
      final KMeansRunner runner,
      final DataStorePluginOptions inputDataStore,
      String typeName) {
    if (typeName == null) {
      // if no id provided, locate a single
      // featureadapter
      final List typeNameList = FeatureDataUtils.getFeatureTypeNames(inputDataStore);
      if (typeNameList.size() == 1) {
        typeName = typeNameList.get(0);
      } else if (typeNameList.isEmpty()) {
        LOGGER.error("No feature adapters found for use with time param");
        return null;
      } else {
        LOGGER.error(
            "Multiple feature adapters found for use with time param. Please specify one.");
        return null;
      }
    }
    final ScaledTemporalRange scaledRange = new ScaledTemporalRange();
    final String timeField = FeatureDataUtils.getTimeField(inputDataStore, typeName);
    if (timeField != null) {
      final TemporalRange timeRange =
          DateUtilities.getTemporalRange(inputDataStore, typeName, timeField);
      if (timeRange != null) {
        scaledRange.setTimeRange(timeRange.getStartTime(), timeRange.getEndTime());
      }
      final String geomField = FeatureDataUtils.getGeomField(inputDataStore, typeName);
      final Envelope bbox =
          org.locationtech.geowave.adapter.vector.util.FeatureGeometryUtils.getGeoBounds(
              inputDataStore,
              typeName,
              geomField);
      if (bbox != null) {
        // scale time onto a value range matching the smaller spatial extent — presumably so
        // the time dimension is comparable to the spatial ones; confirm against RDDUtils usage
        final double xRange = bbox.getMaxX() - bbox.getMinX();
        final double yRange = bbox.getMaxY() - bbox.getMinY();
        final double valueRange = Math.min(xRange, yRange);
        scaledRange.setValueRange(0.0, valueRange);
      }
      runner.setTimeParams(timeField, scaledRange);
      return scaledRange;
    }
    LOGGER.error("Couldn't determine field to use for time param");
    return null;
  }
}

================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/operations/KMeansSparkOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.kmeans.operations;

import com.beust.jcommander.Parameter;

/**
 * JCommander-backed options bean for the {@code kmeansspark} CLI operation. Each field maps to a
 * command-line flag; the values are forwarded to the KMeans Spark runner by
 * {@link KmeansSparkCommand}.
 */
public class KMeansSparkOptions {
  @Parameter(names = {"-n", "--name"}, description = "The spark application name")
  private String appName = "KMeans Spark";

  @Parameter(names = {"-ho", "--host"}, description = "The spark driver host")
  private String host = "localhost";

  @Parameter(names = {"-m", "--master"}, description = "The spark master designation")
  private String master = "yarn";

  @Parameter(names = {"-k", "--numClusters"}, description = "The number of clusters to generate")
  private Integer numClusters = 8;

  @Parameter(names = {"-i", "--numIterations"}, description = "The number of iterations to run")
  private Integer numIterations = 20;

  // null means "use the MLlib default" — the runner only applies epsilon when non-null
  @Parameter(names = {"-e", "--epsilon"}, description = "The convergence tolerance")
  private Double epsilon = null;

  @Parameter(names = {"-t", "--useTime"}, description = "Use time field from input data")
  private Boolean useTime = false;

  @Parameter(names = {"-h", "--hulls"}, description = "Generate convex hulls?")
  private Boolean generateHulls = false;

  @Parameter(
      names = {"-ch", "--computeHullData"},
      description = "Compute hull count, area and density?")
  private Boolean computeHullData = false;

  @Parameter(
      names = "--cqlFilter",
      description = "An optional CQL filter applied to the input data")
  private String cqlFilter = null;

  @Parameter(names = {"-f", "--featureType"}, description = "Feature type name to query")
  private String typeName = null;

  // -1 lets the RDD loader choose the partition counts
  @Parameter(names = "--minSplits", description = "The min partitions for the input data")
  private Integer minSplits = -1;

  @Parameter(names = "--maxSplits", description = "The max partitions for the input data")
  private Integer maxSplits = -1;

  @Parameter(
      names = {"-ct", "--centroidType"},
      description = "Feature type name for centroid output")
  private String centroidTypeName = "kmeans_centroids";

  @Parameter(names = {"-ht", "--hullType"}, description = "Feature type name for hull output")
  private String hullTypeName = "kmeans_hulls";

  public KMeansSparkOptions() {}

  // --- plain accessors/mutators, one pair per option above ---

  public String getAppName() {
    return appName;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  public String getHost() {
    return host;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  public String getMaster() {
    return master;
  }

  public void setMaster(final String master) {
    this.master = master;
  }

  public Integer getNumClusters() {
    return numClusters;
  }

  public void setNumClusters(final Integer numClusters) {
    this.numClusters = numClusters;
  }

  public Integer getNumIterations() {
    return numIterations;
  }

  public void setNumIterations(final Integer numIterations) {
    this.numIterations = numIterations;
  }

  public Double getEpsilon() {
    return epsilon;
  }

  public void setEpsilon(final Double epsilon) {
    this.epsilon = epsilon;
  }

  public Boolean isUseTime() {
    return useTime;
  }

  public void setUseTime(final Boolean useTime) {
    this.useTime = useTime;
  }

  public Boolean isGenerateHulls() {
    return generateHulls;
  }

  public void setGenerateHulls(final Boolean generateHulls) {
    this.generateHulls = generateHulls;
  }

  public Boolean isComputeHullData() {
    return computeHullData;
  }

  public void setComputeHullData(final Boolean computeHullData) {
    this.computeHullData = computeHullData;
  }

  public String getCqlFilter() {
    return cqlFilter;
  }

  public void setCqlFilter(final String cqlFilter) {
    this.cqlFilter = cqlFilter;
  }

  public String getTypeName() {
    return typeName;
  }

  public void setTypeName(final String typeName) {
    this.typeName = typeName;
  }

  public Integer getMinSplits() {
    return minSplits;
  }

  public void setMinSplits(final Integer minSplits) {
    this.minSplits = minSplits;
  }

  public Integer getMaxSplits() {
    return maxSplits;
  }

  public void setMaxSplits(final Integer maxSplits) {
    this.maxSplits = maxSplits;
  }

  public String getCentroidTypeName() {
    return centroidTypeName;
  }

  public void setCentroidTypeName(final String centroidTypeName) {
    this.centroidTypeName = centroidTypeName;
  }

  public String getHullTypeName() {
    return hullTypeName;
  }

  public void setHullTypeName(final String hullTypeName) {
    this.hullTypeName = hullTypeName;
  }
}

================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/kmeans/operations/KmeansSparkCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.kmeans.operations; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection; import org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.jts.util.Stopwatch; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "kmeansspark", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "KMeans clustering using Spark ML") public class KmeansSparkCommand extends ServiceEnabledCommand implements Command { @Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private KMeansSparkOptions kMeansSparkOptions = new KMeansSparkOptions(); DataStorePluginOptions 
inputDataStore = null; DataStorePluginOptions outputDataStore = null; // Log some timing Stopwatch stopwatch = new Stopwatch(); @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } computeResults(params); } @Override public Void computeResults(final OperationParams params) throws Exception { final String inputStoreName = parameters.get(0); final String outputStoreName = parameters.get(1); // Config file final File configFile = getGeoWaveConfigFile(params); // Attempt to load input store. inputDataStore = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); // Attempt to load output store. outputDataStore = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole()); // Save a reference to the store in the property management. final PersistableStore persistedStore = new PersistableStore(inputDataStore); final PropertyManagement properties = new PropertyManagement(); properties.store(StoreParameters.StoreParam.INPUT_STORE, persistedStore); // Convert properties from DBScanOptions and CommonOptions final PropertyManagementConverter converter = new PropertyManagementConverter(properties); converter.readProperties(kMeansSparkOptions); final KMeansRunner runner = new KMeansRunner(); runner.setAppName(kMeansSparkOptions.getAppName()); runner.setMaster(kMeansSparkOptions.getMaster()); runner.setHost(kMeansSparkOptions.getHost()); runner.setSplits(kMeansSparkOptions.getMinSplits(), kMeansSparkOptions.getMaxSplits()); runner.setInputDataStore(inputDataStore); runner.setNumClusters(kMeansSparkOptions.getNumClusters()); runner.setNumIterations(kMeansSparkOptions.getNumIterations()); runner.setUseTime(kMeansSparkOptions.isUseTime()); runner.setTypeName(kMeansSparkOptions.getTypeName()); if (kMeansSparkOptions.getEpsilon() != null) { runner.setEpsilon(kMeansSparkOptions.getEpsilon()); } if 
(kMeansSparkOptions.getTypeName() != null) { runner.setTypeName(kMeansSparkOptions.getTypeName()); } if (kMeansSparkOptions.getCqlFilter() != null) { runner.setCqlFilter(kMeansSparkOptions.getCqlFilter()); } runner.setGenerateHulls(kMeansSparkOptions.isGenerateHulls()); runner.setComputeHullData(kMeansSparkOptions.isComputeHullData()); runner.setHullTypeName(kMeansSparkOptions.getHullTypeName()); runner.setCentroidTypeName(kMeansSparkOptions.getCentroidTypeName()); runner.setOutputDataStore(outputDataStore); try { runner.run(); } catch (final IOException e) { throw new RuntimeException("Failed to execute: " + e.getMessage()); } finally { runner.close(); } return null; } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public DataStorePluginOptions getInputStoreOptions() { return inputDataStore; } public DataStorePluginOptions getOutputStoreOptions() { return outputDataStore; } public KMeansSparkOptions getKMeansSparkOptions() { return kMeansSparkOptions; } public void setKMeansSparkOptions(final KMeansSparkOptions kMeansSparkOptions) { this.kMeansSparkOptions = kMeansSparkOptions; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/resize/RasterTileResizeSparkRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.resize; import java.io.IOException; import java.net.URISyntaxException; import java.util.Collections; import java.util.Iterator; import org.apache.commons.io.FilenameUtils; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.Function; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.apache.spark.sql.SparkSession; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions; import org.locationtech.geowave.adapter.raster.resize.RasterTileResizeHelper; import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader; import org.locationtech.geowave.analytic.spark.GeoWaveSparkConf; import org.locationtech.geowave.analytic.spark.RDDOptions; import org.locationtech.geowave.analytic.spark.RDDUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.entities.GeoWaveKey; import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import 
org.opengis.coverage.grid.GridCoverage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import jersey.repackaged.com.google.common.collect.Iterables; import jersey.repackaged.com.google.common.collect.Iterators; import scala.Tuple2; public class RasterTileResizeSparkRunner { private static final Logger LOGGER = LoggerFactory.getLogger(RasterTileResizeSparkRunner.class); private String appName = "RasterResizeRunner"; private String master = "yarn"; private String host = "localhost"; private JavaSparkContext jsc = null; private SparkSession session = null; private final DataStorePluginOptions inputStoreOptions; private final DataStorePluginOptions outputStoreOptions; protected RasterTileResizeCommandLineOptions rasterResizeOptions; public RasterTileResizeSparkRunner( final DataStorePluginOptions inputStoreOptions, final DataStorePluginOptions outputStoreOptions, final RasterTileResizeCommandLineOptions rasterResizeOptions) { this.inputStoreOptions = inputStoreOptions; this.outputStoreOptions = outputStoreOptions; this.rasterResizeOptions = rasterResizeOptions; } public void setAppName(final String appName) { this.appName = appName; } public void setMaster(final String master) { this.master = master; } public void setHost(final String host) { this.host = host; } private void initContext() { if (session == null) { String jar = ""; try { jar = RasterTileResizeSparkRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) { jar = ""; } } catch (final URISyntaxException e) { LOGGER.error("Unable to set jar location in spark configuration", e); } session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar); jsc = JavaSparkContext.fromSparkContext(session.sparkContext()); } } public void run() throws IOException { initContext(); // Validate inputs if (inputStoreOptions == null) { LOGGER.error("You must supply an input datastore!"); throw new IOException("You must 
supply an input datastore!"); } final InternalAdapterStore internalAdapterStore = inputStoreOptions.createInternalAdapterStore(); final short internalAdapterId = internalAdapterStore.getAdapterId(rasterResizeOptions.getInputCoverageName()); final DataTypeAdapter adapter = inputStoreOptions.createAdapterStore().getAdapter(internalAdapterId).getAdapter(); if (adapter == null) { throw new IllegalArgumentException( "Adapter for coverage '" + rasterResizeOptions.getInputCoverageName() + "' does not exist in namespace '" + inputStoreOptions.getGeoWaveNamespace() + "'"); } Index index = null; final IndexStore indexStore = inputStoreOptions.createIndexStore(); if (rasterResizeOptions.getIndexName() != null) { index = indexStore.getIndex(rasterResizeOptions.getIndexName()); } if (index == null) { try (CloseableIterator indices = indexStore.getIndices()) { index = indices.next(); } if (index == null) { throw new IllegalArgumentException( "Index does not exist in namespace '" + inputStoreOptions.getGeoWaveNamespace() + "'"); } } final RasterDataAdapter newAdapter = new RasterDataAdapter( (RasterDataAdapter) adapter, rasterResizeOptions.getOutputCoverageName(), rasterResizeOptions.getOutputTileSize()); final DataStore store = outputStoreOptions.createDataStore(); store.addType(newAdapter, index); final short newInternalAdapterId = outputStoreOptions.createInternalAdapterStore().addTypeName(newAdapter.getTypeName()); final RDDOptions options = new RDDOptions(); if (rasterResizeOptions.getMinSplits() != null) { options.setMinSplits(rasterResizeOptions.getMinSplits()); } if (rasterResizeOptions.getMaxSplits() != null) { options.setMaxSplits(rasterResizeOptions.getMaxSplits()); } final JavaPairRDD inputRDD = GeoWaveRDDLoader.loadRawRasterRDD( jsc.sc(), inputStoreOptions, index.getName(), rasterResizeOptions.getMinSplits(), rasterResizeOptions.getMaxSplits()); LOGGER.debug("Writing results to output store..."); RDDUtils.writeRasterToGeoWave( jsc.sc(), index, outputStoreOptions, 
newAdapter, inputRDD.flatMapToPair( new RasterResizeMappingFunction( internalAdapterId, newInternalAdapterId, newAdapter, index)).groupByKey().map( new MergeRasterFunction( internalAdapterId, newInternalAdapterId, newAdapter, index))); LOGGER.debug("Results successfully written!"); } private static class RasterResizeMappingFunction implements PairFlatMapFunction, GeoWaveInputKey, GridCoverageWritable> { private final RasterTileResizeHelper helper; /** * */ private static final long serialVersionUID = 1L; public RasterResizeMappingFunction( final short oldAdapterId, final short newAdapterId, final RasterDataAdapter newAdapter, final Index index) { super(); helper = new RasterTileResizeHelper(oldAdapterId, newAdapterId, newAdapter, index); } @Override public Iterator> call( final Tuple2 t) throws Exception { if (helper.isOriginalCoverage(t._1.getInternalAdapterId())) { final Iterator coverages = helper.getCoveragesForIndex(t._2); if (coverages == null) { LOGGER.error("Couldn't get coverages instance, getCoveragesForIndex returned null"); throw new IOException( "Couldn't get coverages instance, getCoveragesForIndex returned null"); } return Iterators.transform(Iterators.filter(coverages, FitToIndexGridCoverage.class), c -> { // it should be a FitToIndexGridCoverage because it was just // converted above (filtered just in case) final byte[] partitionKey = c.getPartitionKey(); final byte[] sortKey = c.getSortKey(); final GeoWaveKey geowaveKey = new GeoWaveKeyImpl( helper.getNewDataId(c), t._1.getInternalAdapterId(), partitionKey, sortKey, 0); final GeoWaveInputKey inputKey = new GeoWaveInputKey(helper.getNewAdapterId(), geowaveKey, helper.getIndexName()); return new Tuple2<>(inputKey, helper.getSerializer().toWritable(c)); }); } return Collections.emptyIterator(); } } private static class MergeRasterFunction implements Function>, GridCoverage> { private final RasterTileResizeHelper helper; /** * */ private static final long serialVersionUID = 1L; public 
MergeRasterFunction( final short oldAdapterId, final short newAdapterId, final RasterDataAdapter newAdapter, final Index index) { super(); helper = new RasterTileResizeHelper(oldAdapterId, newAdapterId, newAdapter, index); } @Override public GridCoverage call(final Tuple2> tuple) throws Exception { return helper.getMergedCoverage( tuple._1, Iterables.transform(tuple._2, gcw -> helper.getSerializer().fromWritable(gcw))); } } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/resize/ResizeSparkCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.resize;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.adapter.raster.operations.RasterSection;
import org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/** CLI command that resizes raster tiles by delegating to {@link RasterTileResizeSparkRunner}. */
@GeowaveOperation(name = "resizespark", parentOperation = RasterSection.class)
@Parameters(commandDescription = "Resize raster tiles using Spark")
public class ResizeSparkCommand extends DefaultOperation implements Command {
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  @Parameter(names = {"-n", "--name"}, description = "The spark application name")
  private String appName = "RasterResizeRunner";

  @Parameter(names = {"-ho", "--host"}, description = "The spark driver host")
  private String host = "localhost";

  @Parameter(names = {"-m", "--master"}, description = "The spark master designation")
  private String master = "yarn";

  @ParametersDelegate
  private RasterTileResizeCommandLineOptions options = new RasterTileResizeCommandLineOptions();

  private DataStorePluginOptions inputStoreOptions = null;
  private DataStorePluginOptions outputStoreOptions = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    createRunner(params).run();
  }

  /**
   * Validates the positional store-name arguments, resolves both stores against the GeoWave
   * config file, and returns a fully configured Spark runner.
   */
  public RasterTileResizeSparkRunner createRunner(final OperationParams params) {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      throw new ParameterException("Requires arguments: ");
    }
    final String inputName = parameters.get(0);
    final String outputName = parameters.get(1);
    // resolve both store names against the config file
    final File config = getGeoWaveConfigFile(params);
    inputStoreOptions = CLIUtils.loadStore(inputName, config, params.getConsole());
    outputStoreOptions = CLIUtils.loadStore(outputName, config, params.getConsole());
    final RasterTileResizeSparkRunner runner =
        new RasterTileResizeSparkRunner(inputStoreOptions, outputStoreOptions, options);
    runner.setHost(host);
    runner.setAppName(appName);
    runner.setMaster(master);
    return runner;
  }

  public List getParameters() {
    return parameters;
  }

  public void setParameters(final String inputStore, final String outputStore) {
    parameters = new ArrayList<>();
    parameters.add(inputStore);
    parameters.add(outputStore);
  }

  public RasterTileResizeCommandLineOptions getOptions() {
    return options;
  }

  public void setOptions(final RasterTileResizeCommandLineOptions options) {
    this.options = options;
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  public DataStorePluginOptions getOutputStoreOptions() {
    return outputStoreOptions;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  public void setMaster(final String master) {
    this.master = master;
  }
}

================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/GeoWaveSpatialEncoders.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql; import org.apache.spark.sql.types.UDTRegistration; import org.locationtech.geowave.analytic.spark.sparksql.udt.GeometryUDT; import org.locationtech.geowave.analytic.spark.sparksql.udt.LineStringUDT; import org.locationtech.geowave.analytic.spark.sparksql.udt.MultiLineStringUDT; import org.locationtech.geowave.analytic.spark.sparksql.udt.MultiPointUDT; import org.locationtech.geowave.analytic.spark.sparksql.udt.MultiPolygonUDT; import org.locationtech.geowave.analytic.spark.sparksql.udt.PointUDT; import org.locationtech.geowave.analytic.spark.sparksql.udt.PolygonUDT; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; /** Created by jwileczek on 7/24/18. 
*/ public class GeoWaveSpatialEncoders { public static GeometryUDT geometryUDT = new GeometryUDT(); public static PointUDT pointUDT = new PointUDT(); public static LineStringUDT lineStringUDT = new LineStringUDT(); public static PolygonUDT polygonUDT = new PolygonUDT(); public static MultiPointUDT multiPointUDT = new MultiPointUDT(); public static MultiPolygonUDT multiPolygonUDT = new MultiPolygonUDT(); public static void registerUDTs() { UDTRegistration.register( Geometry.class.getCanonicalName(), GeometryUDT.class.getCanonicalName()); UDTRegistration.register(Point.class.getCanonicalName(), PointUDT.class.getCanonicalName()); UDTRegistration.register( LineString.class.getCanonicalName(), LineStringUDT.class.getCanonicalName()); UDTRegistration.register(Polygon.class.getCanonicalName(), PolygonUDT.class.getCanonicalName()); UDTRegistration.register( MultiLineString.class.getCanonicalName(), MultiLineStringUDT.class.getCanonicalName()); UDTRegistration.register( MultiPoint.class.getCanonicalName(), MultiPointUDT.class.getCanonicalName()); UDTRegistration.register( MultiPolygon.class.getCanonicalName(), MultiPolygonUDT.class.getCanonicalName()); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SimpleFeatureDataFrame.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.sparksql;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructType;
import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;
import org.locationtech.geowave.analytic.spark.GeoWaveRDD;
import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunctionRegistry;
import org.locationtech.geowave.analytic.spark.sparksql.util.SchemaConverter;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.opengis.feature.simple.SimpleFeatureType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Builds a Spark SQL {@link Dataset} of {@link Row}s from a GeoWave vector type. Call
 * {@link #init} first to resolve the feature type and derive the Spark schema, then
 * {@link #getDataFrame} to materialize (and cache) the DataFrame from a {@link GeoWaveRDD}.
 */
public class SimpleFeatureDataFrame {
  // Fix: logger is now static final per SLF4J convention (was a mutable static field).
  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureDataFrame.class);

  private final SparkSession sparkSession;
  private SimpleFeatureType featureType;
  private StructType schema;
  // Generic parameters restored (the fields were raw types): both hold Spark SQL rows.
  private JavaRDD<Row> rowRDD = null;
  private Dataset<Row> dataFrame = null;

  public SimpleFeatureDataFrame(final SparkSession sparkSession) {
    this.sparkSession = sparkSession;
  }

  /**
   * Resolves the named feature type from the store, converts it to a Spark schema, and registers
   * the geometry UDFs on the session.
   *
   * @param dataStore the store to look the type up in
   * @param typeName the vector type name
   * @return {@code true} when both the feature type and schema were resolved
   */
  public boolean init(final DataStorePluginOptions dataStore, final String typeName) {
    featureType = FeatureDataUtils.getFeatureType(dataStore, typeName);
    if (featureType == null) {
      return false;
    }

    schema = SchemaConverter.schemaFromFeatureType(featureType);
    if (schema == null) {
      return false;
    }

    GeomFunctionRegistry.registerGeometryFunctions(sparkSession);

    return true;
  }

  public SimpleFeatureType getFeatureType() {
    return featureType;
  }

  public StructType getSchema() {
    return schema;
  }

  /** @return the cached row RDD, or {@code null} before {@link #getDataFrame} has been called */
  public JavaRDD<Row> getRowRDD() {
    return rowRDD;
  }

  /**
   * Lazily maps the features of {@code pairRDD} to rows and wraps them in a DataFrame. Both the
   * RDD and the DataFrame are cached, so subsequent calls return the first result regardless of
   * the argument; use {@link #resetDataFrame} to rebuild from a different RDD.
   */
  public Dataset<Row> getDataFrame(final GeoWaveRDD pairRDD) {
    if (rowRDD == null) {
      final SimpleFeatureMapper mapper = new SimpleFeatureMapper(schema);
      rowRDD = pairRDD.getRawRDD().values().map(mapper);
    }
    if (dataFrame == null) {
      dataFrame = sparkSession.createDataFrame(rowRDD, schema);
    }
    return dataFrame;
  }

  /** Clears the cached RDD/DataFrame and rebuilds them from the given RDD. */
  public Dataset<Row> resetDataFrame(final GeoWaveRDD pairRDD) {
    rowRDD = null;
    dataFrame = null;
    return getDataFrame(pairRDD);
  }
}

================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SimpleFeatureDataType.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql; import org.apache.spark.sql.types.DataType; public class SimpleFeatureDataType { private final DataType dataType; private final boolean isGeom; public SimpleFeatureDataType(final DataType dataType, final boolean isGeom) { this.dataType = dataType; this.isGeom = isGeom; } public DataType getDataType() { return dataType; } public boolean isGeom() { return isGeom; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SimpleFeatureMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.sparksql;

import java.io.Serializable;
import java.sql.Timestamp;
import java.util.Date;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * Spark map function that converts a GeoTools {@link SimpleFeature} into a Spark SQL {@link Row}
 * conforming to a previously derived {@link StructType} schema. Attributes are copied positionally;
 * the geometry attribute (named "geom") is passed through unchanged and {@link Date} attributes
 * backing a timestamp column are converted to {@link Timestamp}.
 */
@SuppressFBWarnings
public class SimpleFeatureMapper implements Function<SimpleFeature, Row> {

  private static final long serialVersionUID = 1L;

  // Bug fix: the logger was created for SimpleFeatureDataFrame.class, mislabeling every
  // log line from this class. It is also now final per SLF4J convention.
  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureMapper.class);

  private final StructType schema;

  public SimpleFeatureMapper(final StructType schema) {
    this.schema = schema;
  }

  /**
   * Converts a single feature to a row with the mapper's schema.
   *
   * @param feature the feature whose attributes are copied into the row
   * @return a row whose values line up positionally with the schema fields
   */
  @Override
  public Row call(final SimpleFeature feature) throws Exception {
    // A Serializable[] is used as the backing array; null attributes leave null cells.
    final Object[] fields = new Serializable[schema.size()];
    for (int i = 0; i < schema.size(); i++) {
      final Object fieldObj = feature.getAttribute(i);
      if (fieldObj != null) {
        final StructField structField = schema.apply(i);
        if (structField.name().equals("geom")) {
          // Geometry values are stored as-is (handled by the registered geometry UDT).
          fields[i] = fieldObj;
        } else if (structField.dataType() == DataTypes.TimestampType) {
          // Convert java.util.Date attributes into SQL timestamps.
          fields[i] = new Timestamp(((Date) fieldObj).getTime());
        } else if (structField.dataType() != null) {
          fields[i] = fieldObj;
        } else {
          // Idiom fix: parameterized logging instead of string concatenation.
          LOGGER.error("Unexpected attribute in field({}): {}", structField.name(), fieldObj);
        }
      }
    }
    return new GenericRowWithSchema(fields, schema);
  }
}
================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SqlQueryRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.sparksql;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FilenameUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.parser.ParseException;
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;
import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;
import org.locationtech.geowave.analytic.spark.GeoWaveRDD;
import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader;
import org.locationtech.geowave.analytic.spark.GeoWaveSparkConf;
import org.locationtech.geowave.analytic.spark.RDDOptions;
import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction;
import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomWithinDistance;
import org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI;
import org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor;
import org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.QueryBuilder;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

/**
 * Executes a Spark SQL statement over GeoWave-backed temp views. Registered input stores are
 * loaded as RDDs and exposed as views; before running the SQL, the statement's logical plan is
 * inspected for a single GeoWave geometry-predicate UDF so that an optimized spatial join
 * ({@link SpatialJoinRunner}) can be executed first and its results substituted for the views.
 *
 * NOTE(review): collection/Dataset type parameters throughout this class appear to have been
 * stripped by text extraction (raw {@code HashMap}, {@code List}, {@code Dataset}) -- confirm
 * against the original source.
 */
public class SqlQueryRunner {
  private static final Logger LOGGER = LoggerFactory.getLogger(SqlQueryRunner.class);

  // Spark session parameters; used only when the session is created internally.
  private String appName = "SqlQueryRunner";
  private String master = "yarn";
  private String host = "localhost";

  private SparkSession session;

  // Input stores keyed by view name.
  private final HashMap inputStores = new HashMap<>();
  // Geometry-predicate UDFs discovered in the statement's logical plan.
  private final List extractedPredicates = new ArrayList<>();
  private String sql = null;

  public SqlQueryRunner() {}

  // Lazily creates the Spark session, attaching this jar's location when running from a jar.
  private void initContext() {
    if (session == null) {
      String jar = "";
      try {
        jar =
            SqlQueryRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
        if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) {
          jar = "";
        }
      } catch (final URISyntaxException e) {
        LOGGER.error("Unable to set jar location in spark configuration", e);
      }
      session = GeoWaveSparkConf.createSessionFromParams(appName, master, host, jar);
    }
  }

  /** Closes and discards the Spark session if one was created. */
  public void close() {
    if (session != null) {
      session.close();
      session = null;
    }
  }

  /**
   * Runs the configured SQL. When exactly one supported geometry predicate is found in a simple
   * (non-compound, non-subquery) WHERE clause, an optimized spatial join is executed first and its
   * results replace the original views before the SQL is handed to Spark.
   *
   * @return the Dataset produced by {@code session.sql(sql)}
   */
  public Dataset run()
      throws IOException, InterruptedException, ExecutionException, ParseException {
    initContext();
    // Load stores and create views.
    loadStoresAndViews();

    // Create a version of the sql without string literals to check for
    // subquery syntax in sql statement.
    final Pattern stringLit = Pattern.compile("(?:\\'|\\\").*?(?:\\'|\\\")");
    final Matcher m = stringLit.matcher(sql);
    final String cleanedSql = m.replaceAll("");
    LOGGER.debug("cleaned SQL statement: " + cleanedSql);

    // This regex checks for the presence of multiple sql actions being done
    // in one sql statement.
    // Ultimately this is detecting the presence of subqueries within the
    // sql statement
    // which due to the complexity of breaking down we won't support
    // injecting a optimized join into the process
    // NOTE(review): String.matches() requires the whole input to match, but this pattern is a
    // zero-width lookahead anchored at ^ -- verify it actually behaves as intended here.
    if (!cleanedSql.matches(
        "(?i)^(?=(?:.*(?:\\b(?:INSERT INTO|UPDATE|SELECT|WITH|DELETE|CREATE TABLE|ALTER TABLE|DROP TABLE)\\b)){2})")) {
      // Parse sparks logical plan for query and determine if spatial join
      // is present
      LogicalPlan plan = null;
      plan = session.sessionState().sqlParser().parsePlan(sql);
      // Walk the pretty-printed JSON form of the plan looking for Filter nodes whose
      // condition references an unresolved function registered in UDFRegistrySPI.
      final JsonParser gsonParser = new JsonParser();
      final JsonElement jElement = gsonParser.parse(plan.prettyJson());
      if (jElement.isJsonArray()) {
        final JsonArray jArray = jElement.getAsJsonArray();
        final int size = jArray.size();
        for (int iObj = 0; iObj < size; iObj++) {
          final JsonElement childElement = jArray.get(iObj);
          if (childElement.isJsonObject()) {
            final JsonObject jObj = childElement.getAsJsonObject();
            final String objClass = jObj.get("class").getAsString();
            if (Objects.equals(objClass, "org.apache.spark.sql.catalyst.plans.logical.Filter")) {
              // Search through filter Object to determine if
              // GeomPredicate function present in condition.
              final JsonElement conditionElements = jObj.get("condition");
              if (conditionElements.isJsonArray()) {
                final JsonArray conditionArray = conditionElements.getAsJsonArray();
                final int condSize = conditionArray.size();
                for (int iCond = 0; iCond < condSize; iCond++) {
                  final JsonElement childCond = conditionArray.get(iCond);
                  if (childCond.isJsonObject()) {
                    final JsonObject condObj = childCond.getAsJsonObject();
                    final String condClass = condObj.get("class").getAsString();
                    if (Objects.equals(
                        condClass,
                        "org.apache.spark.sql.catalyst.analysis.UnresolvedFunction")) {
                      final String udfName =
                          condObj.get("name").getAsJsonObject().get("funcName").getAsString();
                      final UDFNameAndConstructor geomUDF =
                          UDFRegistrySPI.findFunctionByName(udfName);
                      if (geomUDF != null) {
                        // Record the predicate instance and its name for later join planning.
                        final ExtractedGeomPredicate relevantPredicate =
                            new ExtractedGeomPredicate();
                        relevantPredicate.predicate = geomUDF.getPredicateConstructor().get();
                        relevantPredicate.predicateName = udfName;
                        extractedPredicates.add(relevantPredicate);
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
    }

    // We only need to do all this query work if we find a predicate that
    // would indicate a spatial join
    if (extractedPredicates.size() == 1) {
      // This pattern detects the word where outside of quoted areas and
      // captures it in group 2
      final Pattern whereDetect = Pattern.compile("(?i)(\"[^\"]*\"|'[^']*')|(\\bWHERE\\b)");
      final Pattern andOrDetect = Pattern.compile("(?i)(\"[^\"]*\"|'[^']*')|(\\bAND|OR\\b)");
      final Pattern orderGroupDetect =
          Pattern.compile("(?i)(\"[^\"]*\"|'[^']*')|(\\bORDER BY|GROUP BY\\b)");
      final Matcher filterStart = getFirstPositiveMatcher(whereDetect, sql);
      if (filterStart == null) {
        LOGGER.error("There should be a where clause matching the pattern. Running default SQL");
        return runDefaultSQL();
      }
      final int whereStart = filterStart.start(2);
      int whereEnd = sql.length();
      // The filter clause ends at the first ORDER BY/GROUP BY after WHERE, or end of statement.
      final Matcher filterEnd = getFirstPositiveMatcher(orderGroupDetect, sql.substring(whereStart));
      if (filterEnd != null) {
        whereEnd = filterEnd.start(2);
      }
      final String filterClause = sql.substring(whereStart, whereEnd);
      LOGGER.warn("Extracted Filter Clause: " + filterClause);

      // Compound conditions (AND/OR) could imply multiple joins; bail out to plain Spark SQL.
      final Matcher compoundFilter = getFirstPositiveMatcher(andOrDetect, filterClause);
      if (compoundFilter != null) {
        LOGGER.warn(
            "Compound conditional detected can result in multiple joins. Too complex to plan in current context. Running default sql");
        return runDefaultSQL();
      }

      final ExtractedGeomPredicate pred = extractedPredicates.get(0);

      // Parse filter string for predicate location
      final int functionPos = filterClause.indexOf(pred.predicateName);
      final int funcArgStart = filterClause.indexOf("(", functionPos);
      final int funcArgEnd = filterClause.indexOf(")", funcArgStart);
      String funcArgs = filterClause.substring(funcArgStart + 1, funcArgEnd);
      funcArgs = funcArgs.replaceAll("\\s", "");
      LOGGER.warn("Function Args: " + funcArgs);
      final String[] args = funcArgs.split(Pattern.quote(","));
      if (args.length == 2) {
        // Determine valid table relations that map to input stores
        final String[] tableRelations = getTableRelations(args);
        pred.leftTableRelation = tableRelations[0];
        pred.rightTableRelation = tableRelations[1];
      }
      if ((pred.leftTableRelation == null) || (pred.rightTableRelation == null)) {
        LOGGER.warn("Cannot translate table identifier to geowave rdd for join.");
        return runDefaultSQL();
      }

      // Extract radius for distance join from condition
      boolean negativePredicate = false;
      if (Objects.equals(pred.predicateName, "GeomDistance")) {
        // Look ahead two tokens for logical operand and scalar|boolean
        final String afterFunc = filterClause.substring(funcArgEnd + 1);
        final String[] tokens = afterFunc.split(" ");
        double radius = 0.0;
        if (tokens.length < 2) {
          LOGGER.warn("Could not extract radius for distance join. Running default SQL");
          return runDefaultSQL();
        } else {
          final String logicalOperand = tokens[0].trim();
          // "> r" / ">= r" means features FARTHER than the radius, i.e. a negative test.
          if ((logicalOperand.equals(">")) || (logicalOperand.equals(">="))) {
            negativePredicate = true;
          }
          final String radiusStr = tokens[1].trim();
          if (!org.apache.commons.lang3.math.NumberUtils.isNumber(radiusStr)) {
            LOGGER.warn("Could not extract radius for distance join. Running default SQL");
            return runDefaultSQL();
          } else {
            final Double r = org.apache.commons.lang3.math.NumberUtils.createDouble(radiusStr);
            if (r == null) {
              LOGGER.warn("Could not extract radius for distance join. Running default SQL");
              return runDefaultSQL();
            }
            radius = r.doubleValue();
          }
        }
        ((GeomWithinDistance) pred.predicate).setRadius(radius);
      }

      // At this point we are performing a join
      final SpatialJoinRunner joinRunner = new SpatialJoinRunner(session);
      // Collect input store info for join
      final InputStoreInfo leftStore = inputStores.get(pred.leftTableRelation);
      final InputStoreInfo rightStore = inputStores.get(pred.rightTableRelation);
      joinRunner.setNegativeTest(negativePredicate);

      // Setup store info for runner
      final AdapterToIndexMapping[] leftMappings =
          leftStore.getOrCreateAdapterIndexMappingStore().getIndicesForAdapter(
              leftStore.getOrCreateInternalAdapterStore().getAdapterId(leftStore.typeName));
      final AdapterToIndexMapping[] rightMappings =
          rightStore.getOrCreateAdapterIndexMappingStore().getIndicesForAdapter(
              rightStore.getOrCreateInternalAdapterStore().getAdapterId(rightStore.typeName));
      // Use the first mapped index's strategy on each side, when one exists.
      NumericIndexStrategy leftStrat = null;
      if (leftMappings.length > 0) {
        leftStrat = leftMappings[0].getIndex(leftStore.getOrCreateIndexStore()).getIndexStrategy();
      }
      NumericIndexStrategy rightStrat = null;
      if (rightMappings.length > 0) {
        rightStrat =
            rightMappings[0].getIndex(rightStore.getOrCreateIndexStore()).getIndexStrategy();
      }
      joinRunner.setLeftRDD(
          GeoWaveRDDLoader.loadIndexedRDD(session.sparkContext(), leftStore.rdd, leftStrat));
      joinRunner.setRightRDD(
          GeoWaveRDDLoader.loadIndexedRDD(session.sparkContext(), rightStore.rdd, rightStrat));
      joinRunner.setPredicate(pred.predicate);
      joinRunner.setLeftStore(leftStore.storeOptions);
      joinRunner.setRightStore(rightStore.storeOptions);

      // Execute the join
      joinRunner.run();

      // Load results into dataframes and replace original views with
      // joined views
      final SimpleFeatureDataFrame leftResultFrame = new SimpleFeatureDataFrame(session);
      final SimpleFeatureDataFrame rightResultFrame = new SimpleFeatureDataFrame(session);
      leftResultFrame.init(leftStore.storeOptions, leftStore.typeName);
      rightResultFrame.init(rightStore.storeOptions, rightStore.typeName);
      final Dataset leftFrame = leftResultFrame.getDataFrame(joinRunner.getLeftResults());
      final Dataset rightFrame = rightResultFrame.getDataFrame(joinRunner.getRightResults());
      leftFrame.createOrReplaceTempView(leftStore.viewName);
      rightFrame.createOrReplaceTempView(rightStore.viewName);
    }

    // Run the remaining query through the session sql runner.
    // This will likely attempt to regenerate the join, but should reuse the
    // pairs generated from optimized join beforehand
    final Dataset results = session.sql(sql);
    return results;
  }

  // Fallback path: hand the unmodified SQL straight to Spark.
  private Dataset runDefaultSQL() {
    return session.sql(sql);
  }

  // Returns the first match of the pattern whose group 2 (the unquoted keyword) participated.
  private Matcher getFirstPositiveMatcher(final Pattern compiledPattern, final String sql) {
    final Matcher returnMatch = compiledPattern.matcher(sql);
    return getNextPositiveMatcher(returnMatch);
  }

  // Advances the matcher until group 2 matched (i.e. the hit is outside quoted text), or null.
  private Matcher getNextPositiveMatcher(final Matcher lastMatch) {
    while (lastMatch.find()) {
      if (lastMatch.group(2) != null) {
        return lastMatch;
      }
    }
    return null;
  }

  // Resolves both predicate arguments ("relation.attribute") to registered view names.
  private String[] getTableRelations(final String[] predicateArgs) {
    final String[] outputRelations =
        {getTableNameFromArg(predicateArgs[0].trim()), getTableNameFromArg(predicateArgs[1].trim())};
    return outputRelations;
  }

  // Maps a "relation.attribute" argument to the view name of a registered input store, or null.
  private String getTableNameFromArg(final String funcArg) {
    final String[] attribSplit = funcArg.split(Pattern.quote("."));
    // If we split into two parts the first part will be the relation name
    if (attribSplit.length == 2) {
      final InputStoreInfo storeInfo = inputStores.get(attribSplit[0].trim());
      if (storeInfo != null) {
        return storeInfo.viewName;
      }
    }
    return null;
  }

  // Loads each registered store as a GeoWaveRDD and publishes it as a temp view on the session.
  private void loadStoresAndViews() throws IOException {
    final Collection addStores = inputStores.values();
    for (final InputStoreInfo storeInfo : addStores) {
      final RDDOptions rddOpts = new RDDOptions();
      rddOpts.setQuery(QueryBuilder.newBuilder().addTypeName(storeInfo.typeName).build());
      storeInfo.rdd =
          GeoWaveRDDLoader.loadRDD(session.sparkContext(), storeInfo.storeOptions, rddOpts);

      // Create a DataFrame from the Left RDD
      final SimpleFeatureDataFrame dataFrame = new SimpleFeatureDataFrame(session);

      if (!dataFrame.init(storeInfo.storeOptions, storeInfo.typeName)) {
        LOGGER.error("Failed to initialize dataframe");
        return;
      }

      LOGGER.debug(dataFrame.getSchema().json());

      final Dataset dfTemp = dataFrame.getDataFrame(storeInfo.rdd);

      dfTemp.createOrReplaceTempView(storeInfo.viewName);
    }
  }

  /**
   * Registers a store/type as an input view for the SQL statement.
   *
   * @param storeOptions the store to read from (required)
   * @param typeName the type to expose; when null the store's first feature type is used
   * @param viewName the view name; when null the (resolved) type name is used
   * @return the view name the store was registered under, or null on failure
   */
  public String addInputStore(
      final DataStorePluginOptions storeOptions,
      final String typeName,
      final String viewName) {
    if (storeOptions == null) {
      LOGGER.error("Must supply datastore plugin options.");
      return null;
    }
    // If view name is null we will attempt to use adapterId as viewName
    String addTypeName = typeName;
    // If adapterId is null we grab first adapter available from store
    if (addTypeName == null) {
      final List adapterTypes = FeatureDataUtils.getFeatureTypeNames(storeOptions);
      final int adapterCount = adapterTypes.size();
      if (adapterCount > 0) {
        addTypeName = adapterTypes.get(0);
      } else {
        LOGGER.error("Feature adapter not found in store. One must be specified manually");
        return null;
      }
    }
    String addView = viewName;
    if (addView == null) {
      addView = addTypeName;
    }
    // Check if store exists already using that view name
    if (inputStores.containsKey(addView)) {
      return addView;
    }
    // Create and add new store info if we make it to this point
    final InputStoreInfo inputInfo = new InputStoreInfo(storeOptions, addTypeName, addView);
    inputStores.put(addView, inputInfo);
    return addView;
  }

  public void removeInputStore(final String viewName) {
    inputStores.remove(viewName);
  }

  public void removeAllStores() {
    inputStores.clear();
  }

  public void setSparkSession(final SparkSession session) {
    this.session = session;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  public void setMaster(final String master) {
    this.master = master;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  public void setSql(final String sql) {
    this.sql = sql;
  }

  /** Per-view bookkeeping: store options, lazily created metadata stores, and the loaded RDD. */
  private static class InputStoreInfo {
    public InputStoreInfo(
        final DataStorePluginOptions storeOptions,
        final String typeName,
        final String viewName) {
      this.storeOptions = storeOptions;
      this.typeName = typeName;
      this.viewName = viewName;
    }

    private final DataStorePluginOptions storeOptions;
    private IndexStore indexStore = null;
    private InternalAdapterStore internalAdapterStore = null;
    private AdapterIndexMappingStore adapterIndexMappingStore = null;
    private final String typeName;
    private final String viewName;
    private GeoWaveRDD rdd = null;

    private IndexStore getOrCreateIndexStore() {
      if (indexStore == null) {
        indexStore = storeOptions.createIndexStore();
      }
      return indexStore;
    }

    private InternalAdapterStore getOrCreateInternalAdapterStore() {
      if (internalAdapterStore == null) {
        internalAdapterStore = storeOptions.createInternalAdapterStore();
      }
      return internalAdapterStore;
    }

    private AdapterIndexMappingStore getOrCreateAdapterIndexMappingStore() {
      if (adapterIndexMappingStore == null) {
        adapterIndexMappingStore = storeOptions.createAdapterIndexMappingStore();
      }
      return adapterIndexMappingStore;
    }
  }

  /** A geometry-predicate UDF found in the plan plus the relations it joins. */
  private static class ExtractedGeomPredicate {
    private GeomFunction predicate;
    private String predicateName;
    private String leftTableRelation = null;
    private String rightTableRelation = null;
  }
}

================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/SqlResultsWriter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.sparksql;

import java.sql.Timestamp;
import java.text.NumberFormat;
import java.util.Date;
import java.util.List;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.analytic.spark.sparksql.util.SchemaConverter;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.Writer;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Writes the rows of a Spark SQL result Dataset back into a GeoWave data store as simple features
 * under a spatial index. The Dataset schema is converted to a feature type; the "geom" column is
 * written as the feature geometry and timestamp columns are converted back to {@link Date}s.
 *
 * NOTE(review): generic type parameters (e.g. {@code Dataset<Row>}, {@code List<Row>}) appear to
 * have been stripped by text extraction -- confirm against the original source.
 */
public class SqlResultsWriter {
  private static final Logger LOGGER = LoggerFactory.getLogger(SqlResultsWriter.class);

  // Type name used when the caller does not provide one.
  private static final String DEFAULT_TYPE_NAME = "sqlresults";

  private final Dataset results;
  private final DataStorePluginOptions outputDataStore;

  // Zero-pads row ordinals to six digits for stable feature ids ("result-000001", ...).
  private final NumberFormat nf;

  public SqlResultsWriter(final Dataset results, final DataStorePluginOptions outputDataStore) {
    this.results = results;
    this.outputDataStore = outputDataStore;

    nf = NumberFormat.getIntegerInstance();
    nf.setMinimumIntegerDigits(6);
  }

  /**
   * Collects the result rows to the driver and writes each one as a simple feature.
   *
   * @param typeName the output type (adapter) name; falls back to {@value #DEFAULT_TYPE_NAME}
   *        when null. Note: collectAsList() pulls the whole result set into driver memory.
   */
  public void writeResults(String typeName) {
    if (typeName == null) {
      typeName = DEFAULT_TYPE_NAME;
      LOGGER.warn(
          "Using default type name (adapter id): '" + DEFAULT_TYPE_NAME + "' for SQL output");
    }
    final StructType schema = results.schema();
    final SimpleFeatureType featureType = SchemaConverter.schemaToFeatureType(schema, typeName);

    final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(featureType);

    final FeatureDataAdapter featureAdapter = new FeatureDataAdapter(featureType);

    final DataStore featureStore = outputDataStore.createDataStore();

    final Index featureIndex =
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
    featureStore.addType(featureAdapter, featureIndex);
    try (Writer writer = featureStore.createWriter(featureAdapter.getTypeName())) {
      final List rows = results.collectAsList();
      for (int r = 0; r < rows.size(); r++) {
        final Row row = rows.get(r);
        // Copy each column into the feature builder, converting where necessary.
        for (int i = 0; i < schema.fields().length; i++) {
          final StructField field = schema.apply(i);
          final Object rowObj = row.apply(i);
          if (rowObj != null) {
            if (field.name().equals("geom")) {
              // The geometry column becomes the feature geometry.
              final Geometry geom = (Geometry) rowObj;
              sfBuilder.set("geom", geom);
            } else if (field.dataType() == DataTypes.TimestampType) {
              // Convert SQL timestamps back to java.util.Date for the feature attribute.
              final long millis = ((Timestamp) rowObj).getTime();
              final Date date = new Date(millis);
              sfBuilder.set(field.name(), date);
            } else {
              sfBuilder.set(field.name(), rowObj);
            }
          }
        }
        final SimpleFeature sf = sfBuilder.buildFeature("result-" + nf.format(r));
        writer.write(sf);
      }
    }
  }
}

================================================
FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/operations/SparkSqlCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.operations; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; import org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection; import org.locationtech.geowave.analytic.spark.sparksql.SqlQueryRunner; import org.locationtech.geowave.analytic.spark.sparksql.SqlResultsWriter; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.jts.util.Stopwatch; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; import com.beust.jcommander.internal.Console; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; @GeowaveOperation(name = "sql", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "Execute query using SparkSQL") public class SparkSqlCommand extends ServiceEnabledCommand { private static final Logger LOGGER = LoggerFactory.getLogger(SparkSqlCommand.class); private static final String 
STORE_ADAPTER_DELIM = "|"; private static final String CMD_DESCR = " - e.g. 'select * from %storename[" + STORE_ADAPTER_DELIM + "adaptername" + STORE_ADAPTER_DELIM + "viewName] where condition...'"; @Parameter(description = CMD_DESCR) private List parameters = new ArrayList<>(); @ParametersDelegate private SparkSqlOptions sparkSqlOptions = new SparkSqlOptions(); private DataStorePluginOptions outputDataStore = null; private final SqlQueryRunner sqlRunner = new SqlQueryRunner(); // Log some timing Stopwatch stopwatch = new Stopwatch(); @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } computeResults(params); } @Override public Void computeResults(final OperationParams params) throws Exception { // Config file final File configFile = getGeoWaveConfigFile(params); final String sql = parameters.get(0); LOGGER.debug("Input SQL: " + sql); final String cleanSql = initStores(configFile, sql, sparkSqlOptions.getOutputStoreName(), params.getConsole()); LOGGER.debug("Running with cleaned SQL: " + cleanSql); sqlRunner.setSql(cleanSql); sqlRunner.setAppName(sparkSqlOptions.getAppName()); sqlRunner.setHost(sparkSqlOptions.getHost()); sqlRunner.setMaster(sparkSqlOptions.getMaster()); stopwatch.reset(); stopwatch.start(); // Execute the query final Dataset results = sqlRunner.run(); stopwatch.stop(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Spark SQL query took " + stopwatch.getTimeString()); LOGGER.debug(" and got " + results.count() + " results"); results.printSchema(); } if (sparkSqlOptions.getShowResults() > 0) { results.show(sparkSqlOptions.getShowResults(), false); } params.getConsole().println("GeoWave SparkSQL query returned " + results.count() + " results"); if (outputDataStore != null) { final SqlResultsWriter sqlResultsWriter = new SqlResultsWriter(results, outputDataStore); String typeName = 
sparkSqlOptions.getOutputTypeName(); if (typeName == null) { typeName = "sqlresults"; } params.getConsole().println("Writing GeoWave SparkSQL query results to datastore..."); sqlResultsWriter.writeResults(typeName); params.getConsole().println("Datastore write complete."); } if (sparkSqlOptions.getCsvOutputFile() != null) { results.repartition(1).write().format("com.databricks.spark.csv").option( "header", "true").mode(SaveMode.Overwrite).save(sparkSqlOptions.getCsvOutputFile()); } sqlRunner.close(); return null; } @SuppressFBWarnings("SF_SWITCH_FALLTHROUGH") private String initStores( final File configFile, final String sql, final String outputStoreName, final Console console) { final Pattern storeDetect = Pattern.compile("(\\\"[^\\\"]*\\\"|'[^']*')|([%][^.,\\s]+)"); final String escapedDelimRegex = java.util.regex.Pattern.quote(STORE_ADAPTER_DELIM); Matcher matchedStore = getFirstPositiveMatcher(storeDetect, sql); String replacedSQL = sql; while (matchedStore != null) { String parseStore = matchedStore.group(2); final String originalStoreText = parseStore; // Drop the first character off string should be % sign parseStore = parseStore.substring(1); parseStore = parseStore.trim(); LOGGER.debug("parsed store: " + parseStore); final String[] storeNameParts = parseStore.split(escapedDelimRegex); LOGGER.debug("Split Count: " + storeNameParts.length); for (final String split : storeNameParts) { LOGGER.debug("Store split: " + split); } String storeName = null; String adapterName = null; String viewName = null; switch (storeNameParts.length) { case 3: viewName = storeNameParts[2].trim(); case 2: adapterName = storeNameParts[1].trim(); case 1: storeName = storeNameParts[0].trim(); break; default: throw new ParameterException( "Ambiguous datastore" + STORE_ADAPTER_DELIM + "adapter designation: " + Arrays.toString(storeNameParts)); } // Attempt to load store. 
final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, configFile, console); viewName = sqlRunner.addInputStore(storeOptions, adapterName, viewName); if (viewName != null) { replacedSQL = StringUtils.replace(replacedSQL, originalStoreText, viewName, -1); } matchedStore = getNextPositiveMatcher(matchedStore); } return replacedSQL; } private Matcher getFirstPositiveMatcher(final Pattern compiledPattern, final String sql) { final Matcher returnMatch = compiledPattern.matcher(sql); return getNextPositiveMatcher(returnMatch); } private Matcher getNextPositiveMatcher(final Matcher lastMatch) { while (lastMatch.find()) { if (lastMatch.group(2) != null) { return lastMatch; } } return null; } public List getParameters() { return parameters; } public void setParameters(final String sql) { parameters = new ArrayList<>(); parameters.add(sql); } public DataStorePluginOptions getOutputStoreOptions() { return outputDataStore; } public void setOutputStoreOptions(final DataStorePluginOptions outputStoreOptions) { outputDataStore = outputStoreOptions; } public SparkSqlOptions getSparkSqlOptions() { return sparkSqlOptions; } public void setSparkSqlOptions(final SparkSqlOptions sparkSqlOptions) { this.sparkSqlOptions = sparkSqlOptions; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/operations/SparkSqlOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
ownership. All rights reserved. This program and the accompanying materials are made available
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.sparksql.operations;

import com.beust.jcommander.Parameter;

/**
 * Command-line options for the SparkSQL command: Spark connection settings plus optional
 * CSV/datastore output destinations.
 */
public class SparkSqlOptions {
  @Parameter(names = {"-n", "--name"}, description = "The spark application name")
  private String appName = "GeoWave Spark SQL";

  @Parameter(names = {"-ho", "--host"}, description = "The spark driver host")
  private String host = "localhost";

  @Parameter(names = {"-m", "--master"}, description = "The spark master designation")
  private String master = "yarn";

  @Parameter(names = {"--csv"}, description = "The output CSV file name")
  private String csvOutputFile = null;

  @Parameter(names = {"--out"}, description = "The output datastore name")
  private String outputStoreName = null;

  @Parameter(names = {"--outtype"}, description = "The output feature type (adapter) name")
  private String outputTypeName = null;

  @Parameter(names = {"-s", "--show"}, description = "Number of result rows to display")
  private int showResults = 20;

  public SparkSqlOptions() {}

  public String getOutputStoreName() {
    return outputStoreName;
  }

  public void setOutputStoreName(final String outputStoreName) {
    this.outputStoreName = outputStoreName;
  }

  public String getAppName() {
    return appName;
  }

  public void setAppName(final String name) {
    appName = name;
  }

  public String getHost() {
    return host;
  }

  public void setHost(final String h) {
    host = h;
  }

  public String getMaster() {
    return master;
  }

  public void setMaster(final String m) {
    master = m;
  }

  public int getShowResults() {
    return showResults;
  }

  public void setShowResults(final int showResults) {
    this.showResults = showResults;
  }

  public String getOutputTypeName() {
    return outputTypeName;
  }

  public void setOutputTypeName(final String outputTypeName) {
    this.outputTypeName = outputTypeName;
  }

  public String getCsvOutputFile() {
    return csvOutputFile;
  }

  public void setCsvOutputFile(final String csvOutputFile) {
    this.csvOutputFile = csvOutputFile;
  }
}


================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/BufferOperation.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; public interface BufferOperation { public double getBufferAmount(); } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomContains.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomContains extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.contains(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomCovers.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomCovers extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.covers(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomCrosses.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomCrosses extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.crosses(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomDisjoint.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomDisjoint extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.disjoint(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomDistance.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.apache.spark.sql.api.java.UDF2; import org.locationtech.jts.geom.Geometry; public class GeomDistance implements UDF2 { /** * */ private static final long serialVersionUID = 1L; @Override public Double call(final Geometry leftGeom, final Geometry rightGeom) throws Exception { return leftGeom.distance(rightGeom); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomEquals.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomEquals extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.equals(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFromWKT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.apache.spark.sql.api.java.UDF1; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.io.WKTReader; /** Created by jwileczek on 8/16/18. */ public class GeomFromWKT implements UDF1 { /** * */ private static final long serialVersionUID = 1L; @Override public Geometry call(final String o) throws Exception { return new WKTReader().read(o); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.apache.spark.sql.api.java.UDF2; import org.locationtech.geowave.analytic.spark.sparksql.util.GeomReader; import org.locationtech.jts.geom.Geometry; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; @SuppressFBWarnings public abstract class GeomFunction implements UDF2, BufferOperation { /** * */ private static final long serialVersionUID = 1L; private final GeomReader geomReader = new GeomReader(); // Base GeomFunction will assume same bucket comparison @Override public double getBufferAmount() { return 0.0; } @Override public Boolean call(final Geometry t1, final Geometry t2) throws Exception { return apply(t1, t2); } public abstract boolean apply(Geometry geom1, Geometry geom2); public String getRegisterName() { return this.getClass().getSimpleName(); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomFunctionRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
ownership. All rights reserved. This program and the accompanying materials are made available
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.sparksql.udf;

import java.io.Serializable;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders;
import org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Registers all GeoWave geometry UDFs with a Spark session. */
public class GeomFunctionRegistry implements Serializable {
  private static final long serialVersionUID = -1729498500215830962L;
  private static final Logger LOGGER = LoggerFactory.getLogger(GeomFunctionRegistry.class);

  // Shared singleton instances; UDF objects are stateless so a single instance is safe to reuse
  private static final GeomDistance geomDistanceInstance = new GeomDistance();
  private static final GeomFromWKT geomWKTInstance = new GeomFromWKT();

  /** Registers the distance, WKT-parsing, and all predicate UDFs with the given session. */
  public static void registerGeometryFunctions(final SparkSession spark) {
    // Distance UDF is only exception to GeomFunction interface since it
    // returns Double
    spark.udf().register("GeomDistance", geomDistanceInstance, DataTypes.DoubleType);
    spark.udf().register("GeomFromWKT", geomWKTInstance, GeoWaveSpatialEncoders.geometryUDT);

    // Register all UDF functions from RegistrySPI
    for (final UDFNameAndConstructor udf : UDFRegistrySPI.getSupportedUDFs()) {
      final GeomFunction funcInstance = udf.getPredicateConstructor().get();
      spark.udf().register(funcInstance.getRegisterName(), funcInstance, DataTypes.BooleanType);
    }
  }
}


================================================ FILE:
analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomIntersects.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomIntersects extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.intersects(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomOverlaps.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomOverlaps extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.overlaps(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomTouches.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomTouches extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.touches(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomWithin.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomWithin extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.within(geom2); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/GeomWithinDistance.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udf; import org.locationtech.jts.geom.Geometry; public class GeomWithinDistance extends GeomFunction { /** * */ private static final long serialVersionUID = 1L; private double radius; public GeomWithinDistance() { radius = 0.01; } public GeomWithinDistance(final double radius) { this.radius = radius; } @Override public double getBufferAmount() { return radius; } public double getRadius() { return radius; } public void setRadius(final double radius) { this.radius = radius; } @Override public boolean apply(final Geometry geom1, final Geometry geom2) { return geom1.distance(geom2) <= radius; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udf/UDFRegistrySPI.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
ownership. All rights reserved. This program and the accompanying materials are made available
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.sparksql.udf;

import java.util.Objects;
import java.util.function.Supplier;

/** Registry of all supported geometry-predicate UDFs, keyed by name. */
public class UDFRegistrySPI {
  /** @return a fresh array of every supported predicate UDF with its constructor */
  public static UDFNameAndConstructor[] getSupportedUDFs() {
    return new UDFNameAndConstructor[] {
        new UDFNameAndConstructor(new String[] {"GeomContains"}, GeomContains::new),
        new UDFNameAndConstructor(new String[] {"GeomCovers"}, GeomCovers::new),
        new UDFNameAndConstructor(new String[] {"GeomCrosses"}, GeomCrosses::new),
        new UDFNameAndConstructor(new String[] {"GeomDisjoint"}, GeomDisjoint::new),
        new UDFNameAndConstructor(new String[] {"GeomEquals"}, GeomEquals::new),
        new UDFNameAndConstructor(new String[] {"GeomIntersects"}, GeomIntersects::new),
        new UDFNameAndConstructor(new String[] {"GeomOverlaps"}, GeomOverlaps::new),
        new UDFNameAndConstructor(new String[] {"GeomTouches"}, GeomTouches::new),
        new UDFNameAndConstructor(new String[] {"GeomWithin"}, GeomWithin::new),
        new UDFNameAndConstructor(new String[] {"GeomWithinDistance"}, GeomWithinDistance::new)};
  }

  /**
   * Looks up a UDF by any of its registered names.
   *
   * @return the matching entry, or {@code null} when no UDF carries that name
   */
  public static UDFNameAndConstructor findFunctionByName(final String udfName) {
    for (final UDFNameAndConstructor compare : getSupportedUDFs()) {
      if (compare.nameMatch(udfName)) {
        return compare;
      }
    }
    return null;
  }

  /** Pairs a UDF's accepted names with a no-arg constructor for its implementation. */
  public static class UDFNameAndConstructor {
    private final String[] udfNames;
    private final Supplier<GeomFunction> predicateConstructor;

    public UDFNameAndConstructor(
        final String[] udfNames,
        final Supplier<GeomFunction> predicateConstructor) {
      this.udfNames = udfNames;
      this.predicateConstructor = predicateConstructor;
    }

    public String[] getUDFNames() {
      return udfNames;
    }

    /** @return true when any of this UDF's names equals the given name */
    public boolean nameMatch(final String udfName) {
      for (final String name : udfNames) {
        if (Objects.equals(name, udfName)) {
          return true;
        }
      }
      return false;
    }

    public Supplier<GeomFunction> getPredicateConstructor() {
      return predicateConstructor;
    }
  }
}


================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/AbstractGeometryUDT.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
ownership. All rights reserved. This program and the accompanying materials are made available
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.analytic.spark.sparksql.udt;

import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.UserDefinedType;
import org.locationtech.geowave.core.geotime.util.TWKBReader;
import org.locationtech.geowave.core.geotime.util.TWKBWriter;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Created by jwileczek on 7/20/18. */
/**
 * Base Spark SQL user-defined type for JTS geometries, stored as a single binary column holding
 * the geometry's TWKB encoding.
 */
public abstract class AbstractGeometryUDT<T extends Geometry> extends UserDefinedType<T> {
  private static final long serialVersionUID = 1L;
  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeometryUDT.class);

  @Override
  public DataType sqlType() {
    return new StructType(
        new StructField[] {new StructField("wkb", DataTypes.BinaryType, true, Metadata.empty())});
  }

  @Override
  public String pyUDT() {
    return "geowave_pyspark.types." + this.getClass().getSimpleName();
  }

  @Override
  public InternalRow serialize(final T obj) {
    final byte[] bytes = new TWKBWriter().write(obj);
    // NOTE(review): numFields is set to bytes.length rather than 1; only field 0 is ever written
    // or read — confirm intent before changing
    final InternalRow returnRow = new GenericInternalRow(bytes.length);
    returnRow.update(0, bytes);
    return returnRow;
  }

  @Override
  @SuppressWarnings("unchecked")
  public T deserialize(final Object datum) {
    final InternalRow row = (InternalRow) datum;
    final byte[] bytes = row.getBinary(0);
    try {
      return (T) new TWKBReader().read(bytes);
    } catch (final ParseException e) {
      // Preserve the original contract of returning null on a parse failure, but log through
      // SLF4J instead of printing the stack trace to stderr
      LOGGER.error("Unable to parse TWKB geometry", e);
      return null;
    }
  }
}


================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/GeometryUDT.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.apache.spark.sql.types.DataType; import org.locationtech.jts.geom.Geometry; /** Created by jwileczek on 7/20/18. */ public class GeometryUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public boolean acceptsType(final DataType dataType) { return super.acceptsType(dataType) || (dataType.getClass() == GeometryUDT.class) || (dataType.getClass() == PointUDT.class) || (dataType.getClass() == LineStringUDT.class) || (dataType.getClass() == PolygonUDT.class) || (dataType.getClass() == MultiLineStringUDT.class) || (dataType.getClass() == MultiPointUDT.class) || (dataType.getClass() == MultiPolygonUDT.class); } @Override public Class userClass() { return Geometry.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/LineStringUDT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.locationtech.jts.geom.LineString; /** Created by jwileczek on 7/20/18. */ public class LineStringUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public Class userClass() { return LineString.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/MultiLineStringUDT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.locationtech.jts.geom.MultiLineString; /** Created by jwileczek on 7/20/18. */ public class MultiLineStringUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public Class userClass() { return MultiLineString.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/MultiPointUDT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.locationtech.jts.geom.MultiPoint; /** Created by jwileczek on 7/20/18. */ public class MultiPointUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public Class userClass() { return MultiPoint.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/MultiPolygonUDT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.locationtech.jts.geom.MultiPolygon; /** Created by jwileczek on 7/20/18. */ public class MultiPolygonUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public Class userClass() { return MultiPolygon.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/PointUDT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.locationtech.jts.geom.Point; /** Created by jwileczek on 7/20/18. */ public class PointUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public Class userClass() { return Point.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/udt/PolygonUDT.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.udt; import org.locationtech.jts.geom.Polygon; /** Created by jwileczek on 7/20/18. */ public class PolygonUDT extends AbstractGeometryUDT { /** * */ private static final long serialVersionUID = 1L; @Override public Class userClass() { return Polygon.class; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/util/GeomReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.util; import java.io.Serializable; import org.locationtech.jts.io.WKBReader; public class GeomReader extends WKBReader implements Serializable { /** * */ private static final long serialVersionUID = 1L; } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/util/GeomWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.util; import java.io.Serializable; import org.locationtech.geowave.core.geotime.util.TWKBWriter; public class GeomWriter extends TWKBWriter implements Serializable { /** * */ private static final long serialVersionUID = 1L; } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/sparksql/util/SchemaConverter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.sparksql.util; import java.util.ArrayList; import java.util.Date; import java.util.List; import org.apache.spark.sql.types.DataType; import org.apache.spark.sql.types.DataTypes; import org.apache.spark.sql.types.StructField; import org.apache.spark.sql.types.StructType; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.feature.type.BasicFeatureTypes; import org.geotools.referencing.CRS; import org.locationtech.geowave.analytic.spark.sparksql.GeoWaveSpatialEncoders; import org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataType; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.referencing.FactoryException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SchemaConverter { private static final Logger LOGGER = LoggerFactory.getLogger(SchemaConverter.class); public static SimpleFeatureType schemaToFeatureType( final StructType schema, final String typeName) { final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder(); typeBuilder.setName(typeName); typeBuilder.setNamespaceURI(BasicFeatureTypes.DEFAULT_NAMESPACE); try { typeBuilder.setCRS(CRS.decode("EPSG:4326", true)); } catch (final FactoryException e) { LOGGER.error(e.getMessage(), e); } final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder(); for (final StructField field : schema.fields()) { final AttributeDescriptor attrDesc = attrDescFromStructField(attrBuilder, 
field); typeBuilder.add(attrDesc); } return typeBuilder.buildFeatureType(); } private static AttributeDescriptor attrDescFromStructField( final AttributeTypeBuilder attrBuilder, final StructField field) { if (field.name().equals("geom")) { return attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geom"); } if (field.dataType() == DataTypes.StringType) { return attrBuilder.binding(String.class).buildDescriptor(field.name()); } else if (field.dataType() == DataTypes.DoubleType) { return attrBuilder.binding(Double.class).buildDescriptor(field.name()); } else if (field.dataType() == DataTypes.FloatType) { return attrBuilder.binding(Float.class).buildDescriptor(field.name()); } else if (field.dataType() == DataTypes.LongType) { return attrBuilder.binding(Long.class).buildDescriptor(field.name()); } else if (field.dataType() == DataTypes.IntegerType) { return attrBuilder.binding(Integer.class).buildDescriptor(field.name()); } else if (field.dataType() == DataTypes.BooleanType) { return attrBuilder.binding(Boolean.class).buildDescriptor(field.name()); } else if (field.dataType() == DataTypes.TimestampType) { return attrBuilder.binding(Date.class).buildDescriptor(field.name()); } return null; } public static StructType schemaFromFeatureType(final SimpleFeatureType featureType) { final List fields = new ArrayList<>(); for (final AttributeDescriptor attrDesc : featureType.getAttributeDescriptors()) { final SimpleFeatureDataType sfDataType = attrDescToDataType(attrDesc); final String fieldName = (sfDataType.isGeom() ? 
"geom" : attrDesc.getName().getLocalPart()); final StructField field = DataTypes.createStructField(fieldName, sfDataType.getDataType(), true); fields.add(field); } if (fields.isEmpty()) { LOGGER.error("Feature type produced empty dataframe schema!"); return null; } return DataTypes.createStructType(fields); } private static SimpleFeatureDataType attrDescToDataType(final AttributeDescriptor attrDesc) { boolean isGeom = false; DataType dataTypeOut = DataTypes.NullType; if (attrDesc.getType().getBinding().equals(String.class)) { dataTypeOut = DataTypes.StringType; } else if (attrDesc.getType().getBinding().equals(Double.class)) { dataTypeOut = DataTypes.DoubleType; } else if (attrDesc.getType().getBinding().equals(Float.class)) { dataTypeOut = DataTypes.FloatType; } else if (attrDesc.getType().getBinding().equals(Long.class)) { dataTypeOut = DataTypes.LongType; } else if (attrDesc.getType().getBinding().equals(Integer.class)) { dataTypeOut = DataTypes.IntegerType; } else if (attrDesc.getType().getBinding().equals(Boolean.class)) { dataTypeOut = DataTypes.BooleanType; } else if (attrDesc.getType().getBinding().equals(Date.class)) { dataTypeOut = DataTypes.TimestampType; } // Custom geometry types get WKB encoding else if (Geometry.class.isAssignableFrom(attrDesc.getType().getBinding())) { dataTypeOut = GeoWaveSpatialEncoders.geometryUDT; isGeom = true; } return new SimpleFeatureDataType(dataTypeOut, isGeom); } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/JoinOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Serializable configuration for a spatial join: which side the join is built from, and
 * whether the join predicate should be treated as negated.
 */
public class JoinOptions implements Serializable {

  private static final long serialVersionUID = 1L;

  /** Side of the join used as the build side; defaults to LEFT. */
  public static enum BuildSide {
    LEFT, RIGHT;
  }

  private BuildSide joinBuildSide = BuildSide.LEFT;
  private boolean negativePredicate = false;

  /** Creates options with defaults: LEFT build side, non-negated predicate. */
  public JoinOptions() {}

  /** Creates options with the given predicate negation and default build side. */
  public JoinOptions(final boolean negativeTest) {
    this.negativePredicate = negativeTest;
  }

  public boolean isNegativePredicate() {
    return negativePredicate;
  }

  public void setNegativePredicate(final boolean negativePredicate) {
    this.negativePredicate = negativePredicate;
  }

  public BuildSide getJoinBuildSide() {
    return joinBuildSide;
  }

  public void setJoinBuildSide(final BuildSide joinBuildSide) {
    this.joinBuildSide = joinBuildSide;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.spatial; import org.locationtech.geowave.analytic.spark.GeoWaveRDD; public abstract class JoinStrategy implements SpatialJoin { /** * */ private static final long serialVersionUID = 1L; // Final joined pair RDDs protected GeoWaveRDD leftJoined = null; protected GeoWaveRDD rightJoined = null; protected JoinOptions joinOpts = new JoinOptions(); public GeoWaveRDD getLeftResults() { return leftJoined; } public void setLeftResults(final GeoWaveRDD leftJoined) { this.leftJoined = leftJoined; } public GeoWaveRDD getRightResults() { return rightJoined; } public void setRightResults(final GeoWaveRDD rightJoined) { this.rightJoined = rightJoined; } public JoinOptions getJoinOptions() { return joinOpts; } public void setJoinOptions(final JoinOptions joinOpts) { this.joinOpts = joinOpts; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/SpatialJoin.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Contract for executing a spatial join between two indexed GeoWave RDDs within a Spark
 * session. Implementations are serializable so they can be shipped in Spark closures.
 */
public interface SpatialJoin extends Serializable {

  /**
   * Executes the join of {@code leftRDD} against {@code rightRDD} using the given geometry
   * predicate. Result retrieval is implementation-specific (see {@code JoinStrategy}).
   *
   * @throws InterruptedException if an async Spark action is interrupted
   * @throws ExecutionException if an async Spark action fails
   */
  void join(
      SparkSession spark,
      GeoWaveIndexedRDD leftRDD,
      GeoWaveIndexedRDD rightRDD,
      GeomFunction predicate) throws InterruptedException, ExecutionException;

  /** @return whether this join implementation can operate directly on the given strategy */
  boolean supportsJoin(NumericIndexStrategy indexStrategy);

  /**
   * Creates a join-compatible strategy derived from the given one, used when neither input's
   * strategy supports the join directly. May return null if no default can be built
   * (see usage in TieredSpatialJoin).
   */
  NumericIndexStrategy createDefaultStrategy(NumericIndexStrategy indexStrategy);
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.spatial; import java.io.IOException; import java.io.Serializable; import java.net.URISyntaxException; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutionException; import org.apache.commons.io.FilenameUtils; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.sql.SparkSession; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.analytic.spark.GeoWaveIndexedRDD; import org.locationtech.geowave.analytic.spark.GeoWaveRDD; import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader; import org.locationtech.geowave.analytic.spark.GeoWaveSparkConf; import org.locationtech.geowave.analytic.spark.RDDOptions; import org.locationtech.geowave.analytic.spark.RDDUtils; import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SpatialJoinRunner implements Serializable { /** */ private static final long serialVersionUID = 1L; private static final Logger LOGGER = 
LoggerFactory.getLogger(SpatialJoinRunner.class); // Options provided by user to run join private SparkSession session = null; private transient SparkContext sc = null; private String appName = "SpatialJoinRunner"; private String master = "yarn"; private String host = "localhost"; private Integer partCount = -1; private transient DataStorePluginOptions leftStore = null; private String leftAdapterTypeName = null; private String outLeftAdapterTypeName = null; private transient DataStorePluginOptions rightStore = null; private String rightAdapterTypeName = null; private String outRightAdapterTypeName = null; private boolean negativeTest = false; private transient DataStorePluginOptions outputStore = null; private GeomFunction predicate = null; private transient NumericIndexStrategy indexStrategy = null; // Variables loaded during runner. This can be updated to something cleaner // like GeoWaveRDD in future // to support different situations (indexed vs non indexed etc..) but keep // it hidden in implementation details private GeoWaveIndexedRDD leftRDD = null; private GeoWaveIndexedRDD rightRDD = null; private transient InternalAdapterStore leftInternalAdapterStore; private transient InternalAdapterStore rightInternalAdapterStore; private transient IndexStore leftIndexStore; private transient IndexStore rightIndexStore; // TODO: Join strategy could be supplied as variable or determined // automatically from index store (would require associating index and join // strategy) // for now will just use TieredSpatialJoin as that is the only one we have // implemented. 
private final JoinStrategy joinStrategy = new TieredSpatialJoin(); public SpatialJoinRunner() {} public SpatialJoinRunner(final SparkSession session) { this.session = session; } public void run() throws InterruptedException, ExecutionException, IOException { leftInternalAdapterStore = leftStore.createInternalAdapterStore(); rightInternalAdapterStore = rightStore.createInternalAdapterStore(); leftIndexStore = leftStore.createIndexStore(); rightIndexStore = rightStore.createIndexStore(); // Init context initContext(); // Load RDDs loadDatasets(); // Verify CRS match/transform possible verifyCRS(); // Run join joinStrategy.getJoinOptions().setNegativePredicate(negativeTest); joinStrategy.join(session, leftRDD, rightRDD, predicate); writeResultsToNewAdapter(); } public void close() { if (session != null) { session.close(); session = null; } } private Index[] getIndicesForAdapter( final DataStorePluginOptions storeOptions, final String typeName, final InternalAdapterStore internalAdapterStore, final IndexStore indexStore) { return Arrays.stream( storeOptions.createAdapterIndexMappingStore().getIndicesForAdapter( internalAdapterStore.getAdapterId(typeName))).map( mapping -> mapping.getIndex(indexStore)).toArray(Index[]::new); } private FeatureDataAdapter createOutputAdapter( final DataStorePluginOptions originalOptions, final String originalTypeName, String outputTypeName) { if (outputTypeName == null) { outputTypeName = createDefaultAdapterTypeName(originalTypeName, originalOptions); } final FeatureDataAdapter newAdapter = FeatureDataUtils.cloneFeatureDataAdapter(originalOptions, originalTypeName, outputTypeName); return newAdapter; } private void writeResultsToNewAdapter() throws IOException { if (outputStore != null) { final Index[] leftIndices = getIndicesForAdapter( leftStore, leftAdapterTypeName, leftInternalAdapterStore, leftIndexStore); final FeatureDataAdapter newLeftAdapter = createOutputAdapter(leftStore, leftAdapterTypeName, outLeftAdapterTypeName); final 
Index[] rightIndices = getIndicesForAdapter( rightStore, rightAdapterTypeName, rightInternalAdapterStore, rightIndexStore); final FeatureDataAdapter newRightAdapter = createOutputAdapter(rightStore, rightAdapterTypeName, outRightAdapterTypeName); // Write each feature set to new adapter and store using original // indexing methods. RDDUtils.writeRDDToGeoWave(sc, leftIndices, outputStore, newLeftAdapter, getLeftResults()); RDDUtils.writeRDDToGeoWave(sc, rightIndices, outputStore, newRightAdapter, getRightResults()); } } private String createDefaultAdapterTypeName( final String typeName, final DataStorePluginOptions storeOptions) { final StringBuffer defaultAdapterName = new StringBuffer(typeName + "_joined"); final InternalAdapterStore adapterStore = storeOptions.createInternalAdapterStore(); if (adapterStore.getAdapterId(defaultAdapterName.toString()) == null) { return defaultAdapterName.toString(); } Integer iSuffix = 0; final StringBuffer uniNum = new StringBuffer("_" + String.format("%02d", iSuffix)); defaultAdapterName.append(uniNum); while (adapterStore.getAdapterId(defaultAdapterName.toString()) != null) { // Should be _00 _01 etc iSuffix += 1; uniNum.append("_").append(String.format("%02d", iSuffix)); defaultAdapterName.append(uniNum); } return defaultAdapterName.toString(); } private void initContext() { if (session == null) { String jar = ""; try { jar = SpatialJoinRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); if (!FilenameUtils.isExtension(jar.toLowerCase(), "jar")) { jar = ""; } } catch (final URISyntaxException e) { LOGGER.error("Unable to set jar location in spark configuration", e); } SparkConf addonOptions = GeoWaveSparkConf.getDefaultConfig(); addonOptions = addonOptions.setAppName(appName).setMaster(master).set("spark.jars", jar); if (!Objects.equals(master, "yarn")) { addonOptions = addonOptions.set("spark.driver.host", host); } // Since default parallelism is normally set by spark-defaults only // set this to 
config if supplied by user if (partCount != -1) { addonOptions = addonOptions.set("spark.default.parallelism", partCount.toString()); } session = GeoWaveSparkConf.createDefaultSession(addonOptions); } sc = session.sparkContext(); } private GeoWaveIndexedRDD createRDDFromOptions( final DataStorePluginOptions storeOptions, String adapterTypeName, final InternalAdapterStore internalAdapterStore, final IndexStore indexStore) throws IOException { // If no adapterId provided by user grab first adapterId // available. if (adapterTypeName == null) { final List typeNames = FeatureDataUtils.getFeatureTypeNames(storeOptions); if (!typeNames.isEmpty()) { adapterTypeName = typeNames.get(0); } else { LOGGER.error("No valid adapter found in store to perform join."); return null; } } final RDDOptions rddOpts = new RDDOptions(); rddOpts.setQuery(QueryBuilder.newBuilder().addTypeName(adapterTypeName).build()); rddOpts.setMinSplits(partCount); rddOpts.setMaxSplits(partCount); NumericIndexStrategy rddStrategy = null; // Did the user provide a strategy for join? 
if (indexStrategy == null) { final Index[] rddIndices = getIndicesForAdapter(storeOptions, adapterTypeName, internalAdapterStore, indexStore); if (rddIndices.length > 0) { rddStrategy = rddIndices[0].getIndexStrategy(); } } else { rddStrategy = indexStrategy; } return GeoWaveRDDLoader.loadIndexedRDD(sc, storeOptions, rddOpts, rddStrategy); } private void loadDatasets() throws IOException { if (leftStore != null) { if (leftRDD == null) { leftRDD = createRDDFromOptions( leftStore, leftAdapterTypeName, leftInternalAdapterStore, leftIndexStore); } } if (rightStore != null) { if (rightRDD == null) { rightRDD = createRDDFromOptions( rightStore, rightAdapterTypeName, rightInternalAdapterStore, rightIndexStore); } } } private void verifyCRS() { // TODO: Verify that both stores have matching CRS or that one CRS can // be transformed into the other } // Accessors and Mutators public GeoWaveRDD getLeftResults() { return joinStrategy.getLeftResults(); } public GeoWaveRDD getRightResults() { return joinStrategy.getRightResults(); } public DataStorePluginOptions getLeftStore() { return leftStore; } public void setLeftStore(final DataStorePluginOptions leftStore) { this.leftStore = leftStore; } public String getLeftAdapterTypeName() { return leftAdapterTypeName; } public void setLeftAdapterTypeName(final String leftAdapterTypeName) { this.leftAdapterTypeName = leftAdapterTypeName; } public DataStorePluginOptions getRightStore() { return rightStore; } public void setRightStore(final DataStorePluginOptions rightStore) { this.rightStore = rightStore; } public String getRightAdapterTypeName() { return rightAdapterTypeName; } public void setRightAdapterTypeName(final String rightAdapterTypeName) { this.rightAdapterTypeName = rightAdapterTypeName; } public DataStorePluginOptions getOutputStore() { return outputStore; } public void setOutputStore(final DataStorePluginOptions outputStore) { this.outputStore = outputStore; } public GeomFunction getPredicate() { return predicate; } public 
void setPredicate(final GeomFunction predicate) { this.predicate = predicate; } public NumericIndexStrategy getIndexStrategy() { return indexStrategy; } public void setIndexStrategy(final NumericIndexStrategy indexStrategy) { this.indexStrategy = indexStrategy; } public String getAppName() { return appName; } public void setAppName(final String appName) { this.appName = appName; } public String getMaster() { return master; } public void setMaster(final String master) { this.master = master; } public String getHost() { return host; } public void setHost(final String host) { this.host = host; } public Integer getPartCount() { return partCount; } public void setPartCount(final Integer partCount) { this.partCount = partCount; } public void setSession(final SparkSession session) { this.session = session; } public String getOutputLeftAdapterTypeName() { return outLeftAdapterTypeName; } public void setOutputLeftAdapterTypeName(final String outLeftAdapterTypeName) { this.outLeftAdapterTypeName = outLeftAdapterTypeName; } public String getOutputRightAdapterTypeName() { return outRightAdapterTypeName; } public void setOutputRightAdapterTypeName(final String outRightAdapterTypeName) { this.outRightAdapterTypeName = outRightAdapterTypeName; } public void setLeftRDD(final GeoWaveIndexedRDD leftRDD) { this.leftRDD = leftRDD; } public void setRightRDD(final GeoWaveIndexedRDD rightRDD) { this.rightRDD = rightRDD; } public boolean isNegativeTest() { return negativeTest; } public void setNegativeTest(final boolean negativeTest) { this.negativeTest = negativeTest; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/TieredSpatialJoin.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.spatial; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import org.apache.commons.lang.ArrayUtils; import org.apache.spark.HashPartitioner; import org.apache.spark.SparkContext; import org.apache.spark.api.java.JavaFutureAction; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.FlatMapFunction; import org.apache.spark.api.java.function.Function; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.apache.spark.broadcast.Broadcast; import org.apache.spark.sql.SparkSession; import org.apache.spark.storage.StorageLevel; import org.locationtech.geowave.analytic.spark.GeoWaveIndexedRDD; import org.locationtech.geowave.analytic.spark.GeoWaveRDD; import org.locationtech.geowave.analytic.spark.RDDUtils; import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction; import org.locationtech.geowave.analytic.spark.spatial.JoinOptions.BuildSide; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy; 
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.tiered.SingleTierSubStrategy; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import jersey.repackaged.com.google.common.collect.Maps; import scala.Tuple2; public class TieredSpatialJoin extends JoinStrategy { /** * */ private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(TieredSpatialJoin.class); // Combined matching pairs private JavaPairRDD combinedResults = null; private final List> tierMatches = Lists.newArrayList(); private double bufferDistance = 0.0; public TieredSpatialJoin() {} @Override public void join( final SparkSession spark, final GeoWaveIndexedRDD leftRDD, final GeoWaveIndexedRDD rightRDD, final GeomFunction predicate) throws InterruptedException, ExecutionException { // Get SparkContext from session final SparkContext sc = spark.sparkContext(); final JavaSparkContext javaSC = JavaSparkContext.fromSparkContext(sc); final NumericIndexStrategy leftStrategy = leftRDD.getIndexStrategy().getValue(); final NumericIndexStrategy rightStrategy = rightRDD.getIndexStrategy().getValue(); // Check if either dataset supports the join TieredSFCIndexStrategy tieredStrategy = null; // Determine if either strategy needs to be reindexed to support join algorithm 
boolean reindexLeft = false; boolean reindexRight = false; final boolean leftSupport = supportsJoin(leftStrategy); final boolean rightSupport = supportsJoin(rightStrategy); if (leftSupport && rightSupport) { if (leftStrategy.equals(rightStrategy)) { // Both strategies match we don't have to reindex tieredStrategy = (TieredSFCIndexStrategy) leftStrategy; } else { // Join build side determines what side we will build strategy off of when strategies // support but don't match if (getJoinOptions().getJoinBuildSide() == JoinOptions.BuildSide.LEFT) { reindexRight = true; tieredStrategy = (TieredSFCIndexStrategy) leftStrategy; } else { reindexLeft = true; tieredStrategy = (TieredSFCIndexStrategy) rightStrategy; } } } else if (leftSupport) { reindexRight = true; tieredStrategy = (TieredSFCIndexStrategy) leftStrategy; } else if (rightSupport) { reindexLeft = true; tieredStrategy = (TieredSFCIndexStrategy) rightStrategy; } else { tieredStrategy = (TieredSFCIndexStrategy) createDefaultStrategy(leftStrategy); if (tieredStrategy == null) { tieredStrategy = (TieredSFCIndexStrategy) createDefaultStrategy(rightStrategy); } if (tieredStrategy == null) { LOGGER.error( "Cannot create default strategy from either provided strategy. Datasets cannot be joined."); return; } reindexLeft = true; reindexRight = true; } // Pull information and broadcast strategy used for join final SubStrategy[] tierStrategies = tieredStrategy.getSubStrategies(); final int tierCount = tierStrategies.length; // Create broadcast variable for indexing strategy // Cast is safe because we must be instance of TieredSFCIndexStrategy to support join. final Broadcast broadcastStrategy = (Broadcast) RDDUtils.broadcastIndexStrategy(sc, tieredStrategy); final Broadcast geomPredicate = javaSC.broadcast(predicate); // If needed reindex one of the strategies we will wrap the buffer operation into the reindex // operation // Otherwise we buffer based off the buildside of the join. 
setBufferAmount(predicate.getBufferAmount()); // Reindex if necessary and get RDD of indexed Geometry JavaPairRDD> leftIndex = null; JavaPairRDD> rightIndex = null; if (reindexLeft && reindexRight) { leftRDD.reindex(broadcastStrategy); rightRDD.reindex(broadcastStrategy); } else if (reindexLeft) { leftRDD.reindex(broadcastStrategy); } else if (reindexRight) { rightRDD.reindex(broadcastStrategy); } if (joinOpts.getJoinBuildSide() == BuildSide.LEFT) { rightIndex = rightRDD.getIndexedGeometryRDD(bufferDistance, true); leftIndex = leftRDD.getIndexedGeometryRDD(); } else { leftIndex = leftRDD.getIndexedGeometryRDD(bufferDistance, true); rightIndex = rightRDD.getIndexedGeometryRDD(); } final int leftPartCount = leftIndex.getNumPartitions(); final int rightPartCount = rightIndex.getNumPartitions(); final int highestPartCount = (leftPartCount > rightPartCount) ? leftPartCount : rightPartCount; final int largePartitionerCount = (int) (1.5 * highestPartCount); final HashPartitioner partitioner = new HashPartitioner(largePartitionerCount); final JavaFutureAction> leftFuture = leftIndex.setName("LeftIndex").keys().map(t -> t.getBytes()[0]).distinct(4).collectAsync(); final JavaFutureAction> rightFuture = rightIndex.setName("RightIndex").keys().map(t -> t.getBytes()[0]).distinct( 4).collectAsync(); // Get the result of future final List rightDataTiers = Lists.newArrayList(rightFuture.get()); // Sort tiers highest to lowest and collect information. final Byte[] rightTierArr = rightDataTiers.toArray(new Byte[0]); Arrays.sort(rightTierArr); final int rightTierCount = rightTierArr.length; final List leftDataTiers = Lists.newArrayList(leftFuture.get()); final Byte[] leftTierArr = leftDataTiers.toArray(new Byte[0]); Arrays.sort(leftTierArr); final int leftTierCount = leftTierArr.length; // Determine if there are common higher tiers for whole dataset on either side. 
final byte highestLeftTier = leftTierArr[leftTierArr.length - 1]; final byte highestRightTier = rightTierArr[rightTierArr.length - 1]; // Find a common run of higher tiers Byte[] commonLeftTiers = ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY; Byte[] commonRightTiers = ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY; boolean skipMapCreate = false; if (leftTierArr[0] > highestRightTier) { // Whole left dataset is higher tiers than right commonLeftTiers = leftTierArr; skipMapCreate = true; } else if (rightTierArr[0] > highestLeftTier) { // Whole right dataset is higher tiers than left commonRightTiers = rightTierArr; skipMapCreate = true; } LOGGER.debug("Tier Count: " + tierCount); LOGGER.debug("Left Tier Count: " + leftTierCount + " Right Tier Count: " + rightTierCount); LOGGER.debug("Left Tiers: " + leftDataTiers); LOGGER.debug("Right Tiers: " + rightDataTiers); Map> rightReprojectMap = new HashMap<>(); Map> leftReprojectMap = new HashMap<>(); final HashSet sharedTiers = Sets.newHashSetWithExpectedSize(tierCount / 2); if (!skipMapCreate) { leftReprojectMap = createReprojectMap(leftTierArr, rightTierArr, sharedTiers); rightReprojectMap = createReprojectMap(rightTierArr, leftTierArr, sharedTiers); } JavaRDD> commonRightRDD = null; final boolean commonRightExist = commonRightTiers != ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY; if (commonRightExist) { commonRightRDD = rightRDD.getGeoWaveRDD().getRawRDD().filter( t -> t._2.getDefaultGeometry() != null).mapValues( (Function) t -> { return (Geometry) t.getDefaultGeometry(); }).distinct(largePartitionerCount).rdd().toJavaRDD(); } JavaRDD> commonLeftRDD = null; final boolean commonLeftExist = commonLeftTiers != ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY; if (commonLeftExist) { commonLeftRDD = leftRDD.getGeoWaveRDD().getRawRDD().filter( t -> t._2.getDefaultGeometry() != null).mapValues( (Function) t -> { return (Geometry) t.getDefaultGeometry(); }).distinct(largePartitionerCount).rdd().toJavaRDD(); } // Iterate through left tiers. 
Joining higher right and same level tiers for (final Byte leftTierId : leftDataTiers) { final HashSet higherRightTiers = leftReprojectMap.get(leftTierId); JavaPairRDD> leftTier = null; final boolean higherTiersExist = ((higherRightTiers != null) && !higherRightTiers.isEmpty()); final boolean sameTierExist = sharedTiers.contains(leftTierId); if (commonRightExist || higherTiersExist || sameTierExist) { leftTier = filterTier(leftIndex, leftTierId); } else { // No tiers to compare against this tier continue; } // Check for same tier existence on both sides and join without reprojection. if (sameTierExist) { final JavaPairRDD> rightTier = rightIndex.filter(t -> t._1().getBytes()[0] == leftTierId); final JavaPairRDD finalMatches = joinAndCompareTiers(leftTier, rightTier, geomPredicate, highestPartCount, partitioner); addMatches(finalMatches); } // Join against higher common tiers for this dataset JavaRDD> rightTiers = null; if (commonRightExist) { rightTiers = commonRightRDD; } else if (higherTiersExist) { final Broadcast> higherBroadcast = javaSC.broadcast(higherRightTiers); rightTiers = prepareForReproject( rightIndex.filter(t -> higherBroadcast.value().contains(t._1().getBytes()[0])), largePartitionerCount); } if (rightTiers != null) { final JavaPairRDD> reprojected = reprojectToTier( rightTiers, leftTierId, broadcastStrategy, getBufferAmount(BuildSide.RIGHT), partitioner); final JavaPairRDD finalMatches = joinAndCompareTiers( leftTier, reprojected, geomPredicate, highestPartCount, partitioner); addMatches(finalMatches); } } for (final Byte rightTierId : rightDataTiers) { final HashSet higherLeftTiers = rightReprojectMap.get(rightTierId); JavaPairRDD> rightTier = null; final boolean higherLeftExist = ((higherLeftTiers != null) && !higherLeftTiers.isEmpty()); if (commonLeftExist || higherLeftExist) { rightTier = rightIndex.filter(t -> t._1().getBytes()[0] == rightTierId); } else { // No tiers to compare against this tier continue; } JavaPairRDD finalMatches = null; 
JavaRDD> leftTiers = null; if (commonLeftExist) { leftTiers = commonLeftRDD; } else { final Broadcast> higherBroadcast = javaSC.broadcast(higherLeftTiers); leftTiers = prepareForReproject( leftIndex.filter(t -> higherBroadcast.value().contains(t._1.getBytes()[0])), largePartitionerCount); } final JavaPairRDD> reprojected = reprojectToTier( leftTiers, rightTierId, broadcastStrategy, getBufferAmount(BuildSide.LEFT), partitioner); finalMatches = joinAndCompareTiers(reprojected, rightTier, geomPredicate, highestPartCount, partitioner); addMatches(finalMatches); } // Remove duplicates between tiers combinedResults = javaSC.union( (JavaPairRDD[]) (ArrayUtils.add( tierMatches.toArray(new JavaPairRDD[tierMatches.size()]), combinedResults))); combinedResults = combinedResults.reduceByKey((id1, id2) -> id1); combinedResults = combinedResults.setName("CombinedJoinResults").persist(StorageLevel.MEMORY_ONLY_SER()); // Force evaluation of RDD at the join function call. // Otherwise it doesn't actually perform work until something is called // on left/right joined. // Wish there was a better way to force evaluation of rdd safely. // isEmpty() triggers take(1) which shouldn't involve a shuffle. 
combinedResults.isEmpty(); // Join against original dataset to give final joined rdds on each side, and cache results so we // don't recalculate if (getJoinOptions().isNegativePredicate()) { setLeftResults( new GeoWaveRDD( leftRDD.getGeoWaveRDD().getRawRDD().subtractByKey(combinedResults).cache())); setRightResults( new GeoWaveRDD( rightRDD.getGeoWaveRDD().getRawRDD().subtractByKey(combinedResults).cache())); } else { setLeftResults( new GeoWaveRDD( leftRDD.getGeoWaveRDD().getRawRDD().join(combinedResults).mapToPair( t -> new Tuple2<>(t._1(), t._2._1())).cache())); setRightResults( new GeoWaveRDD( rightRDD.getGeoWaveRDD().getRawRDD().join(combinedResults).mapToPair( t -> new Tuple2<>(t._1(), t._2._1())).cache())); } leftIndex.unpersist(); rightIndex.unpersist(); } private Map> createReprojectMap( final Byte[] buildSide, final Byte[] testSide, final HashSet sharedTiers) { final Map> resultMap = Maps.newHashMap(); final int testLastIndex = testSide.length; for (final Byte tierLeft : buildSide) { final int firstGreater = Arrays.binarySearch(testSide, tierLeft); if (firstGreater >= 0) { // Found in array sharedTiers.add(tierLeft); } final int insertionPoint = Math.abs(firstGreater); if (insertionPoint >= testLastIndex) { // Not present in array, and none greater than this value continue; } // There is at least one value greater than the current copy it and // add to map final HashSet higherTiers = Sets.newHashSet(Arrays.copyOfRange(testSide, insertionPoint, testLastIndex)); resultMap.put(tierLeft, higherTiers); } return resultMap; } private void setBufferAmount(final double bufferAmount) { bufferDistance = bufferAmount; } private double getBufferAmount(final BuildSide testSide) { return (joinOpts.getJoinBuildSide() != testSide) ? 
bufferDistance : 0.0; } @Override public boolean supportsJoin(final NumericIndexStrategy indexStrategy) { return (indexStrategy != null) && indexStrategy.getClass().isInstance(TieredSFCIndexStrategy.class); } @Override public NumericIndexStrategy createDefaultStrategy(final NumericIndexStrategy indexStrategy) { if (SpatialTemporalDimensionalityTypeProvider.isSpatialTemporal(indexStrategy)) { final SpatialTemporalOptions options = new SpatialTemporalOptions(); return TieredSFCIndexFactory.createFullIncrementalTieredStrategy( SpatialTemporalDimensionalityTypeProvider.SPATIAL_TEMPORAL_DIMENSIONS, new int[] { options.getBias().getSpatialPrecision(), options.getBias().getSpatialPrecision(), options.getBias().getTemporalPrecision()}, SFCType.HILBERT, options.getMaxDuplicates()); } else if (SpatialDimensionalityTypeProvider.isSpatial(indexStrategy)) { return TieredSFCIndexFactory.createFullIncrementalTieredStrategy( SpatialDimensionalityTypeProvider.SPATIAL_DIMENSIONS, new int[] { SpatialDimensionalityTypeProvider.LONGITUDE_BITS, SpatialDimensionalityTypeProvider.LATITUDE_BITS}, SFCType.HILBERT); } return null; } private void addMatches(final JavaPairRDD finalMatches) { if (combinedResults == null) { combinedResults = finalMatches; } else { tierMatches.add(finalMatches); } } private JavaPairRDD> filterTier( final JavaPairRDD> indexedRDD, final byte tierId) { return indexedRDD.filter(v1 -> v1._1().getBytes()[0] == tierId); } private JavaRDD> prepareForReproject( final JavaPairRDD> indexedRDD, final int numPartitions) { return indexedRDD.values().distinct(numPartitions); } private JavaPairRDD> reprojectToTier( final JavaRDD> higherTiers, final byte targetTierId, final Broadcast broadcastStrategy, final double bufferDistance, final HashPartitioner partitioner) { return higherTiers.flatMapToPair( (PairFlatMapFunction, ByteArray, Tuple2>) t -> { final TieredSFCIndexStrategy index = broadcastStrategy.value(); final SubStrategy[] strategies = index.getSubStrategies(); 
SingleTierSubStrategy useStrat = null; for (final SubStrategy strat : strategies) { final SingleTierSubStrategy tierStrat = (SingleTierSubStrategy) strat.getIndexStrategy(); if (targetTierId == tierStrat.tier) { useStrat = tierStrat; break; } } final Geometry geom = t._2; final Envelope internalEnvelope = geom.getEnvelopeInternal(); internalEnvelope.expandBy(bufferDistance); final MultiDimensionalNumericData boundsRange = GeometryUtils.getBoundsFromEnvelope(internalEnvelope); InsertionIds insertIds = useStrat.getInsertionIds(boundsRange, 80); if (bufferDistance == 0.0) { insertIds = RDDUtils.trimIndexIds(insertIds, geom, index); } final List>> reprojected = Lists.newArrayListWithCapacity(insertIds.getSize()); for (final byte[] id : insertIds.getCompositeInsertionIds()) { final Tuple2> indexPair = new Tuple2<>(new ByteArray(id), t); reprojected.add(indexPair); } return reprojected.iterator(); }).partitionBy(partitioner).persist(StorageLevel.MEMORY_AND_DISK_SER()); } private JavaPairRDD joinAndCompareTiers( final JavaPairRDD> leftTier, final JavaPairRDD> rightTier, final Broadcast geomPredicate, final int highestPartitionCount, final HashPartitioner partitioner) { // Cogroup groups on same tier ByteArrayId and pairs them into Iterable // sets. JavaPairRDD>, Iterable>>> joinedTiers = leftTier.cogroup(rightTier, partitioner); // Filter only the pairs that have data on both sides, bucket strategy // should have been accounted for by this point. // We need to go through the pairs and test each feature against each // other // End with a combined RDD for that tier. 
joinedTiers = joinedTiers.filter(t -> t._2._1.iterator().hasNext() && t._2._2.iterator().hasNext()); final JavaPairRDD finalMatches = joinedTiers.flatMapValues( (FlatMapFunction>, Iterable>>, GeoWaveInputKey>) t -> { final GeomFunction predicate = geomPredicate.value(); final HashSet results = Sets.newHashSet(); for (final Tuple2 leftTuple : t._1) { for (final Tuple2 rightTuple : t._2) { if (predicate.call(leftTuple._2, rightTuple._2)) { results.add(leftTuple._1); results.add(rightTuple._1); } } } return results.iterator(); }).mapToPair(Tuple2::swap).reduceByKey(partitioner, (id1, id2) -> id1).persist( StorageLevel.MEMORY_ONLY_SER()); return finalMatches; } } ================================================ FILE: analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/operations/SpatialJoinCmdOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.analytic.spark.spatial.operations;

import com.beust.jcommander.Parameter;

/**
 * JCommander option bag for the {@code spatialjoin} command: Spark connection settings, the
 * feature type names on each side of the join, and the predicate configuration.
 */
public class SpatialJoinCmdOptions {

  @Parameter(names = {"-n", "--name"}, description = "The spark application name")
  private String appName = "Spatial Join Spark";

  @Parameter(names = {"-ho", "--host"}, description = "The spark driver host")
  private String host = "localhost";

  @Parameter(names = {"-m", "--master"}, description = "The spark master designation")
  private String master = "yarn";

  @Parameter(
      names = {"-pc", "--partCount"},
      description = "The default partition count to set for Spark RDDs. Should be big enough to support largest RDD that will be used. Sets spark.default.parallelism")
  private Integer partCount = -1;

  @Parameter(
      names = {"-lt", "--leftTypeName"},
      description = "Feature type name of left Store to use in join")
  private String leftAdapterTypeName = null;

  @Parameter(
      names = {"-ol", "--outLeftTypeName"},
      description = "Feature type name of left join results.")
  private String outLeftAdapterTypeName = null;

  @Parameter(
      names = {"-rt", "--rightTypeName"},
      description = "Feature type name of right Store to use in join")
  private String rightAdapterTypeName = null;

  @Parameter(
      names = {"-or", "--outRightTypeName"},
      description = "Feature type name of right join results.")
  private String outRightAdapterTypeName = null;

  @Parameter(
      names = {"-p", "--predicate"},
      description = "Name of the UDF function to use when performing Spatial Join")
  private String predicate = "GeomIntersects";

  @Parameter(
      names = {"-r", "--radius"},
      description = "Used for distance join predicate and other spatial operations that require a scalar radius.")
  private Double radius = 0.01;

  @Parameter(
      names = {"-not", "--negative"},
      description = "Used for testing a negative result from geometry predicate. i.e GeomIntersects() == false")
  private boolean negativeTest = false;

  // TODO: Experiment with collecting + broadcasting rdds when one side can
  // fit into memory

  public SpatialJoinCmdOptions() {}

  /** @return the spark application name */
  public String getAppName() {
    return appName;
  }

  public void setAppName(final String appName) {
    this.appName = appName;
  }

  /** @return the spark driver host */
  public String getHost() {
    return host;
  }

  public void setHost(final String host) {
    this.host = host;
  }

  /** @return the spark master designation */
  public String getMaster() {
    return master;
  }

  public void setMaster(final String master) {
    this.master = master;
  }

  /** @return the default RDD partition count (-1 leaves the Spark default) */
  public Integer getPartCount() {
    return partCount;
  }

  public void setPartCount(final Integer partCount) {
    this.partCount = partCount;
  }

  /** @return feature type name of the left store, or null for all types */
  public String getLeftAdapterTypeName() {
    return leftAdapterTypeName;
  }

  public void setLeftAdapterTypeName(final String leftAdapterTypeName) {
    this.leftAdapterTypeName = leftAdapterTypeName;
  }

  /** @return feature type name of the right store, or null for all types */
  public String getRightAdapterTypeName() {
    return rightAdapterTypeName;
  }

  public void setRightAdapterTypeName(final String rightAdapterTypeName) {
    this.rightAdapterTypeName = rightAdapterTypeName;
  }

  /** @return name of the UDF predicate used for the join */
  public String getPredicate() {
    return predicate;
  }

  public void setPredicate(final String predicate) {
    this.predicate = predicate;
  }

  /** @return scalar radius used by distance-based predicates */
  public Double getRadius() {
    return radius;
  }

  public void setRadius(final Double radius) {
    this.radius = radius;
  }

  /** @return feature type name for the left join results */
  public String getOutputLeftAdapterTypeName() {
    return outLeftAdapterTypeName;
  }

  public void setOutputLeftAdapterTypeName(final String outLeftAdapterTypeName) {
    this.outLeftAdapterTypeName = outLeftAdapterTypeName;
  }

  /** @return feature type name for the right join results */
  public String getOutputRightAdapterTypeName() {
    return outRightAdapterTypeName;
  }

  public void setOutputRightAdapterTypeName(final String outRightAdapterTypeName) {
    this.outRightAdapterTypeName = outRightAdapterTypeName;
  }

  /** @return whether the predicate result should be negated */
  public boolean isNegativeTest() {
    return negativeTest;
  }

  public void setNegativeTest(final boolean negativeTest) {
    this.negativeTest = negativeTest;
  }
}

================================================
FILE:
analytics/spark/src/main/java/org/locationtech/geowave/analytic/spark/spatial/operations/SpatialJoinCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.analytic.spark.spatial.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.analytic.PropertyManagement; import org.locationtech.geowave.analytic.mapreduce.operations.AnalyticSection; import org.locationtech.geowave.analytic.mapreduce.operations.options.PropertyManagementConverter; import org.locationtech.geowave.analytic.param.StoreParameters; import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomFunction; import org.locationtech.geowave.analytic.spark.sparksql.udf.GeomWithinDistance; import org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI; import org.locationtech.geowave.analytic.spark.sparksql.udf.UDFRegistrySPI.UDFNameAndConstructor; import org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner; import org.locationtech.geowave.analytic.store.PersistableStore; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "spatialjoin", parentOperation = AnalyticSection.class) @Parameters(commandDescription = "Spatial join using Spark ") public class SpatialJoinCommand extends ServiceEnabledCommand { 
@Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private SpatialJoinCmdOptions spatialJoinOptions = new SpatialJoinCmdOptions(); DataStorePluginOptions leftDataStore = null; DataStorePluginOptions rightDataStore = null; DataStorePluginOptions outputDataStore = null; public void setParameters(final List parameters) { this.parameters = parameters; } @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 3) { throw new ParameterException( "Requires arguments: "); } computeResults(params); } public void setSpatialJoinOptions(final SpatialJoinCmdOptions spatialJoinOptions) { this.spatialJoinOptions = spatialJoinOptions; } @Override public Void computeResults(final OperationParams params) throws Exception { final String leftStoreName = parameters.get(0); final String rightStoreName = parameters.get(1); final String outputStoreName = parameters.get(2); // Config file final File configFile = getGeoWaveConfigFile(params); // Attempt to load stores. if (leftDataStore == null) { leftDataStore = CLIUtils.loadStore(leftStoreName, configFile, params.getConsole()); } if (rightDataStore == null) { rightDataStore = CLIUtils.loadStore(rightStoreName, configFile, params.getConsole()); } if (outputDataStore == null) { outputDataStore = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole()); } // Save a reference to the output store in the property management. 
final PersistableStore persistedStore = new PersistableStore(outputDataStore); final PropertyManagement properties = new PropertyManagement(); properties.store(StoreParameters.StoreParam.OUTPUT_STORE, persistedStore); // Convert properties from DBScanOptions and CommonOptions final PropertyManagementConverter converter = new PropertyManagementConverter(properties); converter.readProperties(spatialJoinOptions); // TODO: Create GeomPredicate function from name final UDFNameAndConstructor udfFunc = UDFRegistrySPI.findFunctionByName(spatialJoinOptions.getPredicate()); if (udfFunc == null) { throw new ParameterException( "UDF function matching " + spatialJoinOptions.getPredicate() + " not found."); } final GeomFunction predicate = udfFunc.getPredicateConstructor().get(); // Special case for distance function since it takes a scalar radius. if (predicate instanceof GeomWithinDistance) { ((GeomWithinDistance) predicate).setRadius(spatialJoinOptions.getRadius()); } final SpatialJoinRunner runner = new SpatialJoinRunner(); runner.setAppName(spatialJoinOptions.getAppName()); runner.setMaster(spatialJoinOptions.getMaster()); runner.setHost(spatialJoinOptions.getHost()); runner.setPartCount(spatialJoinOptions.getPartCount()); runner.setPredicate(predicate); // set DataStore options for runner runner.setLeftStore(leftDataStore); if (spatialJoinOptions.getLeftAdapterTypeName() != null) { runner.setLeftAdapterTypeName(spatialJoinOptions.getLeftAdapterTypeName()); } runner.setRightStore(rightDataStore); if (spatialJoinOptions.getRightAdapterTypeName() != null) { runner.setRightAdapterTypeName(spatialJoinOptions.getRightAdapterTypeName()); } runner.setOutputStore(outputDataStore); if (spatialJoinOptions.getOutputLeftAdapterTypeName() != null) { runner.setOutputLeftAdapterTypeName(spatialJoinOptions.getOutputLeftAdapterTypeName()); } if (spatialJoinOptions.getOutputRightAdapterTypeName() != null) { 
runner.setOutputRightAdapterTypeName(spatialJoinOptions.getOutputRightAdapterTypeName()); } runner.setNegativeTest(spatialJoinOptions.isNegativeTest()); // Finally call run to execute the join runner.run(); runner.close(); return null; } } ================================================ FILE: analytics/spark/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.analytic.spark.AnalyticOperationCLIProvider ================================================ FILE: core/cli/pom.xml ================================================ 4.0.0 org.locationtech.geowave geowave-core-parent ../ 2.0.2-SNAPSHOT geowave-core-cli GeoWave CLI Command Line Interface for GeoWave Tools 1.7 org.apache.commons commons-lang3 commons-codec commons-codec commons-io commons-io com.beust jcommander org.javassist javassist 3.20.0-GA net.sf.json-lib json-lib jdk15 org.bouncycastle bcprov-jdk15on ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/Constants.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
/** */
package org.locationtech.geowave.core.cli;

/**
 * Shared property keys (and one default value) used by the GeoWave CLI for console echo and
 * password-encryption configuration.
 */
public interface Constants {
  /**
   * Name of the GeoWave Descriptions Bundle for storing descriptions that override the CLI
   * descriptions
   */
  public static final String GEOWAVE_DESCRIPTIONS_BUNDLE_NAME = "GeoWaveLabels";

  /** Properties file key denoting if a console echo is enabled by default */
  /*
   * HP Fortify "Use of Hard-coded Credentials - Key Management: Hardcoded Encryption Key" false
   * positive This is not an encryption key, just a configuration flag that denotes if encryption
   * should be enabled in the source.
   */
  public static final String CONSOLE_DEFAULT_ECHO_ENABLED_KEY =
      "geowave.console.default.echo.enabled";

  /** Properties file key denoting if a console echo is enabled for passwords */
  /*
   * HP Fortify "Use of Hard-coded Password - Password Management: Hardcoded Password" false
   * positive This is not a hard-coded password, just a configuration flag related to passwords, to
   * enable or disable passwords being echoed on the CLI when a user is entering their password
   */
  public static final String CONSOLE_PASSWORD_ECHO_ENABLED_KEY =
      "geowave.console.password.echo.enabled";

  /** Properties file key denoting if encryption is enabled for passwords */
  public static final String ENCRYPTION_ENABLED_KEY = "geowave.encryption.enabled";

  /**
   * Default setting for encryption turned on. Currently defaults to enabled (the value below is
   * {@code Boolean.TRUE}) - the previous comment claiming it defaults to disabled contradicted
   * the code. Must be a boolean string.
   */
  public static final String ENCRYPTION_ENABLED_DEFAULT = Boolean.TRUE.toString();
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/GeoWaveMain.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli;

import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.Operation;
import org.locationtech.geowave.core.cli.operations.ExplainCommand;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import org.locationtech.geowave.core.cli.operations.HelpCommand;
import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;
import org.locationtech.geowave.core.cli.parser.OperationParser;
import org.locationtech.geowave.core.cli.spi.OperationEntry;
import org.locationtech.geowave.core.cli.spi.OperationRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This is the primary entry point for command line tools. When run it will expect an operation is
 * specified, and will use the appropriate command-line driver for the chosen operation.
 */
public class GeoWaveMain {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveMain.class);

  /**
   * Parse the command line against the operation hierarchy rooted at
   * {@link GeoWaveTopLevelSection}, execute the resolved command, report help/errors, and exit
   * with the resulting success code.
   *
   * @param args raw command-line arguments
   */
  public static void main(final String[] args) {
    // Take an initial stab at running geowave with the given arguments.
    final OperationParser parser = new OperationParser(prepRegistry());
    final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args);

    // Run the command if no issue.
    // successCode == 1 means that prepare returned false
    // successCode == 0 means that everything went fine
    // successCode == -1 means that something errored.
    if (params.getSuccessCode() == 0) {
      run(params);
    }

    // Now that successCode has been updated by run(), assess it.
    // Log error to console if any.
    if (params.getSuccessCode() < 0) {
      doHelp(params);
      LOGGER.debug(params.getSuccessMessage(), params.getSuccessException());
      params.getCommander().getConsole().println("\n" + params.getSuccessMessage());
    } else if ((params.getSuccessCode() == 0) && !params.isCommandPresent()) {
      // Parsing succeeded but no command was named: show the usage/help output.
      doHelp(params);
    }
    System.exit(params.getSuccessCode());
  }

  /**
   * Run the operations contained in CommandLineOperationParams. Executes only the first
   * {@link Command} encountered; any exception is captured into the params' success
   * code/message/exception rather than propagated.
   *
   * @param params parsed operation state to execute and record results into
   */
  private static void run(final CommandLineOperationParams params) {
    // Execute the command
    for (final Operation operation : params.getOperationMap().values()) {
      if (operation instanceof Command) {
        try {
          ((Command) operation).execute(params);
        } catch (final Exception p) {
          LOGGER.warn("Unable to execute operation", p);
          params.setSuccessCode(-1);
          params.setSuccessMessage(
              String.format("Unable to execute operation: %s", p.getMessage()));
          params.setSuccessException(p);
        }
        // Only execute the first command.
        break;
      }
    }
  }

  /**
   * This adds the help and explain commands to have all operations as children, so the user can do
   * 'help command' or 'explain command'
   *
   * @return the shared operation registry with help/explain wired to every top-level child
   */
  private static OperationRegistry prepRegistry() {
    final OperationRegistry registry = OperationRegistry.getInstance();
    final OperationEntry explainCommand = registry.getOperation(ExplainCommand.class);
    final OperationEntry helpCommand = registry.getOperation(HelpCommand.class);
    final OperationEntry topLevel = registry.getOperation(GeoWaveTopLevelSection.class);

    // Special processing for "HelpSection". This special section will be
    // added as a child to top level, and will have all the same children as top level.
    for (final OperationEntry entry : topLevel.getChildren()) {
      if ((entry != helpCommand) && (entry != explainCommand)) {
        helpCommand.addChild(entry);
        explainCommand.addChild(entry);
      }
    }
    return registry;
  }

  /**
   * This function will show options for the given operation/section.
   *
   * @param params the parsed operation state to render help for
   */
  private static void doHelp(final CommandLineOperationParams params) {
    final HelpCommand command = new HelpCommand();
    command.execute(params);
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/VersionUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.internal.Console; public class VersionUtils { private static final Logger LOGGER = LoggerFactory.getLogger(VersionUtils.class); private static final String BUILD_PROPERTIES_FILE_NAME = "build.properties"; private static final String VERSION_PROPERTY_KEY = "project.version"; public static Properties getBuildProperties(final Console console) { final Properties props = new Properties(); try (InputStream stream = VersionUtils.class.getClassLoader().getResourceAsStream(BUILD_PROPERTIES_FILE_NAME);) { if (stream != null) { props.load(stream); } return props; } catch (final IOException e) { LOGGER.warn("Cannot read GeoWave build properties to show version information", e); if (console != null) { console.println( "Cannot read GeoWave build properties to show version information: " + e.getMessage()); } } return props; } public static String getVersion() { return getVersion(null); } public static String getVersion(final Console console) { return getBuildProperties(console).getProperty(VERSION_PROPERTY_KEY); } public static List getVersionInfo() { return getVersionInfo(null); } public static List getVersionInfo(final Console console) { final List buildAndPropertyList = Arrays.asList(getBuildProperties(console).toString().split(",")); Collections.sort(buildAndPropertyList.subList(1, buildAndPropertyList.size())); return buildAndPropertyList; } 
public static String asLineDelimitedString(final List value) { final StringBuilder str = new StringBuilder(); for (final String v : value) { str.append(v).append('\n'); } return str.toString(); } public static void printVersionInfo(final Console console) { final List buildAndPropertyList = getVersionInfo(console); for (final String str : buildAndPropertyList) { console.println(str); } } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/annotations/GeowaveOperation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE}) public @interface GeowaveOperation { String[] name(); Class parentOperation() default Object.class; } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/annotations/PrefixParameter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Associates a prefix with a field or getter/setter.
 *
 * <p>NOTE(review): presumably the prefix is prepended to the names of JCommander parameters
 * declared on the annotated member — confirm against the prefix/translation machinery in
 * {@code org.locationtech.geowave.core.cli.prefix}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
public @interface PrefixParameter {
  /** The prefix string to apply. */
  String prefix();
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/Command.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.api;

/**
 * An operation may choose to implement Command, which will then lead to the 'execute' method being
 * called during the execute() phase.
 */
public interface Command extends Operation {

  /**
   * Execute the command.
   *
   * @param params the parsed operation parameters and context for this invocation
   * @throws Exception if execution fails; callers are expected to capture and report the failure
   */
  public void execute(OperationParams params) throws Exception;
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/DefaultOperation.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.api; import java.io.File; import java.io.IOException; import java.util.Iterator; import java.util.Properties; import java.util.ServiceLoader; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption; import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils; import org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.ParameterException; /** * The default operation prevents implementors from having to implement the 'prepare' function, if * they don't want to. */ public abstract class DefaultOperation implements Operation { private static final Logger sLog = LoggerFactory.getLogger(DefaultOperation.class); private File geowaveDirectory = null; private File geowaveConfigFile = null; private File securityTokenFile = null; @Override public boolean prepare(final OperationParams params) throws ParameterException { try { checkForGeoWaveDirectory(params); } catch (final Exception e) { throw new ParameterException( "Error occurred during preparing phase: " + e.getLocalizedMessage(), e); } return true; } /** * Check if encryption token exists. 
If not, create one initially This method must assume the * config file is set and just names the token file ${configfile}.key */ private void checkForToken() { final File tokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(geowaveConfigFile); if ((tokenFile == null) || !tokenFile.exists()) { generateNewEncryptionToken(tokenFile); } setSecurityTokenFile(tokenFile); } /** * Ensure that a geowave home directory exists at ~/.geowave. This is where encryption token file * will be stored. This method will attempt to load the config options from the given config file. * If it can't find it, it will try to create it. It will then set the contextual variables * 'properties' and 'properties-file', which can be used by commands to overwrite/update the * properties. * * @param params * @throws Exception */ private void checkForGeoWaveDirectory(final OperationParams params) throws Exception { setGeoWaveConfigFile(getGeoWaveConfigFile(params)); if (getGeoWaveConfigFile(params) == null) { // if file does not exist setGeoWaveConfigFile(ConfigOptions.getDefaultPropertyFile(params.getConsole())); setDefaultConfigProperties(params); } setGeowaveDirectory(getGeoWaveConfigFile(params).getParentFile()); if (!getGeoWaveDirectory().exists()) { try { final boolean created = getGeoWaveDirectory().mkdir(); if (!created) { sLog.error("An error occurred creating a user '.geowave' in home directory"); } } catch (final Exception e) { sLog.error( "An error occurred creating a user '.geowave' in home directory: " + e.getLocalizedMessage(), e); throw new ParameterException(e); } } if (!getGeoWaveConfigFile(params).exists()) { // config file does not exist, attempt to create it. 
try { if (!getGeoWaveConfigFile(params).createNewFile()) { throw new Exception( "Could not create property cache file: " + getGeoWaveConfigFile(params)); } } catch (final IOException e) { sLog.error("Could not create property cache file: " + getGeoWaveConfigFile(params), e); throw new ParameterException(e); } setDefaultConfigProperties(params); } checkForToken(); } /** * Generate a new token value in a specified file. * * @param tokenFile * @return {@code true} if the encryption tocken was successfully generated */ protected boolean generateNewEncryptionToken(final File tokenFile) { try { return BaseEncryption.generateNewEncryptionToken(tokenFile); } catch (final Exception ex) { sLog.error( "An error occurred writing new encryption token to file: " + ex.getLocalizedMessage(), ex); } return false; } /** @return the securityTokenFile */ public File getSecurityTokenFile() { return securityTokenFile; } /** @param securityTokenFile the securityTokenFile to set */ public void setSecurityTokenFile(final File securityTokenFile) { this.securityTokenFile = securityTokenFile; } /** @return the geowaveDirectory */ public File getGeoWaveDirectory() { return geowaveDirectory; } /** @param geowaveDirectory the geowaveDirectory to set */ private void setGeowaveDirectory(final File geowaveDirectory) { this.geowaveDirectory = geowaveDirectory; } /** @return the geowaveConfigFile */ public File getGeoWaveConfigFile(final OperationParams params) { if (getGeoWaveConfigFile() == null) { setGeoWaveConfigFile((File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT)); } return getGeoWaveConfigFile(); } public File getGeoWaveConfigFile() { return geowaveConfigFile; } /** @param geowaveConfigFile the geowaveConfigFile to set */ private void setGeoWaveConfigFile(final File geowaveConfigFile) { this.geowaveConfigFile = geowaveConfigFile; } public Properties getGeoWaveConfigProperties(final OperationParams params, final String filter) { return 
ConfigOptions.loadProperties(getGeoWaveConfigFile(params), filter); } public Properties getGeoWaveConfigProperties(final OperationParams params) { return getGeoWaveConfigProperties(params, null); } public Properties getGeoWaveConfigProperties() { return ConfigOptions.loadProperties(getGeoWaveConfigFile()); } /** Uses SPI to find all projects that have defaults to add to the config-properties file */ private void setDefaultConfigProperties(final OperationParams params) { final Properties defaultProperties = new Properties(); final Iterator defaultPropertiesProviders = ServiceLoader.load(DefaultConfigProviderSpi.class).iterator(); while (defaultPropertiesProviders.hasNext()) { final DefaultConfigProviderSpi defaultPropertiesProvider = defaultPropertiesProviders.next(); defaultProperties.putAll(defaultPropertiesProvider.getDefaultConfig()); } ConfigOptions.writeProperties(getGeoWaveConfigFile(), defaultProperties, params.getConsole()); } @Override public String usage() { return null; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/DefaultPluginOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.api; import java.util.Properties; import org.locationtech.geowave.core.cli.prefix.JCommanderPropertiesTransformer; /** * This class has some default implementations for the PluginOptions interface, such as saving and * loading plugin options. */ public abstract class DefaultPluginOptions { public static final String OPTS = "opts"; public static final String TYPE = "type"; /** * This is implemented by the PluginOptions interface by child classes * * @param qualifier */ public abstract void selectPlugin(String qualifier); /** * This is implemented by the PluginOptions interface by child classes * * @return the plugin type */ public abstract String getType(); /** * Transform to properties, making all option values live in the "opts" namespace. */ public void save(final Properties properties, final String namespace) { final JCommanderPropertiesTransformer jcpt = new JCommanderPropertiesTransformer(String.format("%s.%s", namespace, OPTS)); jcpt.addObject(this); jcpt.transformToProperties(properties); // Add the entry for the type property. final String typeProperty = String.format("%s.%s", namespace, TYPE); properties.setProperty(typeProperty, getType()); } /** * Transform from properties, reading values that live in the "opts" namespace. */ public boolean load(final Properties properties, final String namespace) { // Get the qualifier. 
final String typeProperty = String.format("%s.%s", namespace, TYPE); final String typeValue = properties.getProperty(typeProperty); if (typeValue == null) { return false; } if (getType() == null) { selectPlugin(typeValue); } final JCommanderPropertiesTransformer jcpt = new JCommanderPropertiesTransformer(String.format("%s.%s", namespace, OPTS)); jcpt.addObject(this); jcpt.transformFromProperties(properties); return true; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/Operation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.api;

/**
 * An operation in GeoWave is something that can be prepared() and executed(). The prepare()
 * function will look at parameters and based on their values, set @ParametersDelegate classes which
 * can soak up more parameters. Then, the parameters are parsed again before being fed into the
 * execute() command, if the operation also implements Command.
 */
public interface Operation {

  /**
   * NOTE: ONLY USE THIS METHOD TO SET @PARAMETERSDELEGATE options. If you throw exceptions or do
   * validation, then it will make help/explain commands not work correctly.
   *
   * @param params the parsed operation parameters/context
   * @return whether preparation succeeded
   */
  boolean prepare(OperationParams params);

  /**
   * Method to allow commands the option to override the default usage from jcommander where all the
   * fields are printed out in alphabetical order. Some classes may want to put the basic/required
   * fields first, with optional fields at the bottom, or however other custom usage's would be
   * necessary.
   *
   * <p>If method returns null, the default usage from jcommander is used
   *
   * @return the custom usage text, or null to use jcommander's default
   */
  String usage();
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/OperationParams.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.api; import java.util.Map; import com.beust.jcommander.internal.Console; /** * This arguments are used to allow sections and commands to modify how arguments are parsed during * prepare / execution stage. */ public interface OperationParams { /** * @return Operations that were parsed & instantiated for execution. */ Map getOperationMap(); /** * @return Key value pairs for contextual information during command parsing. */ Map getContext(); /** * Get the console to print commandline messages * * @return the console */ Console getConsole(); } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/PluginOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.api;

import java.util.Properties;

/** All plugins must provide this interface */
public interface PluginOptions {

  /** @return the selected plugin type */
  public String getType();

  /** @param qualifier the plugin qualifier to select */
  public void selectPlugin(String qualifier);

  /** Persist this plugin's options into the given properties under the given namespace. */
  public void save(Properties properties, String namespace);

  /**
   * Restore this plugin's options from the given properties under the given namespace.
   *
   * @return whether any saved options were found under the namespace
   */
  public boolean load(Properties properties, String namespace);
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/ServiceEnabledCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.api; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; public abstract class ServiceEnabledCommand extends DefaultOperation implements Command { protected String path = null; public abstract T computeResults(OperationParams params) throws Exception; /** * this method provides a hint to the service running the command whether it should be run * asynchronously or not * * @return should this method be run asynchronously */ public boolean runAsync() { return false; } /** * the method to expose as a resource * * @return the HTTP method */ public HttpMethod getMethod() { final String path = getPath(); if (path.contains("get") || path.contains("list")) { return HttpMethod.GET; } return HttpMethod.POST; } /** * Get the status code to return if execution was success. * *

By default: POST -> 201 OTHER -> 200 * *

Should be overridden in subclasses as needed (i.e., for a POST that does not create * anything). * * @return The potential status if REST call is successful. */ public Boolean successStatusIs200() { switch (getMethod()) { case POST: return false; default: return true; } } /** * get the path to expose as a resource * * @return the path (use {param} for path encoded params) */ public String getPath() { if (path == null) { path = defaultGetPath(); } return path.replace("geowave", "v0"); } public String getId() { return defaultId(); } /** * this is for ease if a class wants to merely override the final portion of a resource name and * not the entire path * * @return the final portion of a resource name */ protected String getName() { return null; } private String defaultId() { // TODO this is used by swagger and it may determine layout but its // uncertain if (getClass().isAnnotationPresent(GeowaveOperation.class)) { final GeowaveOperation op = getClass().getAnnotation(GeowaveOperation.class); return op.parentOperation().getName() + "." 
+ op.name()[0]; } else if ((getName() != null) && !getName().trim().isEmpty()) { return getName(); } return getClass().getTypeName(); } private String defaultGetPath() { final Class operation = getClass(); if (operation.isAnnotationPresent(GeowaveOperation.class)) { return pathFor(operation, getName()).substring(1); } else if ((getName() != null) && !getName().trim().isEmpty()) { return getName(); } return operation.getTypeName(); } /** * Get the path for a command based on the operation hierarchy Return the path as a string in the * format "/first/next/next" * * @param operation - the operation to find the path for * @return the formatted path as a string */ private static String pathFor(final Class operation, final String resourcePathOverride) { // Top level of hierarchy if (operation == Object.class) { return ""; } final GeowaveOperation operationInfo = operation.getAnnotation(GeowaveOperation.class); return pathFor(operationInfo.parentOperation(), null) + "/" + resolveName(operationInfo.name()[0], resourcePathOverride); } private static String resolveName(final String operationName, final String resourcePathOverride) { if ((resourcePathOverride == null) || resourcePathOverride.trim().isEmpty()) { return operationName; } return resourcePathOverride; } public static enum HttpMethod { GET, POST, PUT, PATCH, DELETE } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/api/ServiceStatus.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.api;

/**
 * Coarse outcome codes for service-enabled command invocations.
 *
 * <p>NOTE(review): the mapping of these constants to HTTP status codes is handled by the service
 * layer, not visible here — confirm against the REST service implementation.
 */
public enum ServiceStatus {
  OK, NOT_FOUND, DUPLICATE, INTERNAL_ERROR
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/GeoWaveBaseConverter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.converters; import com.beust.jcommander.converters.BaseConverter; import com.beust.jcommander.internal.Console; import com.beust.jcommander.internal.DefaultConsole; import com.beust.jcommander.internal.JDK6Console; import org.locationtech.geowave.core.cli.Constants; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.utils.PropertiesUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.lang.reflect.Method; import java.util.Properties; /** * Base value converter for handling field conversions of varying types * * @param */ public abstract class GeoWaveBaseConverter extends BaseConverter { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveBaseConverter.class); private String propertyKey; private static Console console; private static Properties properties; public GeoWaveBaseConverter() { super(""); init(); } public GeoWaveBaseConverter(final String optionName) { super(optionName); init(); } private void init() { File propertyFile = null; if (new ConfigOptions().getConfigFile() != null) { propertyFile = new File(new ConfigOptions().getConfigFile()); } else { propertyFile = ConfigOptions.getDefaultPropertyFile(getConsole()); } if ((propertyFile != null) && propertyFile.exists()) { setProperties(ConfigOptions.loadProperties(propertyFile)); } } protected static Console getConsole() { if (console == null) { try { Method consoleMethod = System.class.getDeclaredMethod("console"); Object systemConsole = consoleMethod.invoke(null); if (systemConsole == 
null) { console = new DefaultConsole(); } else { console = new JDK6Console(systemConsole); } } catch (Throwable t) { console = new DefaultConsole(); } } return console; } /** * Prompt a user for a standard value and return the input. * * @param promptMessage the prompt message * @return the value that was read */ public static String promptAndReadValue(final String promptMessage) { LOGGER.trace("ENTER :: promptAndReadValue()"); final PropertiesUtils propsUtils = new PropertiesUtils(getProperties()); final boolean defaultEchoEnabled = propsUtils.getBoolean(Constants.CONSOLE_DEFAULT_ECHO_ENABLED_KEY, false); LOGGER.debug( "Default console echo is {}", new Object[] {defaultEchoEnabled ? "enabled" : "disabled"}); getConsole().print(promptMessage); char[] responseChars = getConsole().readPassword(defaultEchoEnabled); final String response = new String(responseChars); responseChars = null; return response; } /** * Prompt a user for a password and return the input. * * @param promptMessage the prompt message * @return the value that was read */ public static String promptAndReadPassword(final String promptMessage) { LOGGER.trace("ENTER :: promptAndReadPassword()"); final PropertiesUtils propsUtils = new PropertiesUtils(getProperties()); final boolean defaultEchoEnabled = propsUtils.getBoolean(Constants.CONSOLE_DEFAULT_ECHO_ENABLED_KEY, false); final boolean passwordEchoEnabled = propsUtils.getBoolean(Constants.CONSOLE_PASSWORD_ECHO_ENABLED_KEY, defaultEchoEnabled); LOGGER.debug( "Password console echo is {}", new Object[] {passwordEchoEnabled ? 
"enabled" : "disabled"}); getConsole().print(promptMessage); char[] passwordChars = getConsole().readPassword(passwordEchoEnabled); final String strPassword = new String(passwordChars); passwordChars = null; return strPassword; } /** @return the propertyKey */ public String getPropertyKey() { return propertyKey; } /** @param propertyKey the propertyKey to set */ public void setPropertyKey(final String propertyKey) { this.propertyKey = propertyKey; } /** * Specify if a converter is for a password field. This allows a password field to be specified, * though side-stepping most of the default jcommander password functionality. * * @return {@code true} if the converter is for a password field */ public boolean isPassword() { return false; } /** * Specify if a field is required. * * @return {@code true} if the field is required */ public boolean isRequired() { return false; } /** @return the properties */ private static Properties getProperties() { return properties; } /** @param properties the properties to set */ private void setProperties(final Properties props) { properties = props; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/OptionalPasswordConverter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.converters;

/**
 * Extends the password converter class to force required=false
 *
 * <p>This class will allow support for users passing in passwords through a variety of ways.
 * Current supported options for passwords include standard password input (pass), an environment
 * variable (env), a file containing the password text (file), a properties file containing the
 * password associated with a specific key (propfile), and the user being prompted to enter the
 * password at command line (stdin).
 *
 * <p>Required notation for specifying varying inputs are:
 *
 * <ul>
 * <li>{@code pass:<password>}</li>
 * <li>{@code env:<variable containing the password>}</li>
 * <li>{@code file:<local file containing the password>}</li>
 * <li>{@code propfile:<local properties file containing the password>:<property file key>}</li>
 * <li>{@code stdin}</li>
 * </ul>
 */
public class OptionalPasswordConverter extends PasswordConverter {

  public OptionalPasswordConverter() {
    this("");
  }

  public OptionalPasswordConverter(final String optionName) {
    super(optionName);
  }

  /** Pass-through: conversion behavior is inherited entirely from PasswordConverter. */
  @Override
  public String convert(final String value) {
    return super.convert(value);
  }

  /** @return true — this converter always handles a password field */
  @Override
  public boolean isPassword() {
    return true;
  }

  /** @return false — the password field is optional for this converter */
  @Override
  public boolean isRequired() {
    return false;
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/PasswordConverter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.converters; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Properties; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.utils.FileUtils; import org.locationtech.geowave.core.cli.utils.PropertiesUtils; import com.beust.jcommander.ParameterException; /** * This class will allow support for user's passing in passwords through a variety of ways. Current * supported options for passwords include standard password input (pass), an environment variable * (env), a file containing the password text (file), a properties file containing the password * associated with a specific key (propfile), and the user being prompted to enter the password at * command line (stdin).

Required notation for specifying varying inputs are: * *

  • pass:<password>
  • env:<variable containing the password> *
  • file:<local file containing the password>
  • propfile:<local properties file containing the password>:<property file key>
  • stdin *
*/ public class PasswordConverter extends GeoWaveBaseConverter { public PasswordConverter(final String optionName) { super(optionName); } /* * HP Fortify "Use of Hard-coded Password - Password Management: Hardcoded Password" false * positive This is not a hard-coded password, just a description telling users options they have * for entering a password */ public static final String DEFAULT_PASSWORD_DESCRIPTION = "Can be specified as 'pass:', 'file:', " + "'propfile::', 'env:', or stdin"; public static final String STDIN = "stdin"; private static final String SEPARATOR = ":"; private enum KeyType { PASS("pass" + SEPARATOR) { @Override String process(final String password) { return password; } }, ENV("env" + SEPARATOR) { @Override String process(final String envVariable) { return System.getenv(envVariable); } }, FILE("file" + SEPARATOR) { @Override String process(final String value) { try { final String password = FileUtils.readFileContent(new File(value)); if ((password != null) && !"".equals(password.trim())) { return password; } } catch (final Exception ex) { throw new ParameterException(ex); } return null; } }, PROPFILE("propfile" + SEPARATOR) { @Override String process(final String value) { if ((value != null) && !"".equals(value.trim())) { if (value.indexOf(SEPARATOR) != -1) { String propertyFilePath = value.split(SEPARATOR)[0]; String propertyKey = value.split(SEPARATOR)[1]; if ((propertyFilePath != null) && !"".equals(propertyFilePath.trim())) { propertyFilePath = propertyFilePath.trim(); final File propsFile = new File(propertyFilePath); if ((propsFile != null) && propsFile.exists()) { final Properties properties = PropertiesUtils.fromFile(propsFile); if ((propertyKey != null) && !"".equals(propertyKey.trim())) { propertyKey = propertyKey.trim(); } if ((properties != null) && properties.containsKey(propertyKey)) { return properties.getProperty(propertyKey); } } else { try { throw new ParameterException( new FileNotFoundException( propsFile != null ? 
"Properties file not found at path: " + propsFile.getCanonicalPath() : "No properties file specified")); } catch (final IOException e) { throw new ParameterException(e); } } } else { throw new ParameterException("No properties file path specified"); } } else { throw new ParameterException( "Property File values are expected in input format ::"); } } else { throw new ParameterException(new Exception("No properties file specified")); } return value; } }, STDIN(PasswordConverter.STDIN) { private String input = null; @Override public boolean matches(final String value) { return prefix.equals(value); } @Override String process(final String value) { if (input == null) { input = promptAndReadPassword("Enter password: "); } return input; } }, DEFAULT("") { @Override String process(final String password) { return password; } }; String prefix; private KeyType(final String prefix) { this.prefix = prefix; } public boolean matches(final String value) { return value.startsWith(prefix); } public String convert(final String value) { return process(value.substring(prefix.length())); } String process(final String value) { return value; } } @Override public String convert(final String value) { for (final KeyType keyType : KeyType.values()) { if (keyType.matches(value)) { return keyType.convert(value); } } return value; } @Override public boolean isPassword() { return true; } @Override public boolean isRequired() { return true; } protected Properties getGeoWaveConfigProperties() { final File geowaveConfigPropsFile = getGeoWaveConfigFile(); return ConfigOptions.loadProperties(geowaveConfigPropsFile); } protected File getGeoWaveConfigFile() { return ConfigOptions.getDefaultPropertyFile(getConsole()); } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/converters/RequiredFieldConverter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.converters; /** * This converter does nothing other than ensure that a required field is setup. Using this - over * the standard JCommander 'required=true' - allows a user to be prompted for the field, rather than * always throwing an error (i.e. a more gracious way of reporting the error) */ public class RequiredFieldConverter extends GeoWaveBaseConverter { public RequiredFieldConverter(final String optionName) { super(optionName); } @Override public String convert(final String value) { return value; } @Override public boolean isRequired() { return true; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/exceptions/DuplicateEntryException.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/** Thrown when an attempt is made to create an entry that already exists. */
public class DuplicateEntryException extends Exception {

  private static final long serialVersionUID = 1L;

  public DuplicateEntryException() {
    super();
  }

  /**
   * @param message detail message describing the duplicate entry
   */
  public DuplicateEntryException(final String message) {
    super(message);
  }

  /**
   * @param message detail message describing the duplicate entry
   * @param cause underlying cause, preserved for exception chaining
   */
  public DuplicateEntryException(final String message, final Throwable cause) {
    super(message, cause);
  }
}

/** Thrown when a requested target cannot be found. */
public class TargetNotFoundException extends Exception {

  private static final long serialVersionUID = 1L;

  public TargetNotFoundException() {
    super();
  }

  /**
   * @param message detail message describing the missing target
   */
  public TargetNotFoundException(final String message) {
    super(message);
  }

  /**
   * @param message detail message describing the missing target
   * @param cause underlying cause, preserved for exception chaining
   */
  public TargetNotFoundException(final String message, final Throwable cause) {
    super(message, cause);
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations; import java.util.List; import java.util.SortedMap; import java.util.TreeMap; import org.apache.commons.lang3.StringUtils; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.JCommander; import com.beust.jcommander.ParameterDescription; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "explain", parentOperation = GeoWaveTopLevelSection.class) @Parameters( commandDescription = "See what arguments are missing and " + "what values will be used for GeoWave commands") public class ExplainCommand extends DefaultOperation implements Command { private static Logger LOGGER = LoggerFactory.getLogger(ExplainCommand.class); @Override public boolean prepare(final OperationParams inputParams) { super.prepare(inputParams); final CommandLineOperationParams params = (CommandLineOperationParams) inputParams; params.setValidate(false); params.setAllowUnknown(true); // Prepared successfully. 
return true; } @Override public void execute(final OperationParams inputParams) { final CommandLineOperationParams params = (CommandLineOperationParams) inputParams; final StringBuilder builder = new StringBuilder(); // Sort first String nextCommand = "geowave"; JCommander commander = params.getCommander(); while (commander != null) { if ((commander.getParameters() != null) && (commander.getParameters().size() > 0)) { builder.append("Command: "); builder.append(nextCommand); builder.append(" [options]"); if (commander.getParsedCommand() != null) { builder.append(" ..."); } builder.append("\n\n"); builder.append(explainCommander(commander)); builder.append("\n"); } else if (commander.getMainParameter() != null) { builder.append("Command: "); builder.append(nextCommand); if (commander.getParsedCommand() != null) { builder.append(" ..."); } builder.append("\n\n"); builder.append(explainMainParameter(commander)); builder.append("\n"); } nextCommand = commander.getParsedCommand(); commander = commander.getCommands().get(nextCommand); } params.getConsole().println(builder.toString().trim()); } /** * This function will explain the currently selected values for a JCommander. 
* * @param commander */ public static StringBuilder explainCommander(final JCommander commander) { final StringBuilder builder = new StringBuilder(); builder.append(" "); builder.append(String.format("%1$20s", "VALUE")); builder.append(" "); builder.append("NEEDED "); builder.append(String.format("%1$-40s", "PARAMETER NAMES")); builder.append("\n"); builder.append("----------------------------------------------\n"); // Sort first final SortedMap parameterDescs = new TreeMap<>(); final List parameters = commander.getParameters(); for (final ParameterDescription pd : parameters) { parameterDescs.put(pd.getLongestName(), pd); } // Then output for (final ParameterDescription pd : parameterDescs.values()) { Object value = null; try { // value = tEntry.getParam().get(tEntry.getObject()); value = pd.getParameterized().get(pd.getObject()); } catch (final Exception e) { LOGGER.warn("Unable to set value", e); } boolean required = false; if (pd.getParameterized().getParameter() != null) { required = pd.getParameterized().getParameter().required(); } else if (pd.isDynamicParameter()) { required = pd.getParameter().getDynamicParameter().required(); } final String names = pd.getNames(); final boolean assigned = pd.isAssigned(); // Data we have: // required, assigned, value, names. builder.append("{"); if (value == null) { value = ""; } builder.append(String.format("%1$20s", value)); builder.append("} "); if (required && !assigned) { builder.append("MISSING "); } else { builder.append(" "); } builder.append(String.format("%1$-40s", StringUtils.join(names, ","))); builder.append("\n"); } if (commander.getMainParameter() != null) { builder.append("\n"); builder.append(explainMainParameter(commander)); } return builder; } /** * Output details about the main parameter, if there is one. 
* * @return the explanation for the main parameter */ @SuppressWarnings("unchecked") public static StringBuilder explainMainParameter(final JCommander commander) { final StringBuilder builder = new StringBuilder(); final ParameterDescription mainParameter = commander.getMainParameterValue(); // Output the main parameter. if (mainParameter != null) { if ((mainParameter.getDescription() != null) && (mainParameter.getDescription().length() > 0)) { builder.append("Expects: "); builder.append(mainParameter.getDescription()); builder.append("\n"); } final boolean assigned = mainParameter.isAssigned(); builder.append("Specified: "); final List mP = (List) mainParameter.getParameterized().get(mainParameter.getObject()); if (!assigned || (mP.size() == 0)) { builder.append(""); } else { builder.append(String.format("%n%s", StringUtils.join(mP, " "))); } builder.append("\n"); } return builder; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/GeoWaveTopLevelSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations; import org.apache.logging.log4j.core.appender.ConsoleAppender; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.Logger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.layout.PatternLayout; import org.apache.logging.log4j.core.layout.PatternLayout.Builder; import org.locationtech.geowave.core.cli.VersionUtils; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "geowave") @Parameters(commandDescription = "This is the top level section.") public class GeoWaveTopLevelSection extends DefaultOperation { @Parameter(names = "--debug", description = "Verbose output") private Boolean verboseFlag; @Parameter(names = "--version", description = "Output Geowave build version information") private Boolean versionFlag; // This contains methods and parameters for determining where the GeoWave // cached configuration file is. 
@ParametersDelegate private final ConfigOptions options = new ConfigOptions(); @Override public boolean prepare(final OperationParams inputParams) { // This will load the properties file parameter into the // operation params. options.prepare(inputParams); super.prepare(inputParams); // Up the log level if (Boolean.TRUE.equals(verboseFlag)) { Configurator.setRootLevel(Level.DEBUG); PatternLayout patternLayout = PatternLayout.newBuilder().withPattern("%d{dd MMM HH:mm:ss} %p [%c{2}] - %m%n").build(); PatternLayout.createDefaultLayout(); ConsoleAppender consoleApp = ConsoleAppender.createDefaultAppenderForLayout(patternLayout); ((Logger) LogManager.getRootLogger()).addAppender(consoleApp); } // Print out the version info if requested. if (Boolean.TRUE.equals(versionFlag)) { VersionUtils.printVersionInfo(inputParams.getConsole()); // Do not continue return false; } // Successfully prepared return true; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/HelpCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.Operation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator; import org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap; import org.locationtech.geowave.core.cli.spi.OperationEntry; import org.locationtech.geowave.core.cli.spi.OperationRegistry; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "help", parentOperation = GeoWaveTopLevelSection.class) @Parameters(commandDescription = "Get descriptions of arguments for any GeoWave command") public class HelpCommand extends DefaultOperation implements Command { @Override public boolean prepare(final OperationParams inputParams) { super.prepare(inputParams); final CommandLineOperationParams params = (CommandLineOperationParams) inputParams; params.setValidate(false); params.setAllowUnknown(true); // Prepared successfully. 
return true; } @Override public void execute(final OperationParams inputParams) { final CommandLineOperationParams params = (CommandLineOperationParams) inputParams; final List nameArray = new ArrayList<>(); final OperationRegistry registry = OperationRegistry.getInstance(); StringBuilder builder = new StringBuilder(); Operation lastOperation = null; for (final Map.Entry entry : params.getOperationMap().entrySet()) { if (entry.getValue() == this) { continue; } nameArray.add(entry.getKey()); lastOperation = entry.getValue(); } if (lastOperation == null) { lastOperation = registry.getOperation(GeoWaveTopLevelSection.class).createInstance(); } if (lastOperation != null) { final String usage = lastOperation.usage(); if (usage != null) { System.out.println(usage); } else { // This is done because if we don't, then JCommander will // consider the given parameters as the Default parameters. // It's also done so that we can parse prefix annotations // and special delegate processing. final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(lastOperation); final JCommanderTranslationMap map = translator.translate(); map.createFacadeObjects(); // Copy default parameters over for help display. map.transformToFacade(); // Execute a prepare // Add processed objects final JCommander jc = new JCommander(); for (final Object obj : map.getObjects()) { jc.addObject(obj); } final String programName = StringUtils.join(nameArray, " "); jc.setProgramName(programName); jc.getUsageFormatter().usage(builder); // Trim excess newlines. final String operations = builder.toString().trim(); builder = new StringBuilder(); builder.append(operations); builder.append("\n\n"); // Add sub-commands final OperationEntry lastEntry = registry.getOperation(lastOperation.getClass()); // Cast to list so we can sort it based on operation name. 
final List children = new ArrayList<>(lastEntry.getChildren()); Collections.sort(children, getOperationComparator()); if (children.size() > 0) { builder.append(" Commands:\n"); for (final OperationEntry childEntry : children) { // Get description annotation final Parameters p = childEntry.getOperationClass().getAnnotation(Parameters.class); // If not hidden, then output it. if ((p == null) || !p.hidden()) { builder.append( String.format( " %s%n", StringUtils.join(childEntry.getOperationNames(), ", "))); if (p != null) { final String description = p.commandDescription(); builder.append(String.format(" %s%n", description)); } else { builder.append(" \n"); } builder.append("\n"); } } } // Trim excess newlines. final String output = builder.toString().trim(); System.out.println(output); } } } /** * This will sort operations based on their name. Just looks prettier on output. * * @return */ private Comparator getOperationComparator() { return new Comparator() { @Override public int compare(final OperationEntry o1, final OperationEntry o2) { return o1.getOperationNames()[0].compareTo(o2.getOperationNames()[0]); } }; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/TopLevelOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class TopLevelOperationProvider implements CLIOperationProviderSpi { private static final Class[] BASE_OPERATIONS = new Class[] {GeoWaveTopLevelSection.class, ExplainCommand.class, HelpCommand.class}; @Override public Class[] getOperations() { return BASE_OPERATIONS; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/ConfigOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations.config; import org.locationtech.geowave.core.cli.operations.config.security.NewTokenCommand; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class ConfigOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { ConfigSection.class, ListCommand.class, SetCommand.class, NewTokenCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/ConfigSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Parent CLI section for commands that affect local configuration only (e.g. the list and set
 * commands registered under it); it defines no behavior of its own.
 */
@GeowaveOperation(name = "config", parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Commands that affect local configuration only")
public class ConfigSection extends DefaultOperation {
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations.config; import java.io.File; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.SortedMap; import java.util.TreeMap; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "list", parentOperation = ConfigSection.class) @Parameters(commandDescription = "List GeoWave configuration properties") public class ListCommand extends ServiceEnabledCommand> { @Parameter(names = {"-f", "--filter"}) private String filter; @Override public void execute(final OperationParams params) { final Pair> list = getProperties(params); final String name = list.getKey(); params.getConsole().println("PROPERTIES (" + name + ")"); final SortedMap properties = list.getValue(); for (final Entry e : properties.entrySet()) { params.getConsole().println(e.getKey() + ": " + e.getValue()); } } @Override public SortedMap computeResults(final OperationParams params) { return getProperties(params).getValue(); } private Pair> getProperties(final OperationParams params) { final File f = getGeoWaveConfigFile(params); // Reload options with filter if specified. 
Properties p = null; if (filter != null) { p = ConfigOptions.loadProperties(f, filter); } else { p = ConfigOptions.loadProperties(f); } return new ImmutablePair<>(f.getName(), new GeoWaveConfig(p)); } protected static class GeoWaveConfig extends TreeMap { private static final long serialVersionUID = 1L; public GeoWaveConfig() { super(); } public GeoWaveConfig(final Map m) { super(m); } } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/SetCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations.config; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.apache.commons.lang3.StringUtils; import org.locationtech.geowave.core.cli.Constants; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.converters.PasswordConverter; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "set", parentOperation = ConfigSection.class) @Parameters(commandDescription = "Set GeoWave configuration property directly") public class SetCommand extends ServiceEnabledCommand { /** Return "200 OK" for the set command. 
*/ @Override public Boolean successStatusIs200() { return true; } private static final Logger LOGGER = LoggerFactory.getLogger(SetCommand.class); @Parameter(description = " ") private List parameters = new ArrayList<>(); @Parameter( names = {"--password"}, description = "Specify if the value being set is a password and should be encrypted in the configurations") private Boolean password = false; private boolean isRestCall = true; @Override public void execute(final OperationParams params) { isRestCall = false; computeResults(params); } /** * Add rest endpoint for the set command. Looks for GET params with keys 'key' and 'value' to set. * * @return string containing json with details of success or failure of the set */ @Override public Object computeResults(final OperationParams params) { return setKeyValue(params); } /** Set the key value pair in the config. Store the previous value of the key in prevValue */ private Object setKeyValue(final OperationParams params) { final File f = getGeoWaveConfigFile(params); final Properties p = ConfigOptions.loadProperties(f); String key = null; String value = null; final PasswordConverter converter = new PasswordConverter(null); if ((parameters.size() == 1) && (parameters.get(0).indexOf("=") != -1)) { final String[] parts = StringUtils.split(parameters.get(0), "="); key = parts[0]; if (!isRestCall && password) { value = converter.convert(parts[1]); } else { value = parts[1]; } } else if (parameters.size() == 2) { key = parameters.get(0); if (!isRestCall && password) { value = converter.convert(parameters.get(1)); } else { value = parameters.get(1); } } else { throw new ParameterException("Requires: "); } if (password) { // check if encryption is enabled in configuration if (Boolean.parseBoolean( p.getProperty(Constants.ENCRYPTION_ENABLED_KEY, Constants.ENCRYPTION_ENABLED_DEFAULT))) { try { final File tokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(getGeoWaveConfigFile()); value = 
SecurityUtils.encryptAndHexEncodeValue( value, tokenFile.getAbsolutePath(), params.getConsole()); LOGGER.debug("Value was successfully encrypted"); } catch (final Exception e) { LOGGER.error( "An error occurred encrypting the specified value: " + e.getLocalizedMessage(), e); } } else { LOGGER.debug( "Value was set as a password, though encryption is currently disabled, so value was not encrypted. " + "Please enable encryption and re-try.\n" + "Note: To enable encryption, run the following command: geowave config set {}=true", Constants.ENCRYPTION_ENABLED_KEY); } } final Object previousValue = p.setProperty(key, value); if (!ConfigOptions.writeProperties(f, p, params.getConsole())) { throw new WritePropertiesException("Write failure"); } else { return previousValue; } } public List getParameters() { return parameters; } public void setParameters(final String key, final String value) { parameters = new ArrayList<>(); parameters.add(key); parameters.add(value); } private static class WritePropertiesException extends RuntimeException { /** */ private static final long serialVersionUID = 1L; private WritePropertiesException(final String string) { super(string); } } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/options/ConfigOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations.config.options; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.util.Collections; import java.util.Enumeration; import java.util.Properties; import java.util.Scanner; import java.util.Set; import java.util.TreeSet; import java.util.regex.Pattern; import org.locationtech.geowave.core.cli.Constants; import org.locationtech.geowave.core.cli.VersionUtils; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils; import org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.internal.Console; /** * Config options allows the user to override the default location for configuration options, and * also allows commands to load the properties needed for running the program. 
*/ public class ConfigOptions { public static final String CHARSET = "ISO-8859-1"; private static final Logger LOGGER = LoggerFactory.getLogger(ConfigOptions.class); public static final String PROPERTIES_FILE_CONTEXT = "properties-file"; public static final String GEOWAVE_CACHE_PATH = ".geowave"; public static final String GEOWAVE_CACHE_FILE = "config.properties"; /** Allow the user to override the config file location */ @Parameter( names = {"-cf", "--config-file"}, description = "Override configuration file (default is /.geowave/config.properties)") private String configFile; public ConfigOptions() {} public String getConfigFile() { return configFile; } public void setConfigFile(final String configFilePath) { configFile = configFilePath; } /** * The default property file is in the user's home directory, in the .geowave folder. * * @return a property file in the user's home directory */ public static File getDefaultPropertyPath() { // File location // HP Fortify "Path Manipulation" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway final String cachePath = String.format( "%s%s%s", System.getProperty("user.home"), File.separator, GEOWAVE_CACHE_PATH); return new File(cachePath); } /** * The default property file is in the user's home directory, in the .geowave folder. If the * version can not be found the first available property file in the folder is used. * * @return the default property file */ public static File getDefaultPropertyFile() { return getDefaultPropertyFile(null); } /** * The default property file is in the user's home directory, in the .geowave folder. If the * version can not be found the first available property file in the folder is used. 
 *
 * @param console console to print output to
 *
 * @return the default property file
 */
public static File getDefaultPropertyFile(final Console console) {
  // HP Fortify "Path Manipulation" false positive
  // What Fortify considers "user input" comes only
  // from users with OS-level access anyway
  final File defaultPath = getDefaultPropertyPath();
  final String version = VersionUtils.getVersion(console);
  if (version != null) {
    return formatConfigFile(version, defaultPath);
  } else {
    // Version unknown: fall back to the first "*-config.properties" file found
    // in the cache directory, or a synthetic "unknownversion" file name.
    final String[] configFiles = defaultPath.list(new FilenameFilter() {
      @Override
      public boolean accept(final File dir, final String name) {
        return name.endsWith("-config.properties");
      }
    });
    if ((configFiles != null) && (configFiles.length > 0)) {
      // Strip the trailing "-config.properties" suffix (18 chars) to recover
      // the version prefix of the first matching file.
      final String backupVersion = configFiles[0].substring(0, configFiles[0].length() - 18);
      return formatConfigFile(backupVersion, defaultPath);
    } else {
      return formatConfigFile("unknownversion", defaultPath);
    }
  }
}

/**
 * Configures a File based on a given path name and version
 *
 * @param version version string embedded in the file name
 * @param defaultPath directory containing the config file
 * @return Configured File ("<dir>/<version>-config.properties")
 */
public static File formatConfigFile(final String version, final File defaultPath) {
  // HP Fortify "Path Manipulation" false positive
  // What Fortify considers "user input" comes only
  // from users with OS-level access anyway
  final String configFile =
      String.format(
          "%s%s%s%s%s",
          defaultPath.getAbsolutePath(),
          File.separator,
          version,
          "-",
          GEOWAVE_CACHE_FILE);
  return new File(configFile);
}

/**
 * Writes the given properties to the config file. When encryption is enabled in the properties,
 * any fields of {@code clazz} annotated as JCommander password parameters are encrypted before
 * being persisted.
 *
 * @param configFile file to write
 * @param properties properties to persist (may be mutated: password values are replaced with
 *        their encrypted form)
 * @param clazz optional class whose password-annotated fields should be encrypted; may be null
 * @param namespacePrefix optional prefix qualifying property keys for {@code clazz} fields
 * @param console console to print output to
 * @return true on success, false if the file could not be written
 */
public static boolean writeProperties(
    final File configFile,
    final Properties properties,
    final Class clazz,
    final String namespacePrefix,
    final Console console) {
  try {
    // Anonymous Properties subclass that sorts keys so the stored file has a
    // stable, deterministic ordering.
    final Properties tmp = new Properties() {
      private static final long serialVersionUID = 1L;

      @Override
      public Set keySet() {
        return Collections.unmodifiableSet(new TreeSet<>(super.keySet()));
      }

      @Override
      public synchronized Enumeration keys() {
        return Collections.enumeration(new TreeSet<>(super.keySet()));
      }
    };
    // check if encryption is enabled - it is by default and would need
    // to be explicitly disabled
    if (Boolean.parseBoolean(
        properties.getProperty(
            Constants.ENCRYPTION_ENABLED_KEY, Constants.ENCRYPTION_ENABLED_DEFAULT))) {
      // check if any values exist that need to be encrypted before
      // written to properties
      if (clazz != null) {
        final Field[] fields = clazz.getDeclaredFields();
        for (final Field field : fields) {
          for (final Annotation annotation : field.getAnnotations()) {
            if (annotation.annotationType() == Parameter.class) {
              final Parameter parameter = (Parameter) annotation;
              if (JCommanderParameterUtils.isPassword(parameter)) {
                // Property key is "<namespace>.<field>" when a prefix is given.
                final String storeFieldName =
                    ((namespacePrefix != null) && !"".equals(namespacePrefix.trim()))
                        ? namespacePrefix + "." + field.getName()
                        : field.getName();
                if (properties.containsKey(storeFieldName)) {
                  final String value = properties.getProperty(storeFieldName);
                  String encryptedValue = value;
                  try {
                    final File tokenFile =
                        SecurityUtils.getFormattedTokenKeyFileForConfig(configFile);
                    encryptedValue =
                        SecurityUtils.encryptAndHexEncodeValue(
                            value, tokenFile.getAbsolutePath(), console);
                  } catch (final Exception e) {
                    LOGGER.error(
                        "An error occurred encrypting specified password value: "
                            + e.getLocalizedMessage(),
                        e);
                    // Fall back to storing the raw value when encryption fails.
                    encryptedValue = value;
                  }
                  properties.setProperty(storeFieldName, encryptedValue);
                }
              }
            }
          }
        }
      }
    }
    tmp.putAll(properties);
    try (FileOutputStream str = new FileOutputStream(configFile)) {
      tmp.store(
          // HPFortify FP: passwords are stored encrypted
          str,
          null);
    }
  } catch (final FileNotFoundException e) {
    LOGGER.error("Could not find the property file.", e);
    return false;
  } catch (final IOException e) {
    LOGGER.error("Exception writing property file.", e);
    return false;
  }
  return true;
}

/**
 * Write the given properties to the file, and log an error if an exception occurs.
* * @return true if success, false if failure */ public static boolean writeProperties( final File configFile, final Properties properties, final Console console) { return writeProperties(configFile, properties, null, null, console); } /** * This helper function will load the properties file, or return null if it can't. It's designed * to be used by other commands. */ public static Properties loadProperties(final File configFile) { return loadProperties(configFile, null); } /** * This helper function will load the properties file, or return null if it can't. It's designed * to be used by other commands. */ public static Properties loadProperties(final File configFile, final String pattern) { // Load the properties file. final Properties properties = new Properties(); if (configFile.exists()) { Pattern p = null; if (pattern != null) { p = Pattern.compile(pattern); } InputStream is = null; try { if (p != null) { try (FileInputStream input = new FileInputStream(configFile); Scanner s = new Scanner(input, CHARSET)) { final ByteArrayOutputStream out = new ByteArrayOutputStream(); final PrintWriter writer = new PrintWriter(new OutputStreamWriter(out, CHARSET)); while (s.hasNext()) { final String line = s.nextLine(); if (p.matcher(line).find()) { writer.println(line); } } writer.flush(); is = new ByteArrayInputStream(out.toByteArray()); } } else { is = new FileInputStream(configFile); } properties.load(is); } catch (final IOException e) { LOGGER.error("Could not find property cache file: " + configFile, e); return null; } finally { if (is != null) { try { is.close(); } catch (final IOException e) { LOGGER.error(e.getMessage(), e); } } } } return properties; } /** * Load the properties file into the input params. 
* * @param inputParams */ public void prepare(final OperationParams inputParams) { File propertyFile = null; if (getConfigFile() != null) { propertyFile = new File(getConfigFile()); } else { propertyFile = getDefaultPropertyFile(inputParams.getConsole()); } // Set the properties on the context. inputParams.getContext().put(PROPERTIES_FILE_CONTEXT, propertyFile); } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/NewTokenCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.operations.config.security;

import java.io.File;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.operations.config.ConfigSection;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption;
import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameters;

@GeowaveOperation(name = "newcryptokey", parentOperation = ConfigSection.class)
@Parameters(
    commandDescription = "Generate a new security cryptography key for use with configuration properties")
public class NewTokenCommand extends DefaultOperation implements Command {
  private static final Logger sLog = LoggerFactory.getLogger(NewTokenCommand.class);

  /**
   * Generates a new encryption token. If a token already exists, the old token is backed up,
   * every ENC{...}-wrapped value in the config properties is decrypted with the old token and
   * re-encrypted with the new one, and the backup is scheduled for deletion on exit.
   */
  @Override
  public void execute(final OperationParams params) {
    sLog.trace("ENTER :: execute");
    final File geowaveDir = getGeoWaveDirectory();
    if ((geowaveDir != null) && geowaveDir.exists()) {
      final File tokenFile = getSecurityTokenFile();
      // if token already exists, iterate through config props file and
      // re-encrypt any encrypted values against the new token
      if ((tokenFile != null) && tokenFile.exists()) {
        try {
          sLog.info(
              "Existing encryption token file exists already at path ["
                  + tokenFile.getCanonicalPath());
          sLog.info(
              "Creating new encryption token and migrating all passwords in [{}] to be encrypted with new token",
              ConfigOptions.getDefaultPropertyFile(params.getConsole()).getCanonicalPath());
          File backupFile = null;
          boolean tokenBackedUp = false;
          try {
            // Rename the current token to "<token>.bak" so values can still be
            // decrypted with it, then write a fresh token in its place.
            backupFile = new File(tokenFile.getCanonicalPath() + ".bak");
            tokenBackedUp = tokenFile.renameTo(backupFile);
            generateNewEncryptionToken(tokenFile);
          } catch (final Exception ex) {
            sLog.error(
                "An error occurred backing up existing token file. Please check directory and permissions and try again.",
                ex);
          }
          if (tokenBackedUp) {
            final Properties configProps = getGeoWaveConfigProperties(params);
            if (configProps != null) {
              boolean updated = false;
              final Set keySet = configProps.keySet();
              final Iterator keyIter = keySet.iterator();
              if (keyIter != null) {
                String configKey = null;
                while (keyIter.hasNext()) {
                  configKey = (String) keyIter.next();
                  final String configValue = configProps.getProperty(configKey);
                  // Only migrate values that are actually encrypted (ENC{...}).
                  if ((configValue != null)
                      && !"".equals(configValue.trim())
                      && BaseEncryption.isProperlyWrapped(configValue)) {
                    // HP Fortify "NULL Pointer Dereference"
                    // false positive
                    // Exception handling will catch if
                    // backupFile is null
                    final String decryptedValue =
                        SecurityUtils.decryptHexEncodedValue(
                            configValue, backupFile.getCanonicalPath(), params.getConsole());
                    final String encryptedValue =
                        SecurityUtils.encryptAndHexEncodeValue(
                            decryptedValue, tokenFile.getCanonicalPath(), params.getConsole());
                    configProps.put(configKey, encryptedValue);
                    updated = true;
                  }
                }
              }
              if (updated) {
                ConfigOptions.writeProperties(
                    getGeoWaveConfigFile(params), configProps, params.getConsole());
              }
            }
            // HP Fortify "NULL Pointer Dereference" false positive
            // Exception handling will catch if backupFile is null
            backupFile.deleteOnExit();
          }
        } catch (final Exception ex) {
          sLog.error(
              "An error occurred creating a new encryption token: " + ex.getLocalizedMessage(),
              ex);
        }
      } else {
        // No existing token: simply generate a fresh one.
        generateNewEncryptionToken(tokenFile);
      }
    }
    sLog.trace("EXIT :: execute");
  }
}
================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/crypto/BaseEncryption.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
/** */
package org.locationtech.geowave.core.cli.operations.config.security.crypto;

import java.io.File;
import java.security.Key;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;
import org.locationtech.geowave.core.cli.utils.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Console;

/**
 * Abstract base encryption class for setting up and defining common encryption/decryption methods
 */
public abstract class BaseEncryption {
  private static final Logger LOGGER = LoggerFactory.getLogger(BaseEncryption.class);

  public static String resourceName = "geowave_crypto_key.dat";
  private String resourceLocation;
  private Key key = null;

  /*
   * PROTECT this value. The salt value is the second-half of the protection mechanism for key used
   * when encrypting or decrypting the content. We cannot generate a new random cryptography key
   * each time, as that would mean two different keys. At the same time, encrypted values would be
   * very vulnerable to unintentional exposure if (a wrong) someone got access to the token key
   * file, so this salt allows us to protect the encryption with "2 locks" - both are needed to
   * decrypt a value that was encrypted with the SAME two values (salt - below - and token file -
   * specified at resourceLocation)
   */
  protected byte[] salt = null;
  protected File tokenFile = null;

  // Encrypted values are persisted wrapped as ENC{<hex>}.
  private static final String PREFIX = "ENC{";
  private static final String SUFFIX = "}";
  public static final String WRAPPER = PREFIX + SUFFIX;
  // Matches a fully wrapped value and captures the hex payload in group(1).
  private static final Pattern ENCCodePattern =
      Pattern.compile(PREFIX.replace("{", "\\{") + "([^}]+)" + SUFFIX.replace("{", "\\{"));

  private final String KEY_ENCRYPTION_ALGORITHM = "AES";

  /**
   * Base constructor for encryption, allowing a resource location for the cryptography token key to
   * be specified, rather than using the default-generated path
   *
   * @param resourceLocation Path to cryptography token key file
   * @param console console for any output during initialization
   */
  public BaseEncryption(final String resourceLocation, Console console) {
    try {
      setResourceLocation(resourceLocation);
      init(console);
    } catch (final Throwable t) {
      LOGGER.error(t.getLocalizedMessage(), t);
    }
  }

  /** Base constructor for encryption */
  public BaseEncryption(Console console) {
    init(console);
  }

  /**
   * Method to initialize all required fields, check for the existence of the cryptography token
   * key, and generate the key for encryption/decryption
   */
  private void init(Console console) {
    try {
      checkForToken(console);
      setResourceLocation(tokenFile.getCanonicalPath());
      salt = "Ge0W@v3-Ro0t-K3y".getBytes("UTF-8");
      generateRootKeyFromToken();
    } catch (final Throwable t) {
      LOGGER.error(t.getLocalizedMessage(), t);
    }
  }

  /** Check if encryption token exists.
If not, create one initially */
private void checkForToken(Console console) throws Throwable {
  if (getResourceLocation() != null) {
    // this is simply caching the location, ideally under all
    // circumstances resource location exists
    tokenFile = new File(getResourceLocation());
  } else {
    // first-time initialization: assumes the token lives next to the default
    // config file; because of that assumption this can cause inconsistency if
    // a non-default config location is in use
    tokenFile =
        SecurityUtils.getFormattedTokenKeyFileForConfig(
            ConfigOptions.getDefaultPropertyFile(console));
  }
  if (!tokenFile.exists()) {
    generateNewEncryptionToken(tokenFile);
  }
}

/**
 * Generates a token file resource name that includes the current version
 *
 * @return formatted token key file name
 */
public static String getFormattedTokenFileName(final String configFilename) {
  return String.format("%s.key", configFilename);
}

/**
 * Generate a new token value in a specified file
 *
 * @param tokenFile file to write the random token to
 * @return {@code true} if the token was successfully generated
 */
public static boolean generateNewEncryptionToken(final File tokenFile) throws Exception {
  boolean success = false;
  try {
    LOGGER.info("Writing new encryption token to file at path {}", tokenFile.getCanonicalPath());
    org.apache.commons.io.FileUtils.writeStringToFile(tokenFile, generateRandomSecretKey());
    LOGGER.info("Completed writing new encryption token to file");
    success = true;
  } catch (final Exception ex) {
    LOGGER.error(
        "An error occurred writing new encryption token to file: " + ex.getLocalizedMessage(),
        ex);
    throw ex;
  }
  return success;
}

/*
 * INTERNAL METHODS
 */
/**
 * Returns the path on the file system to the resource for the token
 *
 * @return Path to resource to get the token
 */
public String getResourceLocation() {
  return resourceLocation;
}

/**
 * Sets the path to the resource for the token
 *
 * @param resourceLoc Path to resource to get the token
 */
public void setResourceLocation(final String resourceLoc) throws Throwable {
  resourceLocation = resourceLoc;
}

/**
 * Checks to see if the data is properly wrapped with ENC{}
 *
 * @param data value to inspect
 * @return boolean - true if properly wrapped, false otherwise
 */
public static boolean isProperlyWrapped(final String data) {
  return ENCCodePattern.matcher(data).matches();
}

/**
 * Converts a binary value to a encoded string
 *
 * @param data Binary value to encode as an encoded string
 * @return Encoded string from the binary value specified
 */
private String toString(final byte[] data) {
  return Hex.encodeHexString(data);
}

/**
 * Converts a string value to a decoded binary
 *
 * @param data String value to convert to decoded hex
 * @return Decoded binary from the string value specified, or null if decoding fails
 */
private byte[] fromString(final String data) {
  try {
    return Hex.decodeHex(data.toCharArray());
  } catch (final DecoderException e) {
    LOGGER.error(e.getLocalizedMessage(), e);
    return null;
  }
}

/** Method to generate a new secret key from the specified token key file */
private void generateRootKeyFromToken() throws Throwable {
  if (!tokenFile.exists()) {
    throw new Throwable("Token file not found at specified path [" + getResourceLocation() + "]");
  }
  try {
    // Derive a 256-bit AES key from the token file content (as the password)
    // and the fixed salt via PBKDF2.
    final String strPassword = FileUtils.readFileContent(tokenFile);
    final char[] password = strPassword != null ? strPassword.trim().toCharArray() : null;
    final SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
    final SecretKey tmp = factory.generateSecret(new PBEKeySpec(password, salt, 65536, 256));
    setKey(new SecretKeySpec(tmp.getEncoded(), KEY_ENCRYPTION_ALGORITHM));
  } catch (final Exception ex) {
    // NOTE(review): failure here is logged but not rethrown, leaving "key" null;
    // later encrypt/decrypt calls would then fail — confirm intended.
    LOGGER.error(
        "An error occurred generating the root key from the specified token: "
            + ex.getLocalizedMessage(),
        ex);
  }
}

/**
 * Method to generate a new random token key value
 *
 * @return base64-encoded random 256-bit AES key
 * @throws Exception if the AES key generator is unavailable
 */
private static String generateRandomSecretKey() throws Exception {
  final KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
  keyGenerator.init(256);
  final SecretKey secretKey = keyGenerator.generateKey();
  final byte[] encoded = secretKey.getEncoded();
  return Base64.encodeBase64String(encoded);
}

/**
 * Set the key to use
 *
 * @param key key used for subsequent encrypt/decrypt operations
 */
protected void setKey(final Key key) {
  this.key = key;
}

/**
 * @return the key to use
 */
protected Key getKey() {
  return key;
}

/*
 * ENCRYPTION METHODS
 */
/**
 * Method to encrypt and hex-encode a string value using the specified token resource
 *
 * @param data String to encrypt
 * @return Encrypted and Hex-encoded string value using the specified token resource, wrapped
 *         as ENC{...}; null when the input is null
 * @throws Exception
 */
public String encryptAndHexEncode(final String data) throws Exception {
  if (data == null) {
    return null;
  }
  final byte[] encryptedBytes = encryptBytes(data.getBytes("UTF-8"));
  return PREFIX + toString(encryptedBytes) + SUFFIX;
}

/*
 * DECRYPTION METHODS
 */
/**
 * Returns a decrypted value from the encrypted hex-encoded value specified
 *
 * @param data Hex-Encoded string value to decrypt
 * @return Decrypted value from the encrypted hex-encoded value specified; values not wrapped
 *         with ENC{} are returned unchanged
 * @throws Exception
 */
public String decryptHexEncoded(final String data) throws Exception {
  if (data == null) {
    return null;
  }
  final Matcher matcher = ENCCodePattern.matcher(data);
  if (matcher.matches()) {
    final String codedString = matcher.group(1);
    return new String(decryptBytes(fromString(codedString)), "UTF-8");
  } else {
    return data;
  }
}

/*
 * ABSTRACT METHODS
 */
/**
 * Encrypt the data as a byte array
 *
 * @param valueToEncrypt value to encrypt
 */
public abstract byte[] encryptBytes(byte[] valueToEncrypt) throws Exception;

/**
 * Decrypt the encrypted data
 *
 * @param valueToDecrypt value to decrypt
 */
public abstract byte[] decryptBytes(byte[] valueToDecrypt) throws Exception;
}
================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/crypto/GeoWaveEncryption.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
/** */
package org.locationtech.geowave.core.cli.operations.config.security.crypto;

import org.apache.commons.codec.binary.Base64;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.CryptoException;
import org.bouncycastle.crypto.engines.AESEngine;
import org.bouncycastle.crypto.modes.CBCBlockCipher;
import org.bouncycastle.crypto.paddings.PKCS7Padding;
import org.bouncycastle.crypto.paddings.PaddedBufferedBlockCipher;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.crypto.params.ParametersWithIV;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Console;

/** Encryption/Decryption implementation based of symmetric cryptography */
public class GeoWaveEncryption extends BaseEncryption {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveEncryption.class);

  /**
   * Base constructor for encryption, allowing a resource location for the cryptography token key to
   * be specified, rather than using the default-generated path
   *
   * @param resourceLocation Path to cryptography token key file
   */
  public GeoWaveEncryption(final String resourceLocation, Console console) {
    super(resourceLocation, console);
  }

  /** Base constructor for encryption */
  public GeoWaveEncryption(Console console) {
    super(console);
  }

  @Override
  public byte[] encryptBytes(final byte[] valueToEncrypt) throws Exception {
    // AES/CBC/PKCS7 encrypt, then base64-encode the ciphertext.
    return Base64.encodeBase64(encryptValue(valueToEncrypt));
  }

  @Override
  public byte[] decryptBytes(final byte[] valueToDecrypt) throws Exception {
    // Base64-decode, then AES/CBC/PKCS7 decrypt.
    return decryptValue(Base64.decodeBase64(valueToDecrypt));
  }

  /**
   * Builds an AES/CBC cipher with PKCS7 padding, keyed with the derived root key and using the
   * shared salt bytes as the IV.
   *
   * @param encrypt true to initialize for encryption, false for decryption
   */
  private PaddedBufferedBlockCipher getCipher(final boolean encrypt) {
    final PaddedBufferedBlockCipher cipher =
        new PaddedBufferedBlockCipher(new CBCBlockCipher(new AESEngine()), new PKCS7Padding());
    final CipherParameters ivAndKey =
        new ParametersWithIV(new KeyParameter(getKey().getEncoded()), salt);
    cipher.init(encrypt, ivAndKey);
    return cipher;
  }

  /**
   * Encrypts a binary value using the given key and returns a base 64 encoded encrypted string.
   *
   * @param encodedValue Binary value to encrypt
   * @return Encrypted binary
   * @throws Exception
   */
  private byte[] encryptValue(final byte[] encodedValue) throws Exception {
    LOGGER.trace("ENTER :: encyrpt");
    final PaddedBufferedBlockCipher cipher = getCipher(true);
    final byte output[] = new byte[cipher.getOutputSize(encodedValue.length)];
    final int length = cipher.processBytes(encodedValue, 0, encodedValue.length, output, 0);
    try {
      cipher.doFinal(output, length);
    } catch (final CryptoException e) {
      LOGGER.error("An error occurred performing encryption: " + e.getLocalizedMessage(), e);
    }
    return output;
  }

  /**
   * Decrypts the base64-decoded value
   *
   * @param decodedValue value to decrypt
   * @return decrypted bytes with NUL characters removed
   * @throws Exception
   */
  private byte[] decryptValue(final byte[] decodedValue) throws Exception {
    final StringBuffer result = new StringBuffer();
    final PaddedBufferedBlockCipher cipher = getCipher(false);
    final byte output[] = new byte[cipher.getOutputSize(decodedValue.length)];
    final int length = cipher.processBytes(decodedValue, 0, decodedValue.length, output, 0);
    cipher.doFinal(output, length);
    // NOTE(review): the return values of processBytes/doFinal are not used to
    // truncate "output", so it may carry trailing NUL bytes; the loop below
    // strips NUL chars instead of slicing to the actual decrypted length.
    if ((output != null) && (output.length != 0)) {
      final String retval = new String(output, "UTF-8");
      for (int i = 0; i < retval.length(); i++) {
        final char c = retval.charAt(i);
        if (c != 0) {
          result.append(c);
        }
      }
    }
    return result.toString().getBytes("UTF-8");
  }
}
================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/config/security/utils/SecurityUtils.java ================================================
/** *
Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.operations.config.security.utils; import java.io.File; import org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption; import org.locationtech.geowave.core.cli.operations.config.security.crypto.GeoWaveEncryption; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.internal.Console; /** Security utility class for simpler interfacing with */ public class SecurityUtils { private static final Logger LOGGER = LoggerFactory.getLogger(SecurityUtils.class); private static BaseEncryption encService; private static final String WRAPPER = BaseEncryption.WRAPPER; /** * Method to decrypt a value * * @param value Value to decrypt. Should be wrapped with ENC{} * @param resourceLocation Optional value to specify the location of the encryption service * resource location * @return decrypted value */ public static String decryptHexEncodedValue( final String value, final String resourceLocation, Console console) throws Exception { LOGGER.trace("Decrypting hex-encoded value"); if ((value != null) && !"".equals(value.trim())) { if (BaseEncryption.isProperlyWrapped(value.trim())) { try { return getEncryptionService(resourceLocation, console).decryptHexEncoded(value); } catch (final Throwable t) { LOGGER.error( "Encountered exception during content decryption: " + t.getLocalizedMessage(), t); } } else { LOGGER.debug( "WARNING: Value to decrypt was not propertly encoded and wrapped with " + WRAPPER + ". 
Not decrypting value."); return value; } } else { LOGGER.debug("WARNING: No value specified to decrypt."); } return ""; } /** * Method to encrypt and hex-encode a string value * * @param value value to encrypt and hex-encode * @param resourceLocation resource token to use for encrypting the value * @return If encryption is successful, encrypted and hex-encoded string value is returned wrapped * with ENC{} */ public static String encryptAndHexEncodeValue( final String value, final String resourceLocation, Console console) throws Exception { LOGGER.debug("Encrypting and hex-encoding value"); if ((value != null) && !"".equals(value.trim())) { if (!BaseEncryption.isProperlyWrapped(value)) { try { return getEncryptionService(resourceLocation, console).encryptAndHexEncode(value); } catch (final Throwable t) { LOGGER.error( "Encountered exception during content encryption: " + t.getLocalizedMessage(), t); } } else { LOGGER.debug( "WARNING: Value to encrypt already appears to be encrypted and already wrapped with " + WRAPPER + ". 
Not encrypting value."); return value; } } else { LOGGER.debug("WARNING: No value specified to encrypt."); return value; } return value; } /** * Returns an instance of the encryption service, initialized with the token at the provided * resource location * * @param resourceLocation location of the resource token to initialize the encryption service * with * @return An initialized instance of the encryption service * @throws Exception */ private static synchronized BaseEncryption getEncryptionService( final String resourceLocation, Console console) throws Throwable { if (encService == null) { if ((resourceLocation != null) && !"".equals(resourceLocation.trim())) { LOGGER.trace( "Setting resource location for encryption service: [" + resourceLocation + "]"); encService = new GeoWaveEncryption(resourceLocation, console); } else { encService = new GeoWaveEncryption(console); } } else { if (!resourceLocation.equals(encService.getResourceLocation())) { encService = new GeoWaveEncryption(resourceLocation, console); } } return encService; } /** * Utilty method to format the file path for the token key file associated with a config file * * @param configFile Location of config file that token key file is associated with * @return File for given config file */ public static File getFormattedTokenKeyFileForConfig(final File configFile) { return new File( // get the resource location configFile.getParentFile(), // get the formatted token file name with version BaseEncryption.getFormattedTokenFileName(configFile.getName())); } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/util/UtilOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.operations.util;

import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;

/**
 * SPI provider that registers the "util" CLI section ({@code UtilSection}) with GeoWave's
 * operation discovery mechanism.
 */
public class UtilOperationProvider implements CLIOperationProviderSpi {
  // The single operation section contributed by this provider.
  private static final Class[] OPERATIONS = new Class[] {UtilSection.class};

  /** @return the operation classes contributed by this provider */
  @Override
  public Class[] getOperations() {
    return OPERATIONS;
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/operations/util/UtilSection.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.operations.util;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * CLI section grouping GeoWave utility commands under "geowave util" (alias "utility").
 * Holds no options of its own; child commands are registered beneath it via annotations.
 */
@GeowaveOperation(name = {"util", "utility"}, parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "GeoWave utility commands")
public class UtilSection extends DefaultOperation {
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/parser/CommandLineOperationParams.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.parser;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.locationtech.geowave.core.cli.api.Operation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.prefix.PrefixedJCommander;
import com.beust.jcommander.internal.Console;

/**
 * OperationParams implementation produced when parsing command-line arguments. Carries the raw
 * arguments, the resolved chain of operations, parse flags (validation / unknown-option
 * tolerance), and the outcome (code/message/exception) of the parse.
 */
public class CommandLineOperationParams implements OperationParams {

  // Free-form key/value context shared between operations during parse/execute.
  private final Map context = new HashMap<>();
  // Operations keyed by name; LinkedHashMap preserves the order they were resolved in.
  private final Map operationMap = new LinkedHashMap<>();

  // The raw command-line arguments this params object was built from.
  private final String[] args;

  private PrefixedJCommander commander;
  // When true, the final parse pass enforces required-option validation.
  private boolean validate = true;
  // When true, unrecognized options are tolerated rather than rejected.
  private boolean allowUnknown = false;
  // Set once any operation flagged as an executable command is added.
  private boolean commandPresent;

  // Parse outcome: 0 = success; non-zero codes are set by the parser on failure.
  private int successCode = 0;
  private String successMessage;
  private Throwable successException;

  public CommandLineOperationParams(final String[] args) {
    this.args = args;
  }

  public String[] getArgs() {
    return args;
  }

  /** Implement parent interface to retrieve operations */
  @Override
  public Map getOperationMap() {
    return operationMap;
  }

  @Override
  public Map getContext() {
    return context;
  }

  public PrefixedJCommander getCommander() {
    return commander;
  }

  // Delegates to the commander. NOTE(review): throws NPE if called before setCommander().
  public Console getConsole() {
    return commander.getConsole();
  }

  public void setValidate(final boolean validate) {
    this.validate = validate;
  }

  public void setAllowUnknown(final boolean allowUnknown) {
    this.allowUnknown = allowUnknown;
  }

  public boolean isValidate() {
    return validate;
  }

  public boolean isAllowUnknown() {
    return allowUnknown;
  }

  public void setCommander(final PrefixedJCommander commander) {
    this.commander = commander;
  }

  /**
   * Register an operation under the given name.
   *
   * @param name the operation name as used on the command line
   * @param operation the operation instance
   * @param isCommand true if this operation is an executable command (not just a section)
   */
  public void addOperation(final String name, final Operation operation, final boolean isCommand) {
    // Remember that at least one executable command has been seen.
    commandPresent |= isCommand;
    operationMap.put(name, operation);
  }

  public boolean isCommandPresent() {
    return commandPresent;
  }

  public int getSuccessCode() {
    return successCode;
  }

  public void setSuccessCode(final int successCode) {
    this.successCode = successCode;
  }

  public String getSuccessMessage() {
    return successMessage;
  }

  public void setSuccessMessage(final String successMessage) {
    this.successMessage = successMessage;
  }

  public Throwable getSuccessException() {
    return successException;
  }

  public void setSuccessException(final Throwable successException) {
    this.successException = successException;
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/parser/ManualOperationParams.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.parser;

import java.util.HashMap;
import java.util.Map;
import org.locationtech.geowave.core.cli.api.Operation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.internal.Console;

/**
 * Minimal OperationParams for driving operations programmatically (no command-line parsing
 * involved). Only the context map carries state between calls.
 */
public class ManualOperationParams implements OperationParams {

  // Free-form key/value context shared between operations.
  private final Map context = new HashMap<>();

  // NOTE(review): returns a fresh empty map on every call, so entries added by callers
  // are discarded — confirm this is intentional before relying on it.
  @Override
  public Map getOperationMap() {
    return new HashMap<>();
  }

  @Override
  public Map getContext() {
    return context;
  }

  // Obtains JCommander's default console; a throwaway JCommander instance is built per call.
  @Override
  public Console getConsole() {
    return new JCommander().getConsole();
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/parser/OperationParser.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.parser;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.cli.api.Operation;
import org.locationtech.geowave.core.cli.prefix.PrefixedJCommander;
import org.locationtech.geowave.core.cli.prefix.PrefixedJCommander.PrefixedJCommanderInitializer;
import org.locationtech.geowave.core.cli.spi.OperationEntry;
import org.locationtech.geowave.core.cli.spi.OperationRegistry;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;

/**
 * Resolves and parses GeoWave CLI command lines. Performs a two-pass parse: a lenient pass to
 * discover the operation chain and let each operation prepare itself, then a strict pass with
 * the caller's validation / unknown-option settings. Parse failures are recorded on the returned
 * CommandLineOperationParams rather than thrown.
 */
public class OperationParser {
  private final OperationRegistry registry;
  // Extra annotated objects to include in every parse (e.g., global flags).
  private final Set additionalObjects = new HashSet<>();

  public OperationParser(final OperationRegistry registry) {
    this.registry = registry;
  }

  public OperationParser() {
    this(OperationRegistry.getInstance());
  }

  /**
   * Parse command line arguments into the given operation. The operation will be prepared, and
   * then can be directly executed, or modified before being executed.
   *
   * @param operation the operation
   * @param args the command arguments
   * @return the parsed parameters
   */
  public CommandLineOperationParams parse(final Operation operation, final String[] args) {
    final CommandLineOperationParams params = new CommandLineOperationParams(args);
    final OperationEntry topLevelEntry = registry.getOperation(operation.getClass());
    // Populate the operation map.
    params.getOperationMap().put(topLevelEntry.getOperationNames()[0], operation);
    parseInternal(params, topLevelEntry);
    return params;
  }

  /**
   * Search the arguments for the list of commands/operations to execute based on the top level
   * operation entry given.
   *
   * @param topLevel the top level operation class
   * @param args the command arguments
   * @return the parsed parameters
   */
  public CommandLineOperationParams parse(final Class topLevel, final String[] args) {
    final CommandLineOperationParams params = new CommandLineOperationParams(args);
    final OperationEntry topLevelEntry = registry.getOperation(topLevel);
    parseInternal(params, topLevelEntry);
    return params;
  }

  /**
   * Parse, starting from the given entry. First pass is lenient (no validation, unknown options
   * accepted) so that each discovered operation can run its prepare() hook; a fresh commander is
   * then built for the strict second pass, since JCommander instances are single-use.
   *
   * @param params accumulates the operation chain and the parse outcome
   */
  private void parseInternal(
      final CommandLineOperationParams params,
      final OperationEntry topLevelEntry) {
    try {
      final PrefixedJCommander pluginCommander = new PrefixedJCommander();
      pluginCommander.setInitializer(new OperationContext(topLevelEntry, params));
      params.setCommander(pluginCommander);
      for (final Object obj : additionalObjects) {
        params.getCommander().addPrefixedObject(obj);
      }
      // Parse without validation so we can prepare.
      params.getCommander().setAcceptUnknownOptions(true);
      params.getCommander().setValidate(false);
      params.getCommander().parse(params.getArgs());
      // Prepare stage:
      for (final Operation operation : params.getOperationMap().values()) {
        // Do not continue if any operation vetoes execution.
        if (!operation.prepare(params)) {
          params.setSuccessCode(1);
          return;
        }
      }
      // Parse with validation
      final PrefixedJCommander finalCommander = new PrefixedJCommander();
      finalCommander.setInitializer(new OperationContext(topLevelEntry, params));
      params.setCommander(finalCommander);
      for (final Object obj : additionalObjects) {
        params.getCommander().addPrefixedObject(obj);
      }
      params.getCommander().setAcceptUnknownOptions(params.isAllowUnknown());
      params.getCommander().setValidate(params.isValidate());
      params.getCommander().parse(params.getArgs());
    } catch (final ParameterException p) {
      // Record the failure on the params instead of propagating it.
      params.setSuccessCode(-1);
      params.setSuccessMessage("Error: " + p.getMessage());
      params.setSuccessException(p);
    }
    return;
  }

  /**
   * Parse the command line arguments into the objects given in the 'additionalObjects' array. I
   * don't really ever forsee this ever being used, but hey, why not.
   *
   * @param args the command arguments
   * @return the parsed parameters
   */
  public CommandLineOperationParams parse(final String[] args) {
    final CommandLineOperationParams params = new CommandLineOperationParams(args);
    try {
      final PrefixedJCommander pluginCommander = new PrefixedJCommander();
      params.setCommander(pluginCommander);
      for (final Object obj : additionalObjects) {
        params.getCommander().addPrefixedObject(obj);
      }
      params.getCommander().parse(params.getArgs());
    } catch (final ParameterException p) {
      params.setSuccessCode(-1);
      params.setSuccessMessage("Error: " + p.getMessage());
      params.setSuccessException(p);
    }
    return params;
  }

  public Set getAdditionalObjects() {
    return additionalObjects;
  }

  public void addAdditionalObject(final Object obj) {
    additionalObjects.add(obj);
  }

  public OperationRegistry getRegistry() {
    return registry;
  }

  /**
   * This class is used to lazily init child commands only when they are actually referenced/used
   * by command line options. It will set itself on the commander, and then add its children as
   * commands.
   */
  public class OperationContext implements PrefixedJCommanderInitializer {
    private final OperationEntry operationEntry;
    private final CommandLineOperationParams params;
    // The operation instance bound to this context; set in initialize().
    private Operation operation;

    public OperationContext(final OperationEntry entry, final CommandLineOperationParams params) {
      operationEntry = entry;
      this.params = params;
    }

    @Override
    public void initialize(final PrefixedJCommander commander) {
      commander.setCaseSensitiveOptions(false);
      // Resolve which alias of this operation was actually typed; default to the
      // primary name, and prefer any alias that appears in the arguments.
      final String[] opNames = operationEntry.getOperationNames();
      String opName = opNames[0];
      for (int i = 1; i < opNames.length; i++) {
        for (final String arg : params.getArgs()) {
          if (arg.equals(opNames[i])) {
            opName = arg;
            break;
          }
        }
      }
      // Add myself (reuse a previously registered instance if present).
      if (params.getOperationMap().containsKey(opName)) {
        operation = params.getOperationMap().get(opName);
      } else {
        operation = operationEntry.createInstance();
        params.addOperation(opName, operation, operationEntry.isCommand());
      }
      commander.addPrefixedObject(operation);
      // initialize the commander by adding child operations.
      for (final OperationEntry child : operationEntry.getChildren()) {
        final String[] names = child.getOperationNames();
        commander.addCommand(names[0], null, Arrays.copyOfRange(names, 1, names.length));
      }
      // Update each command to add an initializer, so grandchildren are also lazy.
      final Map childCommanders = commander.getCommands();
      for (final OperationEntry child : operationEntry.getChildren()) {
        final PrefixedJCommander pCommander =
            (PrefixedJCommander) childCommanders.get(child.getOperationNames()[0]);
        pCommander.setInitializer(new OperationContext(child, params));
      }
    }

    public Operation getOperation() {
      return operation;
    }

    public OperationEntry getOperationEntry() {
      return operationEntry;
    }
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JCommanderPrefixTranslator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import org.locationtech.geowave.core.cli.annotations.PrefixParameter;
import com.beust.jcommander.Parameterized;

/**
 * This class will take a collection of objects with JCommander annotations and create a
 * transformed set of objects with altered option prefixes, based on the @PrefixParameter
 * annotation. It also expands the capabilities of @ParametersDelegate, allowing you to specify a
 * collection of objects, or a map, where the String key is prepended as a prefix to the commands
 * under that object. TODO: This might work better with a Visitor pattern
 */
public class JCommanderPrefixTranslator {
  // Work queue of (prefix, object) pairs still to be visited.
  private final Queue queue = new LinkedList<>();

  // These will be used to access the "field" or "method" attribute within
  // Parameterized, which is a special JCommander class. If the interface changes
  // in the future, this may not work anymore.
  private Field paraField;
  private Field paraMethod;

  public JCommanderPrefixTranslator() {
    try {
      // HP Fortify "Access Specifier Manipulation"
      // These fields are being modified by trusted code,
      // in a way that is not influenced by user input
      paraField = Parameterized.class.getDeclaredField("field");
      paraField.setAccessible(true);
      paraMethod = Parameterized.class.getDeclaredField("method");
      paraMethod.setAccessible(true);
    } catch (final NoSuchFieldException e) {
      // This is a programmer error, and will only happen if another
      // version of JCommander is being used.
      // newer versions of JCommander have renamed the member variables, try the old names
      try {
        paraField = Parameterized.class.getDeclaredField("m_field");
        paraField.setAccessible(true);
        paraMethod = Parameterized.class.getDeclaredField("m_method");
        paraMethod.setAccessible(true);
      } catch (NoSuchFieldException e2) {
        // NOTE(review): e2 is suppressed here; only the first failure (e) is
        // propagated. Consider e.addSuppressed(e2) so both names are reported.
        throw new RuntimeException(e);
      }
    }
  }

  /**
   * Queue an object (with an empty prefix) for translation.
   *
   * @param object an object carrying JCommander annotations
   */
  public void addObject(final Object object) {
    final ParseContext pc = new ParseContext("", object);
    queue.add(pc);
  }

  /**
   * Walk every queued object (breadth-first, following @ParametersDelegate members) and build
   * the translation map of uniquely renamed facade fields to their original parameters.
   *
   * @return the populated translation map
   */
  public JCommanderTranslationMap translate() {
    // This map will hold the final translations
    final JCommanderTranslationMap transMap = new JCommanderTranslationMap();
    try {
      while (queue.size() > 0) {
        final ParseContext pc = queue.remove();
        final Object item = pc.getObject();
        // This is the JCommander class used to parse the object hierarchy for
        // Parameter annotations. They kept it public ... so I used it. Otherwise,
        // I'd have to parse all the annotations myself.
        final List params = Parameterized.parseArg(item);
        // Iterate over the parameters, copying the method or field parameters
        // into new parameters in 'newClass', ensuring that we maintain annotations.
        for (final Parameterized param : params) {
          final Field f = (Field) paraField.get(param);
          final Method m = (Method) paraMethod.get(param);
          // Whichever of field/method is set carries the annotations.
          final AnnotatedElement annotatedElement = f != null ? f : m;
          // If this is a delegate, then process prefix parameter, add the item
          // to the queue, and move on to the next field.
          if (param.getDelegateAnnotation() != null) {
            // JCommander only cares about non null fields when
            // processing ParametersDelegate.
            final Object delegateItem = param.get(item);
            if (delegateItem != null) {
              // Prefix parameter only matters for ParametersDelegate.
              final PrefixParameter prefixParam =
                  annotatedElement.getAnnotation(PrefixParameter.class);
              String newPrefix = pc.getPrefix();
              if (prefixParam != null) {
                if (!newPrefix.equals("")) {
                  newPrefix += JCommanderTranslationMap.PREFIX_SEPARATOR;
                }
                newPrefix += prefixParam.prefix();
              }
              // Is this a list type? If so then process each object independently.
              if (delegateItem instanceof Collection) {
                final Collection coll = (Collection) delegateItem;
                for (final Object collItem : coll) {
                  final ParseContext newPc = new ParseContext(newPrefix, collItem);
                  queue.add(newPc);
                }
              }
              // For maps, use the key as an additional prefix specifier.
              else if (delegateItem instanceof Map) {
                final Map mapp = (Map) delegateItem;
                for (final Map.Entry entry : mapp.entrySet()) {
                  final String prefix = entry.getKey().toString();
                  final Object mapItem = entry.getValue();
                  String convertedPrefix = newPrefix;
                  if (!convertedPrefix.equals("")) {
                    convertedPrefix += JCommanderTranslationMap.PREFIX_SEPARATOR;
                  }
                  convertedPrefix += prefix;
                  final ParseContext newPc = new ParseContext(convertedPrefix, mapItem);
                  queue.add(newPc);
                }
              }
              // Normal params delegate.
              else {
                final ParseContext newPc = new ParseContext(newPrefix, delegateItem);
                queue.add(newPc);
              }
            }
          } else {
            // TODO: In the future, if we wanted to do @PluginParameter, this is
            // probably where we'd parse it, from annotatedElement. Then we'd
            // add it to transMap below.
            // Rename the field so there are no conflicts. Name really doesn't
            // matter, but it's used for translation in transMap.
            final String newFieldName = JavassistUtils.getNextUniqueFieldName();
            // Now add an entry to the translation map.
            transMap.addEntry(newFieldName, item, param, pc.getPrefix(), annotatedElement);
          }
        } // Iterate Parameterized
      } // Iterate Queue
      return transMap;
    } catch (final IllegalAccessException e) {
      // This should never happen, but if it does, then it's a programmer error.
      throw new RuntimeException(e);
    }
  }

  /**
   * This class is used to keep context of what the current prefix is during prefix translation
   * for JCommander. It is stored in the queue.
   */
  private static class ParseContext {
    private final String prefix;
    private final Object object;

    public ParseContext(final String prefix, final Object object) {
      this.prefix = prefix;
      this.object = object;
    }

    public String getPrefix() {
      return prefix;
    }

    public Object getObject() {
      return object;
    }
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JCommanderPropertiesTransformer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.JCommander;

/**
 * This class will translate a given set of ParameterDescription entries into a properties file or
 * back given a JCommander translation map.
 */
public class JCommanderPropertiesTransformer {
  private static Logger LOGGER = LoggerFactory.getLogger(JCommanderPropertiesTransformer.class);

  // The namespace is prepended to entries translated via
  // this translator in the Properties object, or it is used
  // to only retrieve properties that start with this
  // namespace.
  private final String propertyFormat;
  // The annotated objects whose parameters are translated to/from properties.
  private final List objects = new ArrayList<>();

  /**
   * @param namespace optional namespace prefix for all property names; null means no prefix
   */
  public JCommanderPropertiesTransformer(final String namespace) {
    if (namespace == null) {
      propertyFormat = "%s";
    } else {
      propertyFormat = String.format("%s.%s", namespace, "%s");
    }
  }

  public JCommanderPropertiesTransformer() {
    this(null);
  }

  /**
   * Add an object to be translated
   *
   * @param object an object carrying JCommander annotations
   */
  public void addObject(final Object object) {
    objects.add(object);
  }

  /**
   * Entries are needed to translate to/from the objects using the JCommander prefixes.
   *
   * @return the translation entries for all registered objects
   */
  private Collection generateEntries() {
    final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();
    for (final Object obj : objects) {
      translator.addObject(obj);
    }
    final JCommanderTranslationMap map = translator.translate();
    return map.getEntries().values();
  }

  /**
   * Take the options and translate them to a map.
   *
   * @param properties the map to populate
   */
  public void transformToMap(final Map properties) {
    final Properties props = new Properties();
    transformToProperties(props);
    for (final String prop : props.stringPropertyNames()) {
      properties.put(prop, props.getProperty(prop));
    }
  }

  /**
   * Take the options and translate them from a map.
   *
   * @param properties the map to read values from; null values are skipped
   */
  public void transformFromMap(final Map properties) {
    final Properties props = new Properties();
    for (final Entry prop : properties.entrySet()) {
      if (prop.getValue() != null) {
        props.setProperty(prop.getKey(), prop.getValue());
      }
    }
    transformFromProperties(props);
  }

  /**
   * Take the given values in the translation map, and convert them to a properties list.
   *
   * @param toProperties the Properties object to populate
   */
  public void transformToProperties(final Properties toProperties) {
    // Translate all fields.
    for (final TranslationEntry entry : generateEntries()) {
      // Get the Properties name.
      String propertyName = entry.getAsPropertyName();
      propertyName = String.format(propertyFormat, propertyName);
      // Get the value.
      Object value = null;
      try {
        value = entry.getParam().get(entry.getObject());
      } catch (final Exception e) {
        // NOTE(review): message says "set" but this is a failed *get* of the
        // parameter value — consider "Unable to get value".
        LOGGER.warn("Unable to set value", e);
        continue;
      }
      if (value == null) {
        continue;
      }
      // Dyn parameter, serialize map.
      if (entry.getParam().isDynamicParameter()) {
        @SuppressWarnings("unchecked")
        final Map props = (Map) value;
        for (final Map.Entry prop : props.entrySet()) {
          if (prop.getValue() != null) {
            toProperties.put(String.format("%s.%s", propertyName, prop.getKey()), prop.getValue());
          }
        }
      } else {
        toProperties.put(propertyName, value.toString());
      }
    }
  }

  /**
   * Take the given properties list, and convert it to the given objects.
   *
   * @param fromProperties the Properties object to read values from
   */
  public void transformFromProperties(final Properties fromProperties) {
    // This JCommander object is used strictly to use the 'convertValue'
    // function which happens to be public.
    final JCommander jc = new JCommander();
    // Translate all fields.
    for (final TranslationEntry entry : generateEntries()) {
      // Get the Properties name.
      String propertyName = entry.getAsPropertyName();
      propertyName = String.format(propertyFormat, propertyName);
      // Set the value.
      if (entry.getParam().isDynamicParameter()) {
        final Map fromMap = new HashMap<>();
        final Set propNames = fromProperties.stringPropertyNames();
        for (final String propName : propNames) {
          // NOTE(review): startsWith() also matches a property named exactly
          // propertyName (substring below would then throw out-of-bounds) and
          // any name sharing the prefix without a '.' separator — confirm the
          // dynamic-parameter naming scheme rules both cases out.
          if (propName.startsWith(propertyName)) {
            // Parse off the "<propertyName>." prefix to recover the dynamic key.
            final String parsedName = propName.substring(propertyName.length() + 1);
            fromMap.put(parsedName, fromProperties.getProperty(propName));
          }
        }
        // Set the map.
        entry.getParam().set(entry.getObject(), fromMap);
      } else {
        final String value = fromProperties.getProperty(propertyName);
        if (value != null) {
          // Convert the value to the expected format, and
          // set it on the original object.
          entry.getParam().set(
              entry.getObject(),
              jc.convertValue(entry.getParam(), entry.getParam().getType(), propertyName, value));
        }
      }
    }
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JCommanderTranslationMap.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameterized;
import javassist.CannotCompileException;
import javassist.ClassClassPath;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.NotFoundException;
import javassist.bytecode.AccessFlag;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.BooleanMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;

/**
 * The translation map allows us to easily copy values from the facade objects back to the
 * original objects.
 */
public class JCommanderTranslationMap {
  private static Logger LOGGER = LoggerFactory.getLogger(JCommanderTranslationMap.class);

  // This package is where classes generated by this translator live in the
  // classpath.
  public static final String NAMES_MEMBER = "names";
  public static final String REQUIRED_MEMBER = "required";
  // HP Fortify "Hardcoded Password - Password Management: Hardcoded Password"
  // false positive
  // This is a password label, not a password
  public static final String PASSWORD_MEMBER = "password";
  public static final String PREFIX_SEPARATOR = ".";

  // Tells us how to translate a field (indexed by facade field id) to
  // the original objects and back.
  private final Map translations = new LinkedHashMap<>();

  // These are the objects generated by createFacadeObjects()
  private List translatedObjects = null;

  public JCommanderTranslationMap() {}

  /**
   * Objects are the facades.
   *
   * @return the translated objects
   */
  public Collection getObjects() {
    return Collections.unmodifiableCollection(translatedObjects);
  }

  /**
   * Return all the translations. They are indexed by 'field name', where field name is the field
   * in the facade object. Allow the user to modify them up until they create the facade objects
   *
   * @return the translations (unmodifiable once facades exist)
   */
  public Map getEntries() {
    if (translatedObjects != null) {
      return Collections.unmodifiableMap(translations);
    }
    return translations;
  }

  /**
   * Transfer the values from the facade objects to the original objects using the translation
   * map.
   */
  public void transformToOriginal() {
    for (final Object obj : translatedObjects) {
      for (final Field field : obj.getClass().getDeclaredFields()) {
        final TranslationEntry tEntry = translations.get(field.getName());
        try {
          tEntry.getParam().set(tEntry.getObject(), field.get(obj));
        } catch (IllegalArgumentException | IllegalAccessException e) {
          // Allow these, since they really shouldn't ever happen.
          LOGGER.warn("Unable to return field object", e);
        }
      }
    }
  }

  /**
   * Transfer the values from the original objects to the facade objects using the translation map.
*/ public void transformToFacade() { for (final Object obj : translatedObjects) { for (final Field field : obj.getClass().getDeclaredFields()) { final TranslationEntry tEntry = translations.get(field.getName()); try { field.set(obj, tEntry.getParam().get(tEntry.getObject())); } catch (IllegalArgumentException | IllegalAccessException e) { // Ignore, no getter (if it's a method) or there was // a security violation. LOGGER.warn("Unable to set field", e); } } } } /** * This is a mapping between the created facade's field (e.g., field_0) and the JCommander * parameter (param) which lives in the object it was parsed from, 'item'. */ protected void addEntry( final String newFieldName, final Object item, final Parameterized param, final String prefix, final AnnotatedElement member) { translations.put(newFieldName, new TranslationEntry(param, item, prefix, member)); } /** * This will create the facade objects needed in order to parse the fields represented in the * translation map. */ public void createFacadeObjects() { if (translatedObjects != null) { throw new RuntimeException("Cannot use the same translation " + "map twice"); } // Clear old objects. translatedObjects = new ArrayList<>(); // So we don't re-create classes we already created. final Map, CtClass> createdClasses = new HashMap<>(); try { // This class pool will be used to find existing classes and create // new // classes. final ClassPool classPool = ClassPool.getDefault(); final ClassClassPath path = new ClassClassPath(JCommanderPrefixTranslator.class); classPool.insertClassPath(path); // Iterate the final translations and create the classes. for (final Map.Entry mapEntry : translations.entrySet()) { // Cache for later. final String newFieldName = mapEntry.getKey(); final TranslationEntry entry = mapEntry.getValue(); // This is the class we're making a facade of. 
final Class objectClass = entry.getObject().getClass(); // Get a CtClass reference to the item's class final CtClass oldClass = classPool.get(objectClass.getName()); // Retrieve previously created class to add new field CtClass newClass = createdClasses.get(objectClass); // Create the class if we haven't yet. if (newClass == null) { // Create the class, so we can start adding the new facade // fields to it. newClass = JavassistUtils.generateEmptyClass(); // Copy over the @Parameters annotation, if it is set. JavassistUtils.copyClassAnnotations(oldClass, newClass); // Store for later. createdClasses.put(objectClass, newClass); } // This is a field or method, which means we should add it to // our current // object. CtField newField = null; if (!entry.isMethod()) { // This is a field. This is easy! Just clone the field. It // will // copy over the annotations as well. newField = new CtField(oldClass.getField(entry.getParam().getName()), newClass); } else { // This is a method. This is hard. We can create a field // with the same name, but we gotta copy over the // annotations manually. // We also don't want to copy annotations that specifically // target // METHOD, so we'll only clone annotations that can target // FIELD. final CtClass fieldType = classPool.get(entry.getParam().getType().getName()); newField = new CtField(fieldType, entry.getParam().getName(), newClass); // We need to find the existing method CtMethod reference, // so we can clone // annotations. This method is ugly. Do not look at it. final CtMethod method = JavassistUtils.findMethod(oldClass, (Method) entry.getMember()); // Copy the annotations! JavassistUtils.copyMethodAnnotationsToField(method, newField); } // This is where the meat of the prefix algorithm is. If we have // a prefix // for this class(in ParseContext), then we apply it to the // attributes by // iterating over the annotations, looking for a 'names' member // variable, and // overriding the values one by one. 
if (entry.getPrefix().length() > 0) { overrideParameterPrefixes(newField, entry.getPrefixedNames()); } // This is a fix for #95 ( // https://github.com/cbeust/jcommander/issues/95 ). // I need this for cpstore, cpindex, etc, but it's only been // implemented as of 1.55, // an unreleased version. if (entry.isRequired() && entry.hasValue()) { disableBooleanMember(REQUIRED_MEMBER, newField); } if (entry.isPassword() && entry.hasValue()) { disableBooleanMember(PASSWORD_MEMBER, newField); } // Rename the field so there are no conflicts. Name really // doesn't matter, // but it's used for translation in transMap. newField.setName(newFieldName); newField.getFieldInfo().setAccessFlags(AccessFlag.PUBLIC); // Add the field to the class newClass.addField(newField); } // Iterate TranslationEntry // Convert the translated CtClass to an actual class. for (final CtClass clz : createdClasses.values()) { final Class toClass = clz.toClass(); final Object instance = toClass.newInstance(); translatedObjects.add(instance); } } catch (InstantiationException | IllegalAccessException | NotFoundException | IllegalStateException | NullPointerException | CannotCompileException e) { LOGGER.error("Unable to create classes", e); throw new RuntimeException(); } /* * catch (Exception e) { // This should never happen, but if it does, then it's a programmer // * error. throw new RuntimeException( e); } */ } /** * Iterate the annotations, look for a 'names' parameter, and override it to prepend the given * prefix. */ private void overrideParameterPrefixes(final CtField field, final String[] names) { // This is the JCommander package name final String packageName = JCommander.class.getPackage().getName(); final AnnotationsAttribute fieldAttributes = (AnnotationsAttribute) field.getFieldInfo().getAttribute(AnnotationsAttribute.visibleTag); // Look for annotations that have a 'names' attribute, and whose package // starts with the expected JCommander package. 
for (final Annotation annotation : fieldAttributes.getAnnotations()) { if (annotation.getTypeName().startsWith(packageName)) { // See if it has a 'names' member variable. final MemberValue namesMember = annotation.getMemberValue(NAMES_MEMBER); // We have a names member!!! if (namesMember != null) { final ArrayMemberValue arrayNamesMember = (ArrayMemberValue) namesMember; // Iterate and transform each item in 'names()' list and // transform it. final MemberValue[] newMemberValues = new MemberValue[names.length]; for (int i = 0; i < names.length; i++) { newMemberValues[i] = new StringMemberValue(names[i], field.getFieldInfo2().getConstPool()); } // Override the member values in nameMember with the new // one's we've generated arrayNamesMember.setValue(newMemberValues); // This is KEY! For some reason, the existing annotation // will not be modified unless // you call 'setAnnotation' here. I'm guessing // 'getAnnotation()' creates a copy. fieldAttributes.setAnnotation(annotation); // Finished processing names. break; } } } } /** * Iterate the annotations, look for a 'required' parameter, and set it to false. */ private void disableBooleanMember(final String booleanMemberName, final CtField field) { // This is the JCommander package name final String packageName = JCommander.class.getPackage().getName(); final AnnotationsAttribute fieldAttributes = (AnnotationsAttribute) field.getFieldInfo().getAttribute(AnnotationsAttribute.visibleTag); // Look for annotations that have a 'names' attribute, and whose package // starts with the expected JCommander package. for (final Annotation annotation : fieldAttributes.getAnnotations()) { if (annotation.getTypeName().startsWith(packageName)) { // See if it has a 'names' member variable. final MemberValue requiredMember = annotation.getMemberValue(booleanMemberName); // We have a names member!!! 
if (requiredMember != null) { final BooleanMemberValue booleanRequiredMember = (BooleanMemberValue) requiredMember; // Set it to not required. booleanRequiredMember.setValue(false); // This is KEY! For some reason, the existing annotation // will not be modified unless // you call 'setAnnotation' here. I'm guessing // 'getAnnotation()' creates a copy. fieldAttributes.setAnnotation(annotation); // Finished processing names. break; } } } } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/JavassistUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import java.lang.annotation.ElementType;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ConstPool;
import javassist.bytecode.Descriptor;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.MemberValue;

/**
 * These functions make it less of a pain to deal with Javassist. There's one to find methods, and
 * one to clone annotations, which is used in several places within JCommanderPrefixTranslator.
 */
public class JavassistUtils {
  private static Logger LOGGER = LoggerFactory.getLogger(JavassistUtils.class);

  // Package into which generated facade classes are placed.
  public static final String PREFIX_PACKAGE = "org.locationtech.geowave.core.cli.parsed";

  // Per-JVM unique suffix so generated class names never collide across runs.
  private static final String uniqueId;
  // Monotonic counter shared by class-name and field-name generation.
  private static int objectCounter = 0;

  static {
    uniqueId = UUID.randomUUID().toString().replace('-', '_');
  }

  // Utility class; not instantiable.
  private JavassistUtils() {}

  /**
   * This function will take the given annotations attribute and create a new attribute, cloning all
   * the annotations and specified values within the attribute. The annotations attribute can then
   * be set on a method, class, or field.
   *
   * @param constPool the constant pool of the class the new attribute will be attached to
   * @param attr the attribute to clone (may be null, yielding an empty attribute)
   * @param validElementType only annotations whose @Target permits this element type are copied
   * @return the cloned (runtime-visible) annotations attribute
   */
  public static AnnotationsAttribute cloneAnnotationsAttribute(
      final ConstPool constPool,
      final AnnotationsAttribute attr,
      final ElementType validElementType) {

    // We can use system class loader here because the annotations for
    // Target are part of the Java System.
    final ClassLoader cl = ClassLoader.getSystemClassLoader();

    final AnnotationsAttribute attrNew =
        new AnnotationsAttribute(constPool, AnnotationsAttribute.visibleTag);

    if (attr != null) {
      for (final Annotation annotation : attr.getAnnotations()) {

        final Annotation newAnnotation = new Annotation(annotation.getTypeName(), constPool);

        // If this must target a certain type of field, then ensure we
        // only copy over annotations that can target that type of field.
        // For instance, a METHOD annotation can't be applied to a
        // FIELD or TYPE.
        Class annoClass;
        try {
          annoClass = cl.loadClass(annotation.getTypeName());
          final Target target = annoClass.getAnnotation(Target.class);
          // No @Target means the annotation may be applied anywhere.
          if ((target != null) && !Arrays.asList(target.value()).contains(validElementType)) {
            continue;
          }
        } catch (final ClassNotFoundException e) {
          // Cannot apply this annotation because its type cannot be
          // found.
          LOGGER.error("Cannot apply this annotation because it's type cannot be found", e);
          continue;
        }

        // Copy over the options for this annotation. For example:
        // @Parameter(names = "-blah")
        // For this, a member value would be "names" which would be a
        // StringMemberValue
        if (annotation.getMemberNames() != null) {
          for (final Object memberName : annotation.getMemberNames()) {
            final MemberValue memberValue = annotation.getMemberValue((String) memberName);
            if (memberValue != null) {
              newAnnotation.addMemberValue((String) memberName, memberValue);
            }
          }
        }

        attrNew.addAnnotation(newAnnotation);
      }
    }
    return attrNew;
  }

  /**
   * This function will find the method in the CtClass, and return it as a CtMethod. The lookup is
   * done by name plus a bytecode descriptor built from the reflective parameter/return types.
   *
   * @param clz the Javassist class to search
   * @param m the reflective method to find
   * @return the matching CtMethod
   * @throws NotFoundException if the method (or one of its parameter types) cannot be resolved
   */
  public static CtMethod findMethod(final CtClass clz, final Method m) throws NotFoundException {
    final ClassPool pool = ClassPool.getDefault();
    final Class[] paramTypes = m.getParameterTypes();
    final List paramTypesCtClass = new ArrayList<>();
    for (final Class claz : paramTypes) {
      paramTypesCtClass.add(pool.get(claz.getName()));
    }
    // Build the JVM method descriptor so overloads are disambiguated.
    final String desc =
        Descriptor.ofMethod(
            pool.get(m.getReturnType().getName()),
            paramTypesCtClass.toArray(new CtClass[] {}));
    final CtMethod method = clz.getMethod(m.getName(), desc);
    return method;
  }

  /**
   * Simple helper method to essentially clone the annotations from one class onto another. Only
   * TYPE-targetable annotations are copied.
   */
  public static void copyClassAnnotations(final CtClass oldClass, final CtClass newClass) {
    // Load the existing annotations attributes
    final AnnotationsAttribute classAnnotations =
        (AnnotationsAttribute) oldClass.getClassFile().getAttribute(
            AnnotationsAttribute.visibleTag);

    // Clone them
    final AnnotationsAttribute copyClassAttribute =
        JavassistUtils.cloneAnnotationsAttribute(
            newClass.getClassFile2().getConstPool(),
            classAnnotations,
            ElementType.TYPE);

    // Set the annotations on the new class
    newClass.getClassFile().addAttribute(copyClassAttribute);
  }

  /**
   * Simple helper method to take any FIELD targetable annotations from the method and copy them to
   * the new field. All JCommander annotations can target fields as well as methods, so this should
   * capture them all.
   */
  public static void copyMethodAnnotationsToField(final CtMethod method, final CtField field) {
    // Load the existing annotations attributes
    final AnnotationsAttribute methodAnnotations =
        (AnnotationsAttribute) method.getMethodInfo().getAttribute(AnnotationsAttribute.visibleTag);

    // Clone them
    final AnnotationsAttribute copyMethodAttribute =
        JavassistUtils.cloneAnnotationsAttribute(
            field.getFieldInfo2().getConstPool(),
            methodAnnotations,
            ElementType.FIELD);

    // Set the annotations on the new field
    field.getFieldInfo().addAttribute(copyMethodAttribute);
  }

  /**
   * Allows us to generate unique class names for generated classes.
   *
   * @return the unique class name
   */
  public static String getNextUniqueClassName() {
    return String.format("%s.cli_%s_%d", PREFIX_PACKAGE, uniqueId, objectCounter++);
  }

  /**
   * Allows us to generate unique field names for generated classes. Shares the counter with
   * class-name generation, so field numbers are unique but not contiguous.
   *
   * @return the unique field name
   */
  public static String getNextUniqueFieldName() {
    return String.format("field_%d", objectCounter++);
  }

  /**
   * This will generate a class which is empty. Useful for applying annotations to it.
   *
   * @return an empty CtClass
   */
  public static CtClass generateEmptyClass() {
    // Create the class, so we can start adding the new facade fields to it.
    final ClassPool pool = ClassPool.getDefault();
    return pool.makeClass(getNextUniqueClassName());
  }
}



================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/PrefixedJCommander.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang3.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.IDefaultProvider;
import com.beust.jcommander.JCommander;

/**
 * This special JCommander instance does two things: 1. It initializes special Prefixed argument
 * objects (via addPrefixedObject) and adds them to the JCommanders object list before parsing 2. It
 * overrides the sub commands that are added to make them instances of PrefixedJCommander 3. It
 * lazily initializes child commands using an Initializer interface.
 */
public class PrefixedJCommander extends JCommander {
  private static Logger LOGGER = LoggerFactory.getLogger(PrefixedJCommander.class);

  // Allows us to override the commanders list that's being stored
  // in our parent class (obtained reflectively in the constructor).
  private Map childCommanders;

  // A list of objects to add to the translator before feeding
  // into the internal JCommander object.
  private List prefixedObjects = null;

  private boolean validate = true;
  private boolean allowUnknown = false;
  private IDefaultProvider defaultProvider = null;

  // The map used to translate the variables back and forth.
  private JCommanderTranslationMap translationMap = null;

  // The initializer is used before parse to allow the user
  // to add additional commands/objects to this commander before
  // it is used
  private PrefixedJCommanderInitializer initializer = null;
  private boolean initialized = false;

  /**
   * Creates a new instance of this commander. Reflectively grabs JCommander's private 'commands'
   * map (falling back to the pre-rename 'm_commands') so child commanders can be swapped out.
   */
  @SuppressWarnings("unchecked")
  public PrefixedJCommander() {
    super();
    Field commandsField;
    try {
      // HP Fortify "Access Specifier Manipulation"
      // This field is being modified by trusted code,
      // in a way that is not influenced by user input
      commandsField = JCommander.class.getDeclaredField("commands");
      commandsField.setAccessible(true);
      childCommanders = (Map) commandsField.get(this);
    } catch (NoSuchFieldException | IllegalArgumentException | IllegalAccessException e) {
      // This is a programmer error, and will only happen if another
      // version of JCommander is being used.
      // newer versions of JCommander have renamed the member variables, try the old names
      try {
        commandsField = JCommander.class.getDeclaredField("m_commands");
        commandsField.setAccessible(true);
        childCommanders = (Map) commandsField.get(this);
      } catch (final NoSuchFieldException | IllegalArgumentException | IllegalAccessException e2) {
        LOGGER.error("Another version of JCommander is being used", e2);
        throw new RuntimeException(e);
      }
    }
  }

  /**
   * This function will translate the given prefixed objects into the object list before parsing.
   * This is so that their descriptions will be picked up. Runs at most once per use; a second use
   * of the same commander is rejected.
   */
  private void initialize() {
    if (!initialized) {
      if (translationMap != null) {
        throw new RuntimeException("This PrefixedJCommander has already been used.");
      }

      // Initialize
      if (initializer != null) {
        initializer.initialize(this);
      }

      final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();

      // And these are the input to the translator!
      if (prefixedObjects != null) {
        for (final Object obj : prefixedObjects) {
          translator.addObject(obj);
        }
      }

      translationMap = translator.translate();
      translationMap.createFacadeObjects();

      for (final Object obj : translationMap.getObjects()) {
        addObject(obj);
      }

      // Copy default parameters over for parsing.
      translationMap.transformToFacade();

      initialized = true;
    }
  }

  @Override
  public void addCommand(final String name, final Object object, final String... aliases) {
    // Register with a dummy object first; the created commander is replaced below.
    super.addCommand(name, new Object(), aliases);

    // Super annoying. Can't control creation of JCommander objects, so
    // just replace it. The newly added command is the LAST entry in the map.
    final Iterator> iter = childCommanders.entrySet().iterator();
    Entry last = null;
    while (iter.hasNext()) {
      last = iter.next();
    }
    final PrefixedJCommander comm = new PrefixedJCommander();
    comm.setProgramName(name, aliases);
    // Propagate this commander's settings to the child.
    comm.setDefaultProvider(defaultProvider);
    comm.setAcceptUnknownOptions(allowUnknown);
    comm.setValidate(validate);
    if (object != null) {
      comm.addPrefixedObject(object);
    }
    if (last != null) {
      childCommanders.put(last.getKey(), comm);
    }
  }

  @Override
  public void createDescriptions() {
    // because child commanders are called from a private method parseValues() L796 of JCommander
    // v1.78, children don't get initialized without this override
    initialize();
    super.createDescriptions();
  }

  @Override
  public void parse(final String... args) {
    initialize();
    if (validate) {
      super.parse(args);
    } else {
      super.parseWithoutValidation(args);
    }
    // Copy parsed values back to the original objects (recursively for children).
    complete();
  }

  private void complete() {
    if (initialized) {
      for (JCommander child : childCommanders.values()) {
        if (child instanceof PrefixedJCommander) {
          ((PrefixedJCommander) child).complete();
        }
      }
      translationMap.transformToOriginal();
      // Reset so a stale map can't be reused; see initialize().
      translationMap = null;
      initialized = false;
    }
  }

  /**
   * We replace the parseWithoutValidation() command with the setValidate option that we apply to
   * all children. This is because of bug #267 in JCommander.
   */
  @Override
  public void parseWithoutValidation(final String... args) {
    throw new NotImplementedException("Do not use this method. Use setValidate()");
  }

  @Override
  public void setDefaultProvider(final IDefaultProvider defaultProvider) {
    super.setDefaultProvider(defaultProvider);
    // Remembered locally so it can be propagated to children in addCommand().
    this.defaultProvider = defaultProvider;
  }

  @Override
  public void setAcceptUnknownOptions(final boolean allowUnknown) {
    super.setAcceptUnknownOptions(allowUnknown);
    this.allowUnknown = allowUnknown;
  }

  public void setValidate(final boolean validate) {
    this.validate = validate;
  }

  public List getPrefixedObjects() {
    return prefixedObjects;
  }

  /**
   * Queue an annotated options object for prefix translation before parsing.
   */
  public void addPrefixedObject(final Object object) {
    if (prefixedObjects == null) {
      prefixedObjects = new ArrayList<>();
    }
    prefixedObjects.add(object);
  }

  public JCommanderTranslationMap getTranslationMap() {
    return translationMap;
  }

  public PrefixedJCommanderInitializer getInitializer() {
    return initializer;
  }

  public void setInitializer(final PrefixedJCommanderInitializer initializer) {
    this.initializer = initializer;
  }

  /** Callback used to lazily populate this commander just before it is first used. */
  public interface PrefixedJCommanderInitializer {
    void initialize(PrefixedJCommander commander);
  }
}



================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/prefix/TranslationEntry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Method;
import java.util.Locale;
import java.util.ResourceBundle;
import org.locationtech.geowave.core.cli.Constants;
import org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils;
import com.beust.jcommander.Parameterized;

/**
 * This helper class is just a tuple that allows us to keep track of the parameters, their
 * translated field names, and the original object they map to.
 */
public class TranslationEntry {

  // The JCommander parameter descriptor.
  private final Parameterized param;
  // The original object that owns the parameter.
  private final Object object;
  // Prefix applied to the option names (may be empty).
  private final String prefix;
  // Option names with the prefix applied; computed once in the constructor.
  private final String[] prefixedNames;
  // The reflective field or method the parameter came from.
  private final AnnotatedElement member;

  protected TranslationEntry(
      final Parameterized param,
      final Object object,
      final String prefix,
      final AnnotatedElement member) {
    this.param = param;
    this.object = object;
    this.prefix = prefix;
    this.member = member;
    prefixedNames = addPrefixToNames();
  }

  public Parameterized getParam() {
    return param;
  }

  public Object getObject() {
    return object;
  }

  public String getPrefix() {
    return prefix;
  }

  public boolean isMethod() {
    return member instanceof Method;
  }

  public AnnotatedElement getMember() {
    return member;
  }

  public String[] getPrefixedNames() {
    return prefixedNames;
  }

  /**
   * Return the description for a field's parameter definition. If the parameter has a description
   * key specified, the description will be looked up in the resource bundle. If no description is
   * defined, the default CLI-specified description will be returned.
   *
   * @return the description (never null; empty string when none is available)
   */
  public String getDescription() {
    String description = null;
    // check to see if a description key is specified. If so, perform a
    // lookup in the GeoWave labels properties for a description to use
    // in place of the command line instance
    if ((getParam().getParameter() != null)
        && (getParam().getParameter().descriptionKey() != null)) {
      String descriptionKey = getParam().getParameter().descriptionKey();
      if ((descriptionKey != null) && !"".equals(descriptionKey.trim())) {
        descriptionKey = descriptionKey.trim();
        description = getDescriptionFromResourceBundle(descriptionKey);
      }
    } else if (getParam().isDynamicParameter()
        && (getParam().getWrappedParameter() != null)
        && (getParam().getWrappedParameter().getDynamicParameter() != null)) {
      String descriptionKey =
          getParam().getWrappedParameter().getDynamicParameter().descriptionKey();
      if ((descriptionKey != null) && !"".equals(descriptionKey.trim())) {
        descriptionKey = descriptionKey.trim();
        description = getDescriptionFromResourceBundle(descriptionKey);
      }
    }
    // if no description is set from GeoWave labels properties, use the one
    // set from the field parameter annotation definition
    if ((description == null) || "".equals(description.trim())) {
      if ((getParam().getParameter() != null)
          && (getParam().getParameter().description() != null)) {
        description = getParam().getParameter().description();
      } else if (getParam().isDynamicParameter()) {
        description = getParam().getWrappedParameter().getDynamicParameter().description();
      }
    }
    return description == null ? "" : description;
  }

  /**
   * If a parameter has a defined description key, this method will lookup the description for the
   * specified key.
   *
   * @param descriptionKey Key to lookup for description
   * @return the description, or an empty string when the bundle/key is absent
   */
  private String getDescriptionFromResourceBundle(final String descriptionKey) {
    String description = "";
    final String bundleName = Constants.GEOWAVE_DESCRIPTIONS_BUNDLE_NAME;
    final Locale locale = Locale.getDefault();
    final String defaultResourcePath = bundleName + ".properties";
    final String localeResourcePath = bundleName + "_" + locale.toString() + ".properties";
    // Only attempt the bundle lookup if either resource actually exists on the classpath.
    if ((this.getClass().getResource("/" + defaultResourcePath) != null)
        || (this.getClass().getResource("/" + localeResourcePath) != null)) {
      // associate the default locale to the base properties, rather than
      // the standard resource bundle requiring a separate base
      // properties (GeoWaveLabels.properties) and a
      // default-locale-specific properties
      // (GeoWaveLabels_en_US.properties)
      final ResourceBundle resourceBundle =
          ResourceBundle.getBundle(
              bundleName,
              locale,
              ResourceBundle.Control.getNoFallbackControl(
                  ResourceBundle.Control.FORMAT_PROPERTIES));
      if (resourceBundle != null) {
        if (resourceBundle.containsKey(descriptionKey)) {
          description = resourceBundle.getString(descriptionKey);
        }
      }
    }
    return description;
  }

  /**
   * Specifies if this field is for a password.
   *
   * @return {@code true} if the field is a password
   */
  public boolean isPassword() {
    boolean isPassword = false;
    // check if a converter was specified. If so, if the converter is a
    // GeoWaveBaseConverter instance, check the isPassword value of the
    // converter
    isPassword = isPassword || JCommanderParameterUtils.isPassword(getParam().getParameter());
    // NOTE(review): assumes getWrappedParameter() is non-null here -- TODO confirm
    isPassword =
        isPassword
            || JCommanderParameterUtils.isPassword(getParam().getWrappedParameter().getParameter());
    return isPassword;
  }

  /**
   * Specifies if this field is hidden.
   *
   * @return {@code true} if the field is hidden
   */
  public boolean isHidden() {
    if (getParam().getParameter() != null) {
      return getParam().getParameter().hidden();
    } else if (getParam().getWrappedParameter() != null) {
      return getParam().getWrappedParameter().hidden();
    }
    return false;
  }

  /**
   * Specifies if this field uses a string converter.
   *
   * @return {@code true} if the field uses a string converter.
   */
  public boolean hasStringConverter() {
    if (getParam().getParameter() != null) {
      return getParam().getParameter().converter() != null;
    }
    return false;
  }

  /**
   * Specifies if this field is required.
   *
   * @return {@code true} if this field is required
   */
  public boolean isRequired() {
    boolean isRequired = false;
    isRequired = isRequired || JCommanderParameterUtils.isRequired(getParam().getParameter());
    // NOTE(review): assumes getWrappedParameter() is non-null here -- TODO confirm
    isRequired =
        isRequired
            || JCommanderParameterUtils.isRequired(getParam().getWrappedParameter().getParameter());
    return isRequired;
  }

  /**
   * Whether the given object has a value specified. If the current value is non null, then return
   * true.
   *
   * @return {@code true} if this field has a value
   */
  public boolean hasValue() {
    final Object value = getParam().get(getObject());
    return value != null;
  }

  /**
   * Property name is used to write to properties files, but also to report option names to
   * Geoserver. Derived from the longest prefixed option name with leading non-alphabetic
   * characters (e.g. dashes) stripped.
   *
   * @return the property name
   */
  public String getAsPropertyName() {
    return trimNonAlphabetic(getLongestParam(getPrefixedNames()));
  }

  /**
   * This function will take the configured prefix (a member variable) and add it to all the names
   * list. The prefix is inserted AFTER any leading run of the option's first character (so for
   * "--foo" with prefix "store" the result is "--store.foo").
   *
   * @return the list of new names
   */
  private String[] addPrefixToNames() {
    String[] names = null;
    if (param.getParameter() != null) {
      names = param.getParameter().names();
    } else {
      names = param.getWrappedParameter().names();
    }
    final String[] newNames = new String[names.length];
    for (int i = 0; i < names.length; i++) {
      String item = names[i];
      // Measure the leading run of the first character (e.g. "--" or "-").
      final char subPrefix = item.charAt(0);
      int j = 0;
      while ((j < item.length()) && (item.charAt(j) == subPrefix)) {
        j++;
      }
      final String prePrefix = item.substring(0, j);
      item = item.substring(j);
      newNames[i] =
          String.format(
              "%s%s%s%s",
              prePrefix,
              prefix,
              JCommanderTranslationMap.PREFIX_SEPARATOR,
              item);
    }
    return newNames;
  }

  /**
   * For all the entries in names(), look for the largest one.
   *
   * @param names the names to check
   * @return the longest name (null for an empty array)
   */
  private String getLongestParam(final String[] names) {
    String longest = null;
    for (final String name : names) {
      if ((longest == null) || (name.length() > longest.length())) {
        longest = name;
      }
    }
    return longest;
  }

  /**
   * Remove any non alphabetic character from the beginning of the string. For example, '--version'
   * will become 'version'.
   *
   * @param str the string to trim
   * @return the trimmed string
   */
  private String trimNonAlphabetic(final String str) {
    int i = 0;
    for (i = 0; i < str.length(); i++) {
      if (Character.isAlphabetic(str.charAt(i))) {
        break;
      }
    }
    return str.substring(i);
  }
}



================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/CLIOperationProviderSpi.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.spi; public interface CLIOperationProviderSpi { public Class[] getOperations(); } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/DefaultConfigProviderSpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.spi;

import java.util.Properties;

/** SPI hook allowing a module to contribute its default configuration properties. */
public interface DefaultConfigProviderSpi {
  /**
   * Returns the default configurations from the project
   *
   * @return default configuration
   */
  public Properties getDefaultConfig();
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/OperationEntry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.spi;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.Operation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An operation entry represents an Operation Parsed from SPI, which is then subsequently added to
 * an OperationExecutor for execution.
 */
public final class OperationEntry {
  private static final Logger LOGGER = LoggerFactory.getLogger(OperationEntry.class);

  /** All names (aliases) this operation is invoked by, from the annotation. */
  private final String[] operationNames;
  // Wildcard type parameters restored; the raw `Class` declarations had lost them.
  private final Class<?> operationClass;
  private final Class<?> parentOperationClass;
  /** Children keyed by lower-cased operation name for case-insensitive lookup. */
  private final Map<String, OperationEntry> childrenMap;
  private final List<OperationEntry> children;
  /** True when the operation is an executable {@link Command} (leaf), not just a grouping. */
  private final boolean command;
  /** True when the operation has no parent (null or {@code Object.class}). */
  private final boolean topLevel;

  /**
   * @param operationClass the operation class; must carry the {@link GeowaveOperation} annotation
   * @throws RuntimeException if the annotation is missing
   */
  public OperationEntry(final Class<?> operationClass) {
    this.operationClass = operationClass;
    final GeowaveOperation operation = this.operationClass.getAnnotation(GeowaveOperation.class);
    if (operation == null) {
      throw new RuntimeException(
          "Expected Operation class to use GeowaveOperation annotation: "
              + this.operationClass.getCanonicalName());
    }
    operationNames = operation.name();
    parentOperationClass = operation.parentOperation();
    command = Command.class.isAssignableFrom(operationClass);
    topLevel = (parentOperationClass == null) || (parentOperationClass == Object.class);
    childrenMap = new HashMap<>();
    children = new LinkedList<>();
  }

  public Class<?> getParentOperationClass() {
    return parentOperationClass;
  }

  public String[] getOperationNames() {
    return operationNames;
  }

  public Class<?> getOperationClass() {
    return operationClass;
  }

  public Collection<OperationEntry> getChildren() {
    return Collections.unmodifiableCollection(children);
  }

  /**
   * Registers a child operation under each of its names.
   *
   * @throws RuntimeException on a duplicate child name
   */
  public void addChild(final OperationEntry child) {
    for (final String name : child.getOperationNames()) {
      if (childrenMap.containsKey(name.toLowerCase(Locale.ENGLISH))) {
        throw new RuntimeException(
            "Duplicate operation name: " + name + " for " + getOperationClass().getName());
      }
      childrenMap.put(name.toLowerCase(Locale.ENGLISH), child);
    }
    children.add(child);
  }

  public OperationEntry getChild(final String name) {
    // Fixed: keys are stored lower-cased in addChild(), so normalize the lookup
    // the same way instead of silently missing mixed-case queries.
    return childrenMap.get(name.toLowerCase(Locale.ENGLISH));
  }

  public boolean isCommand() {
    return command;
  }

  public boolean isTopLevel() {
    return topLevel;
  }

  /**
   * Instantiates the operation via its no-arg constructor.
   *
   * @return the new instance, or {@code null} (with a logged error) on failure
   */
  public Operation createInstance() {
    try {
      // getDeclaredConstructor().newInstance() replaces the deprecated
      // Class.newInstance(), which silently propagates checked exceptions.
      return (Operation) operationClass.getDeclaredConstructor().newInstance();
    } catch (final ReflectiveOperationException e) {
      LOGGER.error("Unable to create new instance", e);
      return null;
    }
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/spi/OperationRegistry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.spi; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.ServiceLoader; import org.locationtech.geowave.core.cli.api.Operation; /** * This implementation uses the SPI to load all Operations across the program, including those * exported by plugins. It parses the entries and places them into a cache. */ public class OperationRegistry { private Map, OperationEntry> operationMapByClass = null; /** Singleton pattern allows us to create a version that can be used by the whole application. */ private static class OperationRegistryHolder { public static final OperationRegistry instance = new OperationRegistry(); } /** But also allow the user to create their own if they want it to be sanitized. 
*/ public OperationRegistry() { init(); } public static OperationRegistry getInstance() { return OperationRegistryHolder.instance; } public OperationRegistry(final List entries) { operationMapByClass = new HashMap<>(); for (final OperationEntry entry : entries) { operationMapByClass.put(entry.getOperationClass(), entry); } } private synchronized void init() { if (operationMapByClass == null) { operationMapByClass = new HashMap<>(); // Load SPI elements final Iterator operationProviders = ServiceLoader.load(CLIOperationProviderSpi.class).iterator(); while (operationProviders.hasNext()) { final CLIOperationProviderSpi operationProvider = operationProviders.next(); for (final Class clz : operationProvider.getOperations()) { if (Operation.class.isAssignableFrom(clz)) { final OperationEntry entry = new OperationEntry(clz); operationMapByClass.put(clz, entry); } else { throw new RuntimeException( "CLI operations must be assignable from Operation.class: " + clz.getCanonicalName()); } } } // Build a hierarchy. for (final OperationEntry entry : operationMapByClass.values()) { if (!entry.isTopLevel()) { final OperationEntry parentEntry = operationMapByClass.get(entry.getParentOperationClass()); if (parentEntry == null) { throw new RuntimeException( "Cannot find parent entry for " + entry.getOperationClass().getName()); } if (parentEntry.isCommand()) { throw new RuntimeException( "Cannot have a command be a parent: " + entry.getClass().getCanonicalName()); } parentEntry.addChild(entry); } } } } /** * @return a collection of all entries to allow for iteration and exploration by the caller */ public Collection getAllOperations() { return Collections.unmodifiableCollection(operationMapByClass.values()); } /** * Get the exported service entry by class name. 
* * @param operationClass * @return the operation entry, if it exists */ public OperationEntry getOperation(final Class operationClass) { return operationMapByClass.get(operationClass); } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/ConsoleTablePrinter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.utils; import java.io.IOException; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.apache.commons.lang3.StringUtils; import com.beust.jcommander.internal.Console; /** * A reusable generic facility for displaying console results */ public class ConsoleTablePrinter { private static final int PADDING = 2; private final int minColumnSize; private final int resultsPerPage; private final Console console; /** * CTOR using default values */ public ConsoleTablePrinter(final Console console) { this(5, 24, console); } /** * CTOR * * @param minColumnSize Fixed character width * @param resultsPerPage When exceeded, will prompt for keyboard input to paginate */ public ConsoleTablePrinter( final int minColumnSize, final int resultsPerPage, final Console console) { this.minColumnSize = minColumnSize; this.resultsPerPage = resultsPerPage; this.console = console; } public void println(final String line) { console.println(line); } /** * Display output to the console. Column widths will be calculated for the each page. 
* * @param headers The label which appears at the top of each vertical column * @param rowIter An iterator of rows to display */ public void print(final List headers, final Iterator> rowIter) { List> rows = new LinkedList<>(); while (rowIter.hasNext()) { rows.clear(); while (rowIter.hasNext() && rows.size() < resultsPerPage) { rows.add(rowIter.next()); } int[] columnWidths = getColumnWidths(headers, rows); printHeader(columnWidths, headers); for (int i = 0; i < rows.size(); i++) { printRow(rows.get(i), columnWidths); } printFooter(columnWidths); if (rowIter.hasNext()) { console.println("Press for more results..."); try { System.in.read(); } catch (final IOException ignore) { break; } } } } /** * Display output to the console. Column widths will be calculated for the whole table. * * @param headers The label which appears at the top of each vertical column * @param rows A 2D matrix of values to display */ public void print(final List headers, final List> rows) { int[] columnWidths = getColumnWidths(headers, rows); printHeader(columnWidths, headers); for (int i = 0; i < rows.size(); i++) { if (i > 0 && i % resultsPerPage == 0) { console.println("Press for more results..."); try { System.in.read(); } catch (final IOException ignore) { break; } } printRow(rows.get(i), columnWidths); } printFooter(columnWidths); } private void printHeader(final int[] columnWidths, final List headers) { final StringBuilder line = new StringBuilder("+"); final StringBuilder text = new StringBuilder("|"); for (int i = 0; i < columnWidths.length; i++) { for (int j = 0; j < columnWidths[i]; j++) { line.append("-"); } line.append("+"); final String columnName = headers.get(i); text.append(" ").append(columnName); for (int j = columnName.length() + 1; j < columnWidths[i]; j++) { text.append(" "); } text.append("|"); } console.println(line.toString()); console.println(text.toString()); console.println(line.toString()); } private void printRow(final List result, final int[] columnWidths) { final 
StringBuilder text = new StringBuilder("|"); for (int i = 0; i < columnWidths.length; i++) { final Object value = result.get(i); final String valStr = value == null ? "" : value.toString(); text.append(" ").append(valStr); for (int j = valStr.length() + 1; j < columnWidths[i]; j++) { text.append(" "); } text.append("|"); } console.println(text.toString()); } private void printFooter(final int[] columnWidths) { final StringBuilder line = new StringBuilder("+"); for (int i = 0; i < columnWidths.length; i++) { for (int j = 0; j < columnWidths[i]; j++) { line.append("-"); } line.append("+"); } console.println(line.toString()); } /** * The width of each column is the greatest of (column-label-length, * the-longest-value-in-the-column, minColumnSize) * * @param headers * @param rows * @return */ private int[] getColumnWidths(final List headers, final List> rows) { int[] columnWidths = new int[headers.size()]; // Evaluate the lengths of the column headers for (int i = 0; i < columnWidths.length; i++) { String header = StringUtils.trimToEmpty(headers.get(i)); columnWidths[i] = Math.max(minColumnSize, header.length() + PADDING); } // Check each value. If the length of any single value is > current length of that // column, replace the current column length with the new max value for (List row : rows) { for (int i = 0; i < row.size(); i++) { Object val = row.get(i) == null ? "" : row.get(i); String value = StringUtils.trimToEmpty(String.valueOf(val)); columnWidths[i] = Math.max(columnWidths[i], value.length() + PADDING); } } return columnWidths; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/FileUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.utils;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;
import com.beust.jcommander.ParameterException;

/** Common file utilities, for performing common operations */
public class FileUtils {

  /**
   * Method to format file paths, similar to how command-line substitutions will function. For
   * example, we want to substitute '~' for a user's home directory, or environment variables
   *
   * @param filePath the file path to format (may be null)
   * @return the formatted file path, or null if the input was null
   */
  public static String formatFilePath(String filePath) {
    if (filePath != null) {
      if (filePath.indexOf("~") != -1) {
        // Leaves '~' untouched when the user.home property is unset.
        filePath = filePath.replace("~", System.getProperty("user.home", "~"));
      }
      if (filePath.indexOf("$") != -1) {
        int startIndex = 0;
        // Resolve each '$VARIABLE' occurrence left to right.
        while ((startIndex != -1) && (filePath.indexOf("$", startIndex) != -1)) {
          final String variable = getVariable(filePath.substring(startIndex));
          final String resolvedValue = resolveVariableValue(variable);
          // if variable was not resolved to a system property, no
          // need to perform string replace
          if (!variable.equals(resolvedValue)) {
            filePath = filePath.replace(variable, resolvedValue);
          }
          startIndex = filePath.indexOf("$", (startIndex + 1));
        }
      }
    }
    return filePath;
  }

  /**
   * If an environment variable, or something resembling one, is detected - i.e. starting with '$',
   * try to resolve its actual value for resolving a path
   *
   * @param variable the string to check
   * @return the variable name (including the leading '$')
   */
  private static String getVariable(final String variable) {
    final StringBuilder sb = new StringBuilder();
    char nextChar;
    for (int index = 0; index < variable.length(); index++) {
      nextChar = variable.charAt(index);
      // NOTE(review): with '||', any character other than File.separatorChar
      // satisfies this condition, so the variable name only terminates at a
      // path separator and the isLetterOrDigit test is effectively redundant.
      // Confirm whether '&&' was intended.
      if ((nextChar == '$')
          || Character.isLetterOrDigit(nextChar)
          || (nextChar != File.separatorChar)) {
        sb.append(nextChar);
      } else {
        break;
      }
    }
    return sb.toString();
  }

  /**
   * Resolves a variable against the environment first, then system properties; returns the input
   * unchanged when neither defines it.
   *
   * <p>NOTE(review): the value produced by getVariable() includes the leading '$', so these
   * containsKey lookups search for e.g. "$HOME" rather than "HOME" -- verify this matches the
   * intended key format.
   */
  private static String resolveVariableValue(final String variable) {
    if (System.getenv().containsKey(variable)) {
      return System.getenv(variable);
    } else if (System.getProperties().containsKey(variable)) {
      return System.getProperty(variable);
    }
    return variable;
  }

  /**
   * Reads the content of a file.
   *
   * <p>NOTE(review): only the FIRST line is returned (Scanner.nextLine()), despite the method
   * name suggesting the whole content -- confirm callers only expect single-line files.
   *
   * @param inputFile the file to read
   * @return the first line of the file
   * @throws ParameterException if the file does not exist
   */
  public static String readFileContent(final File inputFile) throws Exception {
    Scanner scanner = null;
    try {
      scanner = new Scanner(inputFile, "UTF-8");
      return scanner.nextLine();
    } catch (final FileNotFoundException e) {
      throw new ParameterException(e);
    } finally {
      if (scanner != null) {
        scanner.close();
      }
    }
  }
}

================================================
FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/FirstElementListComparator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.utils; import java.io.Serializable; import java.util.Comparator; import java.util.List; /** * Performs a StringValue comparison of only the first element of equal-sized Lists of Objects, and * trivial sorting rules for lists. */ public class FirstElementListComparator implements Comparator>, Serializable { private static final long serialVersionUID = 1L; @Override public int compare(List listThis, List listOther) { // Re-factored to this awkward structure because of Spot Bugs if (listThis == null) { if (listOther == null) { return 0; // Consider both null as "equal" } else { return -1; // Null sorts ahead of non-null } } else if (listOther == null) { return 1; // Null sorts ahead of non-null } // At this point, neither list can be null if (listThis.size() != listOther.size()) { return listThis.size() - listOther.size(); // shorter list ahead of longer list } else { // lists are equal length if (listThis.size() > 0) { String strThis = String.valueOf(listThis.get(0) == null ? "" : listThis.get(0)); String strOther = String.valueOf(listOther.get(0) == null ? "" : listOther.get(0)); return strThis.compareTo(strOther); } else { // both lists are length zero return 0; } } } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/JCommanderParameterUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.utils; import java.lang.reflect.Constructor; import org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.IStringConverter; import com.beust.jcommander.Parameter; /** */ public class JCommanderParameterUtils { private static Logger LOGGER = LoggerFactory.getLogger(JCommanderParameterUtils.class); public static boolean isPassword(final Parameter parameter) { boolean isPassword = false; if (parameter != null) { Class superClass = null; final Class> converterClass = parameter.converter(); if (converterClass != null) { superClass = converterClass.getSuperclass(); while ((superClass != null) && (superClass != GeoWaveBaseConverter.class)) { superClass = superClass.getSuperclass(); } } if ((superClass != null) && superClass.equals(GeoWaveBaseConverter.class)) { final GeoWaveBaseConverter converter = getParameterBaseConverter(parameter); if (converter != null) { isPassword = isPassword || converter.isPassword(); } } isPassword = isPassword || parameter.password(); } return isPassword; } public static boolean isRequired(final Parameter parameter) { boolean isRequired = false; if (parameter != null) { if ((parameter.converter() != null) && parameter.converter().getSuperclass().equals(GeoWaveBaseConverter.class)) { final GeoWaveBaseConverter converter = getParameterBaseConverter(parameter); if (converter != null) { isRequired = isRequired || converter.isRequired(); } } isRequired = isRequired || parameter.required(); } return isRequired; } private static GeoWaveBaseConverter 
getParameterBaseConverter(final Parameter parameter) { GeoWaveBaseConverter converter = null; try { final Constructor ctor = parameter.converter().getConstructor(String.class); if (ctor != null) { converter = (GeoWaveBaseConverter) ctor.newInstance(new Object[] {""}); } } catch (final Exception e) { LOGGER.error( "An error occurred getting converter from parameter: " + e.getLocalizedMessage(), e); } return converter; } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/PropertiesUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.utils; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URI; import java.net.URL; import java.util.Map; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings("serial") public class PropertiesUtils implements Serializable { /** * */ private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(PropertiesUtils.class); public static Properties fromFile(final String propertyFilePath) { return fromFile(new File(propertyFilePath)); } public static Properties fromFile(final File propsFile) { Properties properties = null; if ((propsFile != null) && propsFile.exists()) { properties = new Properties(); try { // HP Fortify "Improper Resource Shutdown or Release" false // positive // FileInputStream is closed automatically below as a result of // isr.close(); final InputStreamReader isr = new InputStreamReader(new FileInputStream(propsFile), "UTF-8"); if (isr != null) { properties.load(isr); isr.close(); } } catch (final FileNotFoundException fnfEx) { LOGGER.error( "Specified properties file was not found: [" + fnfEx.getLocalizedMessage() + "]", fnfEx); } catch (final IOException ioEx) { LOGGER.error( "Exception occurred loading specified properties file: [" + ioEx.getLocalizedMessage() + "]", ioEx); } } return properties; } /** * Interface for providing properties to the configuration 
object Allows for objects other than * Maps and Properties to be used as a source for settings */ public static interface Getter extends Serializable { /** * @param name Name of setting to lookup * @return Property value or NULL if it does not exist */ public Object get(String name); }; /** The interface to obtain property values */ private final Getter getter; /** * Constructs a properties map that wraps these properties * * @param properties Map of properties to wrap */ @SuppressWarnings({"rawtypes"}) public PropertiesUtils(final Map properties) { this(new Getter() { /** * */ private static final long serialVersionUID = 1L; @Override public Object get(final String name) { return properties.get(name); } }); } /** * Constructs a properties map that wraps these properties * * @param properties Map of properties to wrap */ public PropertiesUtils(final Properties properties) { this(new Getter() { /** * */ private static final long serialVersionUID = 1L; @Override public Object get(final String name) { return properties != null ? properties.get(name) : null; } }); } /** * Constructs a properties map that wraps these properties * * @param getter Getter interface to properties to map */ public PropertiesUtils(final Getter getter) { this.getter = getter; } /** * Returns if this property exists * * @param key Property key to lookup * @return True if this property key exists */ public boolean exists(final String key) { return this.get(key, Object.class) != null; } /** * Gets a value from the property map * * @param name Property name * @param req Is this property required? 
* @return Value for property */ private Object getPropertyValue(final String name, final boolean req) throws IllegalArgumentException { Object val = null; if (getter != null) { val = getter.get(name); // Treat empty strings as null if ((val != null) && (val instanceof String) && ((String) val).isEmpty()) { val = null; } // HP Fortify "Privacy Violation" false positive // The information in the Properties file is not private or // sensitive if ((val == null) && req) { throw new IllegalArgumentException("Missing required property: " + name); } } return val; } /** * Get a required value from the map - throws an IllegalArgumentException if the value does not * exist * * @param Data type for the return value * @param name Property name * @param clazz Class for type X * @return Value from the property map * @throws IllegalArgumentException Thrown if no value is found */ public final X get(final String name, final Class clazz) throws IllegalArgumentException { final Object val = getPropertyValue(name, true); return ValueConverter.convert(val, clazz); } /** * Get a required value from the map - returns the provided default value if the value is not * found * * @param Data type for the return value * @param name Property name * @param def Default value to return if the map does not include the value * @param clazz Class for type X * @return Value from the property map */ public final X get(final String name, final X def, final Class clazz) { final Object val = getPropertyValue(name, false); return (val == null) ? 
def : (X) ValueConverter.convert(val, clazz); } // ************************************************************************ // ************************************************************************ // ************************************************************************ // The following are all convience methods for get of various types // ************************************************************************ // ************************************************************************ // ************************************************************************ /** * Return the property value as a string * * @param name Property name * @return Property value converted to a string * @throws IllegalArgumentException */ public final String getString(final String name) throws IllegalArgumentException { return get(name, String.class); } /** * Return the property value as a string if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to a string */ public final String getString(final String name, final String def) { return get(name, def, String.class); } /** * Return the property value as an integer * * @param name Property name * @return Property value converted to an integer * @throws IllegalArgumentException */ public final Integer getInt(final String name) throws IllegalArgumentException { return get(name, Integer.class); } /** * Return the property value as an integer if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to an integer */ public final Integer getInt(final String name, final Integer def) { return get(name, def, Integer.class); } /** * Return the property value as a long * * @param name Property name * @return Property value converted to a long * @throws IllegalArgumentException */ 
public final Long getLong(final String name) throws IllegalArgumentException {
  return get(name, Long.class);
}

/**
 * Return the property value as a long if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to a long
 */
public final Long getLong(final String name, final Long def) {
  return get(name, def, Long.class);
}

/**
 * Return the property value as a float
 *
 * @param name Property name
 * @return Property value converted to a float
 * @throws IllegalArgumentException if the property is missing
 */
public final Float getFloat(final String name) throws IllegalArgumentException {
  return get(name, Float.class);
}

/**
 * Return the property value as a float if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to a float
 */
public final Float getFloat(final String name, final Float def) {
  return get(name, def, Float.class);
}

/**
 * Return the property value as a double
 *
 * @param name Property name
 * @return Property value converted to a double
 * @throws IllegalArgumentException if the property is missing
 */
public final Double getDouble(final String name) throws IllegalArgumentException {
  return get(name, Double.class);
}

/**
 * Return the property value as a double if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to a double
 */
public final Double getDouble(final String name, final Double def) {
  return get(name, def, Double.class);
}

/**
 * Return the property value as a BigInteger
 *
 * @param name Property name
 * @return Property value converted to a BigInteger
 * @throws IllegalArgumentException if the property is missing
 */
public final BigInteger getBigInteger(final String name) throws IllegalArgumentException {
  return get(name, BigInteger.class);
}

/**
 * Return the property value as a BigInteger if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to a big integer
 */
public final BigInteger getBigInteger(final String name, final BigInteger def) {
  return get(name, def, BigInteger.class);
}

/**
 * Return the property value as a BigDecimal
 *
 * @param name Property name
 * @return Property value converted to a big decimal
 * @throws IllegalArgumentException if the property is missing
 */
public final BigDecimal getBigDecimal(final String name) throws IllegalArgumentException {
  return get(name, BigDecimal.class);
}

/**
 * Return the property value as a BigDecimal if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to a big decimal
 */
public final BigDecimal getBigDecimal(final String name, final BigDecimal def) {
  return get(name, def, BigDecimal.class);
}

/**
 * Return the property value as a Byte
 *
 * @param name Property name
 * @return Property value converted to a Byte
 * @throws IllegalArgumentException if the property is missing
 */
public final Byte getByte(final String name) throws IllegalArgumentException {
  return get(name, Byte.class);
}

/**
 * Return the property value as a Byte if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to a Byte
 */
public final Byte getByte(final String name, final Byte def) {
  return get(name, def, Byte.class);
}

/**
 * Return the property value as a boolean
 *
 * @param name Property name
 * @return Property value converted to a boolean
 * @throws IllegalArgumentException if the property is missing
 */
public final Boolean getBoolean(final String name) throws IllegalArgumentException {
  return get(name, Boolean.class);
}

/**
 * Return the
property value as a boolean if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to a boolean */ public final Boolean getBoolean(final String name, final Boolean def) { return get(name, def, Boolean.class); } /** * Return the property value as a URI * * @param name Property name * @return Property value converted to a URI * @throws IllegalArgumentException */ public final URI getURI(final String name) throws IllegalArgumentException { return get(name, URI.class); } /** * Return the property value as a URI if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to a URI */ public final URI getURI(final String name, final URI def) { return get(name, def, URI.class); } /** * Return the property value as a URL * * @param name Property name * @return Property value converted to a URL * @throws IllegalArgumentException */ public final URL getURL(final String name) throws IllegalArgumentException { return get(name, URL.class); } /** * Return the property value as a URL if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to a URL */ public final URL getURI(final String name, final URL def) { return get(name, def, URL.class); } /** * Return the property value as a string array * * @param name Property name * @return Property value converted to an array of strings * @throws IllegalArgumentException */ public final String[] getStringArray(final String name) throws IllegalArgumentException { return get(name, String[].class); } /** * Return the property value as a string array if it exists, otherwise return the default value * * @param name Property name * @param def Default 
value to return if the map does not include the value
 * @return Property value converted to an array of strings
 */
public final String[] getStringArray(final String name, final String[] def) {
  return get(name, def, String[].class);
}

/**
 * Return the property value as an integer array
 *
 * @param name Property name
 * @return Property value converted to an array of integers
 * @throws IllegalArgumentException if the property is missing
 */
public final Integer[] getIntArray(final String name) throws IllegalArgumentException {
  return get(name, Integer[].class);
}

/**
 * Return the property value as an integer array if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to an array of integers
 */
public final Integer[] getIntArray(final String name, final Integer[] def) {
  return get(name, def, Integer[].class);
}

/**
 * Return the property value as a long array
 *
 * @param name Property name
 * @return Property value converted to an array of long values
 * @throws IllegalArgumentException if the property is missing
 */
public final Long[] getLongArray(final String name) throws IllegalArgumentException {
  return get(name, Long[].class);
}

/**
 * Return the property value as a long array if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to an array of long values
 */
public final Long[] getLongArray(final String name, final Long[] def) {
  return get(name, def, Long[].class);
}

/**
 * Return the property value as a float array
 *
 * @param name Property name
 * @return Property value converted to an array of float values
 * @throws IllegalArgumentException if the property is missing
 */
public final Float[] getFloatArray(final String name) throws IllegalArgumentException {
  return get(name, Float[].class);
}

/**
 * Return the property value as a float array if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to an array of float values
 */
public final Float[] getFloatArray(final String name, final Float[] def) {
  return get(name, def, Float[].class);
}

/**
 * Return the property value as a double array
 *
 * @param name Property name
 * @return Property value converted to an array of double values
 * @throws IllegalArgumentException if the property is missing
 */
public final Double[] getDoubleArray(final String name) throws IllegalArgumentException {
  return get(name, Double[].class);
}

/**
 * Return the property value as a double array if it exists, otherwise return the default value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to an array of double values
 */
public final Double[] getDoubleArray(final String name, final Double[] def) {
  return get(name, def, Double[].class);
}

/**
 * Return the property value as a BigInteger array
 *
 * @param name Property name
 * @return Property value converted to an array of big integers
 * @throws IllegalArgumentException if the property is missing
 */
public final BigInteger[] getBigIntegerArray(final String name) throws IllegalArgumentException {
  return get(name, BigInteger[].class);
}

/**
 * Return the property value as a BigInteger array if it exists, otherwise return the default
 * value
 *
 * @param name Property name
 * @param def Default value to return if the map does not include the value
 * @return Property value converted to an array of big integers
 */
public final BigInteger[] getBigIntegerArray(final String name, final BigInteger[] def) {
  return get(name, def, BigInteger[].class);
}

/**
 * Return the property value as a BigDecimal array
 *
 * @param name Property name
 * @return Property value converted to an array of big decimals
 * @throws IllegalArgumentException if the property is missing
 */
public final BigDecimal[] getBigDecimalArray(final String name) throws
IllegalArgumentException { return get(name, BigDecimal[].class); } /** * Return the property value as a BigDecimal array if it exists, otherwise return the default * value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to an array of big decimals */ public final BigDecimal[] getBigDecimalArray(final String name, final BigDecimal[] def) { return get(name, def, BigDecimal[].class); } /** * Return the property value as a URI array * * @param name Property name * @return Property value converted to an array of URI's * @throws IllegalArgumentException */ public final URI[] getURIArray(final String name) throws IllegalArgumentException { return get(name, URI[].class); } /** * Return the property value as a URI array if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to an array of URI's */ public final URI[] getURIArray(final String name, final URI[] def) { return get(name, def, URI[].class); } /** * Return the property value as a URI array * * @param name Property name * @return Property value converted to an array of URI's * @throws IllegalArgumentException */ public final URI[] getURLArray(final String name) throws IllegalArgumentException { return get(name, URI[].class); } /** * Return the property value as a URI array if it exists, otherwise return the default value * * @param name Property name * @param def Default value to return if the map does not include the value * @return Property value converted to an array of URI's */ public final URI[] getURLArray(final String name, final URI[] def) { return get(name, def, URI[].class); } } ================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/URLUtils.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
/** */
package org.locationtech.geowave.core.cli.utils;

import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

/** Utility methods relating to URLs, particularly validation focused */
public class URLUtils {
  private static final String HTTP = "http";
  private static final String HTTPS = "https";

  // Deliberately mutable: setSchemes(String[]) lets callers replace the accepted scheme list
  private static String[] schemes = {HTTP, HTTPS};

  /**
   * Normalizes a URL string: returns the input unchanged when it is already a well-formed URL;
   * otherwise prepends "http://" when no accepted scheme is present, makes the implicit default
   * port explicit, and switches the scheme to https when the resolved port ends in "443".
   *
   * @param url the URL string to normalize (may be null; null is returned as-is)
   * @return the normalized URL string
   * @throws URISyntaxException if the (possibly prefixed) string is not a valid URI
   * @throws MalformedURLException if the URI cannot be converted to a URL
   */
  public static String getUrl(String url) throws URISyntaxException, MalformedURLException {
    if (url != null) {
      // Already a valid URL - nothing to normalize
      if (isValidURL(url)) {
        return url;
      }
      final boolean valid = isValidScheme(url);
      if (!valid) {
        // No accepted scheme prefix found; default to http
        url = HTTP + "://" + url;
      }
      URI uri = new URI(url);
      if (uri.getScheme() == null) {
        uri = new URI(HTTP + "://" + url);
      }
      URL targetURL = uri.toURL();
      if (targetURL.getPort() == -1) {
        // Replace the implicit port (-1) with the protocol's default port
        targetURL =
            new URL(
                targetURL.getProtocol(),
                targetURL.getHost(),
                targetURL.getDefaultPort(),
                // HP Fortify "Path Traversal" False Positive
                // User input is not used at any point to determine the
                // file path.
                // The information is hard code in a single location and
                // accessible
                // though this method.
                targetURL.getFile());
      }
      // NOTE(review): endsWith("443") also matches ports such as 8443 or 10443 - presumably
      // intentional (treating TLS-alternate ports as https); confirm before changing.
      if (String.valueOf(targetURL.getPort()).endsWith("443")) {
        targetURL =
            new URL(
                HTTPS,
                targetURL.getHost(),
                targetURL.getPort(),
                // HP Fortify "Path Traversal" False Positive
                // User input is not used at any point to determine the
                // file path.
                // The information is hard code in a single location and
                // accessible
                // though this method.
                targetURL.getFile());
      }
      return targetURL.toString();
    }
    return url;
  }

  /**
   * Validate a URL to quickly check if it is in proper URL format
   *
   * @param url url to validate
   * @return true if valid, false otherwise
   */
  private static boolean isValidURL(final String url) {
    URL targetURL = null;
    try {
      targetURL = new URL(url);
    } catch (final MalformedURLException e) {
      return false;
    }
    // Round-trip through URI to catch strings java.net.URL accepts but URI rejects
    try {
      targetURL.toURI();
    } catch (final URISyntaxException e) {
      return false;
    }
    return true;
  }

  /**
   * Checks whether the string starts with one of the accepted schemes followed by "://"
   * (case-insensitive).
   *
   * @param url the URL string to inspect
   * @return true when an accepted scheme prefix is present
   */
  private static boolean isValidScheme(final String url) {
    final int ix = url.indexOf("://");
    if (ix == -1) {
      return false;
    }
    final String inputScheme = url.substring(0, ix);
    for (final String scheme : getSchemes()) {
      if (inputScheme.equalsIgnoreCase(scheme)) {
        return true;
      }
    }
    return false;
  }

  /** @return the schemes */
  public static String[] getSchemes() {
    return schemes;
  }

  /** @param schemes the schemes to set */
  public static void setSchemes(final String[] schemes) {
    URLUtils.schemes = schemes;
  }
}
================================================ FILE: core/cli/src/main/java/org/locationtech/geowave/core/cli/utils/ValueConverter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.utils; import org.apache.commons.beanutils.ConvertUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.sf.json.JSONArray; import net.sf.json.JSONObject; /** Used for general purpose value conversion via appache commons ConvertUtils */ public class ValueConverter { private static Logger LOGGER = LoggerFactory.getLogger(ValueConverter.class); /** Private constructor to prevent accidental instantiation */ private ValueConverter() {} /** * Convert value into the specified type * * @param Class to convert to * @param value Value to convert from * @param targetType Type to convert into * @return The converted value */ @SuppressWarnings("unchecked") public static X convert(final Object value, final Class targetType) { // HP Fortify "Improper Output Neutralization" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway LOGGER.trace("Attempting to convert " + value + " to class type " + targetType); if (value != null) { // if object is already in intended target type, no need to convert // it, just return as it is if (value.getClass() == targetType) { return (X) value; } if ((value.getClass() == JSONObject.class) || (value.getClass() == JSONArray.class)) { return (X) value; } } final String strValue = String.valueOf(value); final Object retval = ConvertUtils.convert(strValue, targetType); return (X) retval; } } ================================================ FILE: core/cli/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi 
================================================ org.locationtech.geowave.core.cli.operations.TopLevelOperationProvider org.locationtech.geowave.core.cli.operations.config.ConfigOperationProvider org.locationtech.geowave.core.cli.operations.util.UtilOperationProvider ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/VersionUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli; import static org.junit.Assert.assertEquals; import org.junit.Test; import com.beust.jcommander.JCommander; public class VersionUtilsTest { @Test public void testVersion() { final String version = null; // change this value when it gives a // version assertEquals( version, // change this value when it gives a version VersionUtils.getVersion(new JCommander().getConsole())); } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/api/ServiceEnableCommandTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.api; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand.HttpMethod; public class ServiceEnableCommandTest { private class ServiceEnabledCommand_TESTING extends ServiceEnabledCommand { private final HttpMethod method; public ServiceEnabledCommand_TESTING(final HttpMethod method) { this.method = method; } @Override public void execute(final OperationParams params) throws Exception {} @Override public Object computeResults(final OperationParams params) throws Exception { return null; } @Override public HttpMethod getMethod() { return method; } } @Before public void setUp() throws Exception {} @After public void tearDown() throws Exception {} @Test public void defaultSuccessStatusIs200ForGET() { final ServiceEnabledCommand_TESTING classUnderTest = new ServiceEnabledCommand_TESTING(HttpMethod.GET); Assert.assertEquals(true, classUnderTest.successStatusIs200()); } @Test public void defaultSuccessStatusIs201ForPOST() { final ServiceEnabledCommand_TESTING classUnderTest = new ServiceEnabledCommand_TESTING(HttpMethod.POST); Assert.assertEquals(false, classUnderTest.successStatusIs200()); } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/ExplainCommandTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations; import static org.junit.Assert.assertEquals; import org.junit.Test; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.cli.spi.OperationRegistry; public class ExplainCommandTest { @Test public void testPrepare() { final String[] args = {"explain"}; final OperationRegistry registry = OperationRegistry.getInstance(); final OperationParser parser = new OperationParser(registry); final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args); final ExplainCommand expcommand = new ExplainCommand(); expcommand.prepare(params); assertEquals(false, params.isValidate()); assertEquals(true, params.isAllowUnknown()); } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/HelpCommandTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations; import static org.junit.Assert.assertEquals; import org.junit.Test; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.cli.spi.OperationRegistry; public class HelpCommandTest { @Test public void testPrepare() { final String[] args = {"help"}; final OperationRegistry registry = OperationRegistry.getInstance(); final OperationParser parser = new OperationParser(registry); final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args); final HelpCommand helpcommand = new HelpCommand(); helpcommand.prepare(params); assertEquals(false, params.isValidate()); assertEquals(true, params.isAllowUnknown()); } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/config/SetCommandTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations.config; import static org.junit.Assert.assertEquals; import java.io.File; import java.util.Properties; import org.junit.Test; import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.cli.spi.OperationRegistry; public class SetCommandTest { @Test public void testExecute() { final String[] args = {"config", "set", "name", "value"}; final OperationRegistry registry = OperationRegistry.getInstance(); final OperationParser parser = new OperationParser(registry); final CommandLineOperationParams params = parser.parse(GeoWaveTopLevelSection.class, args); final SetCommand setcommand = new SetCommand(); final String name = "name"; final String value = "value"; setcommand.setParameters(name, value); setcommand.prepare(params); setcommand.execute(params); final File f = (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT); final Properties p = ConfigOptions.loadProperties(f); assertEquals(value, p.getProperty(name)); } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/config/options/ConfigOptionsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.operations.config.options; import static org.junit.Assert.assertEquals; import java.io.File; import java.util.Properties; import org.junit.Test; import com.beust.jcommander.JCommander; public class ConfigOptionsTest { @Test public void testWriteProperty() { final String parent = String.format("%s", System.getProperty("user.home")); final File path = new File(parent); final File configfile = ConfigOptions.formatConfigFile("0", path); final Properties prop = new Properties(); final String key = "key"; final String value = "value"; prop.setProperty(key, value); final boolean success = ConfigOptions.writeProperties(configfile, prop, new JCommander().getConsole()); if (success) { final Properties loadprop = ConfigOptions.loadProperties(configfile); assertEquals(value, loadprop.getProperty(key)); } } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/operations/config/security/SecurityUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.cli.operations.config.security; import static org.junit.Assert.assertEquals; import java.io.File; import org.junit.Test; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils; import com.beust.jcommander.JCommander; import com.beust.jcommander.internal.Console; /** Unit test cases for encrypting and decrypting values */ public class SecurityUtilsTest { @Test public void testEncryptionDecryption() throws Exception { final String rawInput = "geowave"; Console console = new JCommander().getConsole(); final File tokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig( ConfigOptions.getDefaultPropertyFile(console)); if ((tokenFile != null) && tokenFile.exists()) { final String encryptedValue = SecurityUtils.encryptAndHexEncodeValue(rawInput, tokenFile.getCanonicalPath(), console); final String decryptedValue = SecurityUtils.decryptHexEncodedValue( encryptedValue, tokenFile.getCanonicalPath(), console); assertEquals(decryptedValue, rawInput); } } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/parser/OperationParserTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.parser; import java.util.ArrayList; import java.util.List; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.spi.OperationEntry; import org.locationtech.geowave.core.cli.spi.OperationRegistry; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; public class OperationParserTest { @Test public void testParseTopLevel() { final OperationEntry op1Entry = new OperationEntry(Op1.class); final OperationEntry op2Entry = new OperationEntry(Op2.class); op1Entry.addChild(op2Entry); final List entries = new ArrayList<>(); entries.add(op1Entry); entries.add(op2Entry); final OperationParser parser = new OperationParser(new OperationRegistry(entries)); final CommandLineOperationParams params = parser.parse(Op1.class, new String[] {"op", "--username", "user", "--password", "blah"}); final Op2 op2 = (Op2) params.getOperationMap().get("op"); Assert.assertEquals("blah", op2.args.passWord); Assert.assertEquals("user", op2.args.userName); } @Test public void testParseArgs() { final OperationParser parser = new OperationParser(); final Args args = new Args(); parser.addAdditionalObject(args); parser.parse(new String[] {"--username", "user", "--password", "blah"}); Assert.assertEquals("blah", args.passWord); Assert.assertEquals("user", args.userName); } @Test public void 
testParseOperation() { final OperationEntry op1Entry = new OperationEntry(Op1.class); final OperationEntry op2Entry = new OperationEntry(Op2.class); op1Entry.addChild(op2Entry); final List entries = new ArrayList<>(); entries.add(op1Entry); entries.add(op2Entry); final OperationParser parser = new OperationParser(new OperationRegistry(entries)); final Op2 op2 = new Op2(); parser.parse(op2, new String[] {"--username", "user", "--password", "blah"}); Assert.assertEquals("blah", op2.args.passWord); Assert.assertEquals("user", op2.args.userName); } public static class Args { @Parameter(names = "--username") private String userName; @Parameter(names = "--password") private String passWord; } @GeowaveOperation(name = "toplevel") public static class Op1 extends DefaultOperation { } @GeowaveOperation(name = "op", parentOperation = Op1.class) public static class Op2 extends DefaultOperation implements Command { @ParametersDelegate private final Args args = new Args(); @Override public void execute(final OperationParams params) throws Exception {} } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JCommanderPrefixTranslatorTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.prefix; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.cli.annotations.PrefixParameter; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; public class JCommanderPrefixTranslatorTest { private JCommander prepareCommander(final JCommanderTranslationMap map) { final JCommander commander = new JCommander(); map.createFacadeObjects(); for (final Object obj : map.getObjects()) { commander.addObject(obj); } return commander; } @Test public void testNullDelegate() { final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(new NullDelegate()); final JCommander commander = prepareCommander(translator.translate()); commander.parse(); } @Test public void testMapDelegatesPrefix() { final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); final Arguments args = new Arguments(); args.argChildren.put("abc", new ArgumentChildren()); args.argChildren.put("def", new ArgumentChildren()); translator.addObject(args); final JCommanderTranslationMap map = translator.translate(); final JCommander commander = prepareCommander(map); commander.parse("--abc.arg", "5", "--def.arg", "blah"); map.transformToOriginal(); Assert.assertEquals("5", args.argChildren.get("abc").arg); Assert.assertEquals("blah", args.argChildren.get("def").arg); } @Test public void testCollectionDelegatesPrefix() { final JCommanderPrefixTranslator translator 
= new JCommanderPrefixTranslator(); final ArgumentsCollection args = new ArgumentsCollection(); args.argChildren.add(new ArgumentChildren()); args.argChildren.add(new ArgumentChildrenOther()); translator.addObject(args); final JCommanderTranslationMap map = translator.translate(); final JCommander commander = prepareCommander(map); commander.parse("--arg", "5", "--arg2", "blah"); map.transformToOriginal(); Assert.assertEquals("5", ((ArgumentChildren) args.argChildren.get(0)).arg); Assert.assertEquals("blah", ((ArgumentChildrenOther) args.argChildren.get(1)).arg2); } @Test public void testPrefixParameter() { final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); final PrefixedArguments args = new PrefixedArguments(); translator.addObject(args); final JCommanderTranslationMap map = translator.translate(); final JCommander commander = prepareCommander(map); commander.parse("--abc.arg", "5", "--arg", "blah"); map.transformToOriginal(); Assert.assertEquals("5", args.child.arg); Assert.assertEquals("blah", args.blah); } public static class PrefixedArguments { @ParametersDelegate @PrefixParameter(prefix = "abc") private final ArgumentChildren child = new ArgumentChildren(); @Parameter(names = "--arg") private String blah; } public static class NullDelegate { @ParametersDelegate private final ArgumentChildren value = null; } public static class ArgumentsCollection { @ParametersDelegate private final List argChildren = new ArrayList<>(); } public static class Arguments { @ParametersDelegate private final Map argChildren = new HashMap<>(); } public static class ArgumentChildren { @Parameter(names = "--arg") private String arg; } public static class ArgumentChildrenOther { @Parameter(names = "--arg2") private String arg2; } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JCommanderPropertiesTransformerTest.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.prefix; import java.util.HashMap; import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.cli.annotations.PrefixParameter; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; public class JCommanderPropertiesTransformerTest { @Test public void testWithoutDelegate() { final Args args = new Args(); args.passWord = "blah"; args.userName = "user"; final JCommanderPropertiesTransformer transformer = new JCommanderPropertiesTransformer(); transformer.addObject(args); final Map props = new HashMap<>(); transformer.transformToMap(props); Assert.assertEquals(2, props.size()); Assert.assertEquals("blah", props.get("password")); Assert.assertEquals("user", props.get("username")); } @Test public void testWithDelegate() { final DelegateArgs args = new DelegateArgs(); args.args.passWord = "blah"; args.args.userName = "user"; args.additional = "add"; final JCommanderPropertiesTransformer transformer = new JCommanderPropertiesTransformer(); transformer.addObject(args); final Map props = new HashMap<>(); transformer.transformToMap(props); Assert.assertEquals(3, props.size()); Assert.assertEquals("blah", props.get("password")); Assert.assertEquals("user", props.get("username")); Assert.assertEquals("add", props.get("additional")); } @Test public void testWithPrefix() { final DelegatePrefixArgs args = new DelegatePrefixArgs(); args.args.passWord = "blah"; args.args.userName = "user"; args.additional = "add"; final JCommanderPropertiesTransformer transformer = new JCommanderPropertiesTransformer(); 
transformer.addObject(args); final Map props = new HashMap<>(); transformer.transformToMap(props); Assert.assertEquals(3, props.size()); Assert.assertEquals("blah", props.get("abc.password")); Assert.assertEquals("user", props.get("abc.username")); Assert.assertEquals("add", props.get("additional")); } public class Args { @Parameter(names = "--username") private String userName; @Parameter(names = "--password") private String passWord; } public class DelegateArgs { @ParametersDelegate private final Args args = new Args(); @Parameter(names = "--additional") private String additional; } public class DelegatePrefixArgs { @ParametersDelegate @PrefixParameter(prefix = "abc") private final Args args = new Args(); @Parameter(names = "--additional") private String additional; } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JCommanderTranslationMapTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.prefix; import org.junit.Assert; import org.junit.Test; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; public class JCommanderTranslationMapTest { @Test public void testCreateFacadesWithoutDelegate() { final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(new ArgumentChildren()); final JCommanderTranslationMap map = translator.translate(); map.createFacadeObjects(); Assert.assertEquals(1, map.getObjects().size()); } @Test public void testCreateFacadesWithDelegate() { final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(new Arguments()); final JCommanderTranslationMap map = translator.translate(); map.createFacadeObjects(); Assert.assertEquals(2, map.getObjects().size()); } public static class Arguments { @ParametersDelegate private final ArgumentChildren children = new ArgumentChildren(); @Parameter(names = "--arg2") private String arg2; } public static class ArgumentChildren { @Parameter(names = "--arg") private String arg; } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/JavassistUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.prefix; import static org.junit.Assert.fail; import java.lang.reflect.Method; import org.junit.Assert; import org.junit.Test; import javassist.CannotCompileException; import javassist.ClassPool; import javassist.CtClass; import javassist.CtField; import javassist.CtMethod; import javassist.CtNewMethod; import javassist.NotFoundException; import javassist.bytecode.AnnotationsAttribute; import javassist.bytecode.ConstPool; import javassist.bytecode.annotation.Annotation; import javassist.bytecode.annotation.IntegerMemberValue; public class JavassistUtilsTest { @Test public void testCloneAnnotationsAttribute() { final CtClass clz = ClassPool.getDefault().makeClass("testCloneAnnotationsAttribute"); final CtMethod ctmethod = addNewMethod(clz, "origMethod"); final AnnotationsAttribute attr = annotateMethod(ctmethod, "origAnno", 135); final AnnotationsAttribute clonedAttr = JavassistUtils.cloneAnnotationsAttribute( ctmethod.getMethodInfo().getConstPool(), attr, java.lang.annotation.ElementType.METHOD); Assert.assertEquals( 135, ((IntegerMemberValue) clonedAttr.getAnnotation("java.lang.Integer").getMemberValue( "origAnno")).getValue()); } private static class FindMethodTest { public void method1() { return; } public void methodA() { return; } } @Test public void testFindMethod() { final CtClass ctclass = ClassPool.getDefault().makeClass("testFindMethodClass"); addNewMethod(ctclass, "method1"); addNewMethod(ctclass, "method2"); Method m = null; try { m = FindMethodTest.class.getMethod("method1"); } catch (NoSuchMethodException | SecurityException e1) { e1.printStackTrace(); return; } try 
{ final CtMethod foundMethod = JavassistUtils.findMethod(ctclass, m); Assert.assertEquals("method1", foundMethod.getName()); } catch (final NotFoundException e) { e.printStackTrace(); fail("Could not find method in CtClass"); } } @Test public void testCopyClassAnnontations() { final CtClass fromClass = ClassPool.getDefault().makeClass("fromClass"); final CtClass toClass = ClassPool.getDefault().makeClass("toClass"); // Create class annotations final ConstPool fromPool = fromClass.getClassFile().getConstPool(); final AnnotationsAttribute attr = new AnnotationsAttribute(fromPool, AnnotationsAttribute.visibleTag); final Annotation anno = new Annotation("java.lang.Integer", fromPool); anno.addMemberValue("copyClassName", new IntegerMemberValue(fromPool, 246)); attr.addAnnotation(anno); fromClass.getClassFile().addAttribute(attr); JavassistUtils.copyClassAnnotations(fromClass, toClass); final Annotation toAnno = ((AnnotationsAttribute) toClass.getClassFile().getAttribute( AnnotationsAttribute.visibleTag)).getAnnotation("java.lang.Integer"); Assert.assertEquals( 246, ((IntegerMemberValue) toAnno.getMemberValue("copyClassName")).getValue()); } @Test public void testCopyMethodAnnotationsToField() { final CtClass ctclass = ClassPool.getDefault().makeClass("test"); final CtMethod createdMethod = addNewMethod(ctclass, "doNothing"); annotateMethod(createdMethod, "value", 123); final CtField createdField = addNewField(ctclass, "toField"); JavassistUtils.copyMethodAnnotationsToField(createdMethod, createdField); IntegerMemberValue i = null; for (final Annotation annot : ((AnnotationsAttribute) createdField.getFieldInfo().getAttribute( AnnotationsAttribute.visibleTag)).getAnnotations()) { i = (IntegerMemberValue) annot.getMemberValue("value"); if (i != null) { break; } } if ((i == null) || (i.getValue() != 123)) { fail("Expected annotation value 123 but found " + i); } } @Test public void testGetNextUniqueClassName() { final String unique1 = 
JavassistUtils.getNextUniqueClassName(); final String unique2 = JavassistUtils.getNextUniqueClassName(); Assert.assertFalse(unique1.equals(unique2)); } @Test public void testGetNextUniqueFieldName() { final String unique1 = JavassistUtils.getNextUniqueFieldName(); final String unique2 = JavassistUtils.getNextUniqueFieldName(); Assert.assertFalse(unique1.equals(unique2)); } @Test public void testGenerateEmptyClass() { final CtClass emptyClass = JavassistUtils.generateEmptyClass(); final CtClass anotherEmptyClass = JavassistUtils.generateEmptyClass(); Assert.assertFalse(emptyClass.equals(anotherEmptyClass)); // test empty class works as expected final CtMethod method = addNewMethod(emptyClass, "a"); annotateMethod(method, "abc", 7); final CtField field = addNewField(emptyClass, "d"); annotateField(field, "def", 9); Assert.assertEquals( 7, ((IntegerMemberValue) ((AnnotationsAttribute) method.getMethodInfo().getAttribute( AnnotationsAttribute.visibleTag)).getAnnotation("java.lang.Integer").getMemberValue( "abc")).getValue()); Assert.assertEquals( 9, ((IntegerMemberValue) ((AnnotationsAttribute) field.getFieldInfo().getAttribute( AnnotationsAttribute.visibleTag)).getAnnotation("java.lang.Integer").getMemberValue( "def")).getValue()); } class TestClass { int field1; String field2; public void doNothing() { return; } } private CtMethod addNewMethod(final CtClass clz, final String methodName) { CtMethod ctmethod = null; try { ctmethod = CtNewMethod.make("void " + methodName + "(){ return; }", clz); clz.addMethod(ctmethod); } catch (final CannotCompileException e) { e.printStackTrace(); } if (ctmethod == null) { fail("Could not create method"); } return ctmethod; } private AnnotationsAttribute annotateMethod( final CtMethod ctmethod, final String annotationName, final int annotationValue) { final AnnotationsAttribute attr = new AnnotationsAttribute( ctmethod.getMethodInfo().getConstPool(), AnnotationsAttribute.visibleTag); final Annotation anno = new 
Annotation("java.lang.Integer", ctmethod.getMethodInfo().getConstPool()); anno.addMemberValue( annotationName, new IntegerMemberValue(ctmethod.getMethodInfo().getConstPool(), annotationValue)); attr.addAnnotation(anno); ctmethod.getMethodInfo().addAttribute(attr); return attr; } private CtField addNewField(final CtClass clz, final String fieldName) { CtField ctfield = null; try { ctfield = new CtField(clz, fieldName, clz); clz.addField(ctfield); } catch (final CannotCompileException e) { e.printStackTrace(); } if (ctfield == null) { fail("Could not create method"); } return ctfield; } private void annotateField( final CtField ctfield, final String annotationName, final int annotationValue) { final AnnotationsAttribute attr = new AnnotationsAttribute( ctfield.getFieldInfo().getConstPool(), AnnotationsAttribute.visibleTag); final Annotation anno = new Annotation("java.lang.Integer", ctfield.getFieldInfo().getConstPool()); anno.addMemberValue( annotationName, new IntegerMemberValue(ctfield.getFieldInfo().getConstPool(), annotationValue)); attr.addAnnotation(anno); ctfield.getFieldInfo().addAttribute(attr); } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/PrefixedJCommanderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.prefix; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.cli.annotations.PrefixParameter; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; public class PrefixedJCommanderTest { @Test public void testAddCommand() { final PrefixedJCommander prefixedJCommander = new PrefixedJCommander(); prefixedJCommander.addCommand("abc", (Object) "hello, world", "a"); prefixedJCommander.addCommand("def", (Object) "goodbye, world", "b"); prefixedJCommander.parse("abc"); Assert.assertEquals(prefixedJCommander.getParsedCommand(), "abc"); } @Test public void testNullDelegate() { final PrefixedJCommander commander = new PrefixedJCommander(); final NullDelegate nullDelegate = new NullDelegate(); commander.addPrefixedObject(nullDelegate); commander.parse(); } @Test public void testMapDelegatesPrefix() { final Arguments args = new Arguments(); args.argChildren.put("abc", new ArgumentChildren()); args.argChildren.put("def", new ArgumentChildren()); final PrefixedJCommander commander = new PrefixedJCommander(); commander.addPrefixedObject(args); commander.parse("--abc.arg", "5", "--def.arg", "blah"); Assert.assertEquals("5", args.argChildren.get("abc").arg); Assert.assertEquals("blah", args.argChildren.get("def").arg); } @Test public void testCollectionDelegatesPrefix() { final ArgumentsCollection args = new ArgumentsCollection(); args.argChildren.add(new ArgumentChildren()); args.argChildren.add(new ArgumentChildrenOther()); final 
PrefixedJCommander commander = new PrefixedJCommander(); commander.addPrefixedObject(args); commander.parse("--arg", "5", "--arg2", "blah"); Assert.assertEquals("5", ((ArgumentChildren) args.argChildren.get(0)).arg); Assert.assertEquals("blah", ((ArgumentChildrenOther) args.argChildren.get(1)).arg2); } @Test public void testPrefixParameter() { final PrefixedArguments args = new PrefixedArguments(); final PrefixedJCommander commander = new PrefixedJCommander(); commander.addPrefixedObject(args); commander.parse("--abc.arg", "5", "--arg", "blah"); Assert.assertEquals("5", args.child.arg); Assert.assertEquals("blah", args.blah); } @Test public void testAddGetPrefixedObjects() { final PrefixedArguments args = new PrefixedArguments(); final PrefixedJCommander commander = new PrefixedJCommander(); commander.addPrefixedObject(args); Assert.assertTrue( commander.getPrefixedObjects().contains(args) && (commander.getPrefixedObjects().size() == 1)); } private static class PrefixedArguments { @ParametersDelegate @PrefixParameter(prefix = "abc") private final ArgumentChildren child = new ArgumentChildren(); @Parameter(names = "--arg") private String blah; } private static class NullDelegate { @ParametersDelegate private final ArgumentChildren value = null; } private static class ArgumentsCollection { @ParametersDelegate private final List argChildren = new ArrayList<>(); } private static class Arguments { @ParametersDelegate private final Map argChildren = new HashMap<>(); } private static class ArgumentChildren { @Parameter(names = "--arg") private String arg; } private static class ArgumentChildrenOther { @Parameter(names = "--arg2") private String arg2; } } ================================================ FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/prefix/TranslationEntryTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.cli.prefix;

import static org.junit.Assert.fail;
import java.lang.reflect.AnnotatedElement;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.core.cli.annotations.PrefixParameter;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameterized;
import com.beust.jcommander.ParametersDelegate;

/**
 * Exercises the accessors of {@link TranslationEntry} for a simple {@code -name} parameter with a
 * "prefix" prefix: prefixed names, description, flags, and property-name translation.
 */
public class TranslationEntryTest {

  private static class Arguments {
    @Parameter(names = "-name", description = "name description")
    Integer field;

    // Generic arguments were lost in extraction; Object values keep the delegate
    // reflection-compatible. TODO confirm the original value type.
    @ParametersDelegate
    @PrefixParameter(prefix = "obj")
    Map<String, Object> map;
  }

  static TranslationEntry entry;
  static Parameterized param;
  static Integer obj;
  static String prefix;
  static AnnotatedElement aElement;

  @Before
  public void setUp() {
    try {
      final Arguments args = new Arguments();
      // parseArg reflects over the @Parameter members of Arguments; the first
      // entry corresponds to the "-name" parameter used by the assertions below.
      final List<Parameterized> params = Parameterized.parseArg(args);
      if (params.isEmpty()) {
        fail("Could not find parameter");
      }
      param = params.get(0);
    } catch (final SecurityException e) {
      // Should never trigger
      e.printStackTrace();
    }
    obj = 4;
    prefix = "prefix";
    aElement = Integer.class;
    entry = new TranslationEntry(param, obj, prefix, aElement);
  }

  @Test
  public void testGetParam() {
    Assert.assertEquals(param, entry.getParam());
  }

  @Test
  public void testGetObject() {
    Assert.assertEquals(obj, entry.getObject());
  }

  @Test
  public void testGetPrefix() {
    Assert.assertEquals(prefix, entry.getPrefix());
  }

  @Test
  public void testIsMethod() {
    Assert.assertFalse(entry.isMethod());
  }

  @Test
  public void testGetMember() {
    Assert.assertEquals(aElement, entry.getMember());
  }

  @Test
  public void testGetPrefixedNames() {
    Assert.assertTrue(Arrays.asList(entry.getPrefixedNames()).contains("-" + prefix + ".name"));
  }

  @Test
  public void testGetDescription() {
    Assert.assertEquals("name description", entry.getDescription());
  }

  @Test
  public void testIsPassword() {
    Assert.assertFalse(entry.isPassword());
  }

  @Test
  public void testIsHidden() {
    Assert.assertFalse(entry.isHidden());
  }

  @Test
  public void testIsRequired() {
    Assert.assertFalse(entry.isRequired());
  }

  @Test
  public void testGetAsPropertyName() {
    Assert.assertEquals("prefix.name", entry.getAsPropertyName());
  }
}

================================================
FILE: core/cli/src/test/java/org/locationtech/geowave/core/cli/spi/OperationRegistryTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.cli.spi; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.cli.operations.ExplainCommand; public class OperationRegistryTest { @Test public void testGetOperation() { final OperationEntry optentry = new OperationEntry(ExplainCommand.class); final List entries = new ArrayList<>(); entries.add(optentry); final OperationRegistry optreg = new OperationRegistry(entries); assertEquals("explain", optreg.getOperation(ExplainCommand.class).getOperationNames()[0]); assertEquals(true, optreg.getAllOperations().contains(optentry)); } } ================================================ FILE: core/geotime/.gitignore ================================================ /bin/ ================================================ FILE: core/geotime/pom.xml ================================================ 4.0.0 org.locationtech.geowave geowave-core-parent ../ 2.0.2-SNAPSHOT geowave-core-geotime GeoWave Spatial and Temporal Support io.sgr s2-geometry-library-java 1.0.1 com.uber h3 3.7.0 com.github.davidmoten geo 0.7.7 org.locationtech.jts jts-core org.locationtech.geowave geowave-core-index ${project.version} org.geotools gt-epsg-wkt org.locationtech.geowave geowave-core-store ${project.version} org.geotools gt-referencing org.geotools gt-cql org.threeten threeten-extra com.fasterxml.jackson.core jackson-annotations org.locationtech.geowave geowave-core-store tests test-jar ${project.version} test org.apache.maven.plugins maven-jar-plugin 3.2.0 test-jar ================================================ FILE: 
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/GeoTimePersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime;

import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;
import org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptor;
import org.locationtech.geowave.core.geotime.index.SpatialIndexFilter;
import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;
import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;
import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition;
import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeIndexStrategy;
import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy;
import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition;
import org.locationtech.geowave.core.geotime.store.InternalGeotoolsDataAdapterWrapper;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimension;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimension;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;
import org.locationtech.geowave.core.geotime.store.dimension.LatitudeField;
import org.locationtech.geowave.core.geotime.store.dimension.LongitudeField;
import org.locationtech.geowave.core.geotime.store.dimension.TimeField;
import org.locationtech.geowave.core.geotime.store.query.ExplicitCQLQuery;
import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery;
import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialTemporalQuery;
import org.locationtech.geowave.core.geotime.store.query.ExplicitTemporalQuery;
import org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery;
import org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery;
import org.locationtech.geowave.core.geotime.store.query.SpatialQuery;
import org.locationtech.geowave.core.geotime.store.query.SpatialTemporalQuery;
import org.locationtech.geowave.core.geotime.store.query.TemporalQuery;
import org.locationtech.geowave.core.geotime.store.query.aggregate.CommonIndexBoundingBoxAggregation;
import org.locationtech.geowave.core.geotime.store.query.aggregate.CommonIndexTimeRangeAggregation;
import org.locationtech.geowave.core.geotime.store.query.aggregate.OptimalVectorBoundingBoxAggregation;
import org.locationtech.geowave.core.geotime.store.query.aggregate.OptimalVectorTimeRangeAggregation;
import org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialCommonIndexedBinningStrategy;
import org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialFieldBinningStrategy;
import org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialSimpleFeatureBinningStrategy;
import org.locationtech.geowave.core.geotime.store.query.aggregate.VectorBoundingBoxAggregation;
import org.locationtech.geowave.core.geotime.store.query.aggregate.VectorTimeRangeAggregation;
import org.locationtech.geowave.core.geotime.store.query.filter.CQLQueryFilter;
import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.PreparedFilterGeometry;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialNotEqualTo;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.TextToSpatialExpression;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.UnpreparedFilterGeometry;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalEqualTo;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalNotEqualTo;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;
import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;
import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;

/**
 * SPI registry mapping each geotime {@code Persistable} implementation to its stable serialization
 * ID (the {@code short} passed to each {@code PersistableIdAndConstructor}).
 *
 * <p> The inline comments below record that these IDs are persisted with stored data and must
 * remain stable across releases: gaps mark retired pre-2.0 classes or IDs owned by
 * {@code GeotimeRegisteredIndexFieldMappers}, and 523 was deliberately kept from adapter-vector
 * for backwards compatibility. Never renumber an existing entry; add new classes with unused IDs.
 */
public class GeoTimePersistableRegistry implements
    PersistableRegistrySpi,
    InternalPersistableRegistry {

  // Make sure GeoTools is properly initialized before we do anything
  static {
    GeometryUtils.initClassLoader();
  }

  @Override
  public PersistableIdAndConstructor[] getSupportedPersistables() {
    return new PersistableIdAndConstructor[] {
        new PersistableIdAndConstructor((short) 300, LatitudeDefinition::new),
        new PersistableIdAndConstructor((short) 301, LongitudeDefinition::new),
        new PersistableIdAndConstructor((short) 302, TemporalBinningStrategy::new),
        new PersistableIdAndConstructor((short) 303, TimeDefinition::new),
        // 304 is a legacy class (pre 2.0)
        // 305 is a legacy class (pre 2.0)
        // 306-307 are used by GeotimeRegisteredIndexFieldMappers
        new PersistableIdAndConstructor((short) 308, ExplicitCQLQuery::new),
        new PersistableIdAndConstructor((short) 309, CQLQueryFilter::new),
        new PersistableIdAndConstructor((short) 310, TimeField::new),
        new PersistableIdAndConstructor((short) 311, SpatialQueryFilter::new),
        new PersistableIdAndConstructor((short) 312, ExplicitSpatialQuery::new),
        // 313 is a legacy class (pre 2.0)
        // 523 migrated from adapter-vector, ID is the same to preserve backwards compatibility
        new PersistableIdAndConstructor((short) 523, TimeDescriptorConfiguration::new),
        new PersistableIdAndConstructor((short) 314, CustomCRSBoundedSpatialDimension::new),
        new PersistableIdAndConstructor((short) 315, CustomCrsIndexModel::new),
        new PersistableIdAndConstructor((short) 316, IndexOnlySpatialQuery::new),
        new PersistableIdAndConstructor((short) 317, ExplicitSpatialTemporalQuery::new),
        new PersistableIdAndConstructor((short) 318, ExplicitTemporalQuery::new),
        new PersistableIdAndConstructor((short) 319, CustomCRSUnboundedSpatialDimension::new),
        new PersistableIdAndConstructor((short) 320, SpatialIndexFilter::new),
        new PersistableIdAndConstructor((short) 321, CustomCRSUnboundedSpatialDimensionX::new),
        new PersistableIdAndConstructor((short) 322, CustomCRSUnboundedSpatialDimensionY::new),
        new PersistableIdAndConstructor((short) 323, VectorTimeRangeAggregation::new),
        new PersistableIdAndConstructor((short) 324, CommonIndexTimeRangeAggregation::new),
        new PersistableIdAndConstructor((short) 325, SpatialFieldBinningStrategy::new),
        new PersistableIdAndConstructor((short) 326, OptimalVectorTimeRangeAggregation::new),
        new PersistableIdAndConstructor((short) 327, VectorBoundingBoxAggregation::new),
        new PersistableIdAndConstructor((short) 328, CommonIndexBoundingBoxAggregation::new),
        new PersistableIdAndConstructor((short) 329, OptimalVectorBoundingBoxAggregation::new),
        new PersistableIdAndConstructor((short) 330, OptimalCQLQuery::new),
        new PersistableIdAndConstructor((short) 331, SpatialQuery::new),
        new PersistableIdAndConstructor((short) 332, SpatialTemporalQuery::new),
        new PersistableIdAndConstructor((short) 333, TemporalQuery::new),
        new PersistableIdAndConstructor((short) 334, SimpleTimeDefinition::new),
        new PersistableIdAndConstructor((short) 335, SimpleTimeIndexStrategy::new),
        new PersistableIdAndConstructor((short) 336, CustomCRSBoundedSpatialDimensionX::new),
        new PersistableIdAndConstructor((short) 337, CustomCRSBoundedSpatialDimensionY::new),
        new PersistableIdAndConstructor((short) 338, SpatialSimpleFeatureBinningStrategy::new),
        new PersistableIdAndConstructor((short) 339, SpatialCommonIndexedBinningStrategy::new),
        new PersistableIdAndConstructor((short) 340, InternalGeotoolsDataAdapterWrapper::new),
        new PersistableIdAndConstructor((short) 341, SpatialFieldDescriptor::new),
        new PersistableIdAndConstructor((short) 342, LatitudeField::new),
        new PersistableIdAndConstructor((short) 343, LongitudeField::new),
        new PersistableIdAndConstructor((short) 344, CustomCRSSpatialField::new),
        new PersistableIdAndConstructor((short) 345, TemporalFieldDescriptor::new),
        new PersistableIdAndConstructor((short) 346, Crosses::new),
        new PersistableIdAndConstructor((short) 347, Disjoint::new),
        new PersistableIdAndConstructor((short) 348, Intersects::new),
        new PersistableIdAndConstructor((short) 349, Overlaps::new),
        // 350-358 are used by GeotimeRegisteredIndexFieldMappers
        new PersistableIdAndConstructor((short) 359, SpatialContains::new),
        new PersistableIdAndConstructor((short) 360, SpatialEqualTo::new),
        new PersistableIdAndConstructor((short) 361, SpatialNotEqualTo::new),
        new PersistableIdAndConstructor((short) 362, Touches::new),
        new PersistableIdAndConstructor((short) 363, Within::new),
        new PersistableIdAndConstructor((short) 364, PreparedFilterGeometry::new),
        new PersistableIdAndConstructor((short) 365, UnpreparedFilterGeometry::new),
        new PersistableIdAndConstructor((short) 366, SpatialFieldValue::new),
        new PersistableIdAndConstructor((short) 367, SpatialLiteral::new),
        new PersistableIdAndConstructor((short) 368, After::new),
        new PersistableIdAndConstructor((short) 369, Before::new),
        new PersistableIdAndConstructor((short) 370, BeforeOrDuring::new),
        new PersistableIdAndConstructor((short) 371, DuringOrAfter::new),
        new PersistableIdAndConstructor((short) 372, During::new),
        new PersistableIdAndConstructor((short) 373, TemporalBetween::new),
        new PersistableIdAndConstructor((short) 374, TimeOverlaps::new),
        new PersistableIdAndConstructor((short) 375, TemporalFieldValue::new),
        new PersistableIdAndConstructor((short) 376, TemporalLiteral::new),
        new PersistableIdAndConstructor((short) 377, BBox::new),
        new PersistableIdAndConstructor((short) 378, TemporalEqualTo::new),
        new PersistableIdAndConstructor((short) 379, TemporalNotEqualTo::new),
        new PersistableIdAndConstructor((short) 380, TextToSpatialExpression::new)};
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/GeometryFieldMapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.jts.geom.Geometry; import com.google.common.collect.Sets; /** * Maps a `Geometry` adapter field to a `Geometry` index field. */ public class GeometryFieldMapper extends SpatialFieldMapper { @Override protected Geometry getNativeGeometry(List nativeFieldValues) { return nativeFieldValues.get(0); } @Override public void toAdapter(final Geometry indexFieldValue, final RowBuilder rowBuilder) { rowBuilder.setField(adapterFields[0], indexFieldValue); } @Override public short adapterFieldCount() { return 1; } @Override public Class adapterFieldType() { return Geometry.class; } @Override public Set getLowerCaseSuggestedFieldNames() { return Sets.newHashSet("geom", "geometry", "the_geom"); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/GeotimeRegisteredIndexFieldMappers.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.DoubleLatLonFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.FloatLatLonFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TemporalLongFieldMapper.CalendarLongFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TemporalLongFieldMapper.DateLongFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TimeInstantFieldMapper.CalendarInstantFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TimeInstantFieldMapper.DateInstantFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TimeInstantFieldMapper.LongInstantFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TimeRangeFieldMapper.CalendarRangeFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TimeRangeFieldMapper.DateRangeFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.TimeRangeFieldMapper.LongRangeFieldMapper;
import org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI;

/**
 * Registered spatial and temporal adapter to index field mappers.
 */
public class GeotimeRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI {

  /**
   * @return the mapper constructors paired with their persistable IDs. These IDs are serialized
   *         with stored data (they share the ID space of GeoTimePersistableRegistry), so existing
   *         entries must never be renumbered.
   */
  @Override
  public RegisteredFieldMapper[] getRegisteredFieldMappers() {
    return new RegisteredFieldMapper[] {
        new RegisteredFieldMapper(DateLongFieldMapper::new, (short) 306),
        new RegisteredFieldMapper(CalendarLongFieldMapper::new, (short) 307),
        new RegisteredFieldMapper(GeometryFieldMapper::new, (short) 350),
        new RegisteredFieldMapper(DoubleLatLonFieldMapper::new, (short) 351),
        new RegisteredFieldMapper(FloatLatLonFieldMapper::new, (short) 352),
        new RegisteredFieldMapper(CalendarInstantFieldMapper::new, (short) 353),
        new RegisteredFieldMapper(DateInstantFieldMapper::new, (short) 354),
        new RegisteredFieldMapper(LongInstantFieldMapper::new, (short) 355),
        new RegisteredFieldMapper(CalendarRangeFieldMapper::new, (short) 356),
        new RegisteredFieldMapper(DateRangeFieldMapper::new, (short) 357),
        new RegisteredFieldMapper(LongRangeFieldMapper::new, (short) 358),};
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/LatLonFieldMapper.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import java.nio.ByteBuffer; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; /** * Abstract field mapper for mapping latitude and longitude adapter fields to a singular `Geometry` * index field. 
* * @param the adapter field type */ public abstract class LatLonFieldMapper extends SpatialFieldMapper { private static Set suggestedLongitudeFieldNames = Sets.newHashSet("longitude", "lon", "x"); private static Set suggestedLatitudeFieldNames = Sets.newHashSet("latitude", "lat", "y"); protected boolean xAxisFirst = true; @Override public void initFromOptions( final List> inputFieldDescriptors, final IndexFieldOptions options) { if (inputFieldDescriptors.size() != 2) { throw new RuntimeException("Latitude/Longitude index field mapper expects exactly 2 fields."); } if (inputFieldDescriptors.get(0).indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT) && inputFieldDescriptors.get(1).indexHints().contains( SpatialField.LONGITUDE_DIMENSION_HINT)) { throw new RuntimeException("Two longitude dimension hints were given."); } else if (inputFieldDescriptors.get(0).indexHints().contains( SpatialField.LATITUDE_DIMENSION_HINT) && inputFieldDescriptors.get(1).indexHints().contains( SpatialField.LATITUDE_DIMENSION_HINT)) { throw new RuntimeException("Two latitude dimension hints were given."); } xAxisFirst = inputFieldDescriptors.get(0).indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT) || inputFieldDescriptors.get(1).indexHints().contains( SpatialField.LATITUDE_DIMENSION_HINT) || suggestedLongitudeFieldNames.contains( inputFieldDescriptors.get(0).fieldName().toLowerCase()); super.initFromOptions(inputFieldDescriptors, options); } @Override public String[] getIndexOrderedAdapterFields() { if (!xAxisFirst) { return new String[] {adapterFields[1], adapterFields[0]}; } return adapterFields; } @Override public void toAdapter(final Geometry indexFieldValue, final RowBuilder rowBuilder) { final Point centroid = indexFieldValue.getCentroid(); if (xAxisFirst) { setField(adapterFields[0], centroid.getX(), rowBuilder); setField(adapterFields[1], centroid.getY(), rowBuilder); } setField(adapterFields[0], centroid.getY(), rowBuilder); setField(adapterFields[1], 
centroid.getX(), rowBuilder); } protected abstract void setField( final String fieldName, final Double Value, final RowBuilder rowBuilder); @Override protected Geometry getNativeGeometry(List nativeFieldValues) { final Coordinate coordinate = xAxisFirst ? toCoordinate(nativeFieldValues.get(0), nativeFieldValues.get(1)) : toCoordinate(nativeFieldValues.get(1), nativeFieldValues.get(0)); return GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate); } protected abstract Coordinate toCoordinate(final N xValue, final N yValue); @Override public short adapterFieldCount() { return 2; } @Override public Set getLowerCaseSuggestedFieldNames() { return Sets.newHashSet( Iterables.concat(suggestedLongitudeFieldNames, suggestedLatitudeFieldNames)); } @Override protected int byteLength() { return super.byteLength() + 1; } protected void writeBytes(final ByteBuffer buffer) { super.writeBytes(buffer); buffer.put((byte) (xAxisFirst ? 1 : 0)); } protected void readBytes(final ByteBuffer buffer) { super.readBytes(buffer); xAxisFirst = buffer.get() != 0; } /** * Maps `Double` latitude and longitude adapter fields to a `Geometry` index field. */ public static class DoubleLatLonFieldMapper extends LatLonFieldMapper { @Override public Class adapterFieldType() { return Double.class; } @Override protected void setField( final String fieldName, final Double value, final RowBuilder rowBuilder) { rowBuilder.setField(fieldName, value); } @Override protected Coordinate toCoordinate(Double xValue, Double yValue) { return new Coordinate(xValue, yValue); } } /** * Maps `Float` latitude and longitude adapter fields to a `Geometry` index field. 
*/ public static class FloatLatLonFieldMapper extends LatLonFieldMapper { @Override public Class adapterFieldType() { return Float.class; } @Override protected void setField( final String fieldName, final Double value, final RowBuilder rowBuilder) { rowBuilder.setField(fieldName, value.floatValue()); } @Override protected Coordinate toCoordinate(Float xValue, Float yValue) { return new Coordinate(xValue, yValue); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import java.nio.ByteBuffer;
import java.util.Set;
import org.geotools.referencing.CRS;
import org.locationtech.geowave.core.index.IndexDimensionHint;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.adapter.BaseFieldDescriptor;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * An adapter field descriptor that also contains a {@code CoordinateReferenceSystem}. This is used
 * for determining if the adapter field should be transformed to the CRS of the index when
 * ingesting.
 *
 * @param <T> the adapter field type
 */
public class SpatialFieldDescriptor<T> extends BaseFieldDescriptor<T> {
  private CoordinateReferenceSystem crs;

  /** No-arg constructor for deserialization. */
  public SpatialFieldDescriptor() {}

  public SpatialFieldDescriptor(
      final Class<T> bindingClass,
      final String fieldName,
      final Set<IndexDimensionHint> indexHints,
      final CoordinateReferenceSystem crs) {
    super(bindingClass, fieldName, indexHints);
    this.crs = crs;
  }

  /**
   * @return the coordinate reference system of this field
   */
  public CoordinateReferenceSystem crs() {
    return this.crs;
  }

  /**
   * Serializes the parent descriptor followed by the CRS as WKT, each prefixed with a varint
   * length.
   */
  @Override
  public byte[] toBinary() {
    final byte[] parentBytes = super.toBinary();
    final byte[] crsBytes = StringUtils.stringToBinary(crs.toWKT());
    final ByteBuffer buffer =
        ByteBuffer.allocate(
            VarintUtils.unsignedIntByteLength(parentBytes.length)
                + VarintUtils.unsignedIntByteLength(crsBytes.length)
                + parentBytes.length
                + crsBytes.length);
    VarintUtils.writeUnsignedInt(parentBytes.length, buffer);
    buffer.put(parentBytes);
    VarintUtils.writeUnsignedInt(crsBytes.length, buffer);
    buffer.put(crsBytes);
    return buffer.array();
  }

  /**
   * Restores the parent descriptor and re-parses the CRS from its WKT representation.
   *
   * @throws RuntimeException if the WKT cannot be parsed back into a CRS
   */
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final byte[] parentBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(parentBytes);
    super.fromBinary(parentBytes);
    final byte[] crsBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(crsBytes);
    try {
      crs = CRS.parseWKT(StringUtils.stringFromBinary(crsBytes));
    } catch (final FactoryException e) {
      // FIX: preserve the cause so the underlying parse failure is not lost
      throw new RuntimeException(
          "Unable to decode coordinate reference system for spatial field descriptor.",
          e);
    }
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptorBuilder.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import org.locationtech.geowave.core.geotime.store.dimension.SpatialField;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * A field descriptor builder that includes helper functions for spatial indexing hints and
 * {@code CoordinateReferenceSystem}.
 *
 * @param <T> the adapter field type
 */
public class SpatialFieldDescriptorBuilder<T> extends
    FieldDescriptorBuilder<T, SpatialFieldDescriptor<T>, SpatialFieldDescriptorBuilder<T>> {

  // Defaults to the GeoWave default CRS unless overridden via crs(...)
  protected CoordinateReferenceSystem crs = GeometryUtils.getDefaultCRS();

  public SpatialFieldDescriptorBuilder(final Class<T> bindingClass) {
    super(bindingClass);
  }

  /**
   * Hint that the field contains both latitude and longitude information and should be used in
   * spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> spatialIndexHint() {
    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT).indexHint(
        SpatialField.LATITUDE_DIMENSION_HINT);
  }

  /**
   * Hint that the field contains latitude information and should be used in spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> latitudeIndexHint() {
    return this.indexHint(SpatialField.LATITUDE_DIMENSION_HINT);
  }

  /**
   * Hint that the field contains longitude information and should be used in spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> longitudeIndexHint() {
    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT);
  }

  /**
   * Specify the coordinate reference system of the spatial field.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> crs(final CoordinateReferenceSystem crs) {
    this.crs = crs;
    return this;
  }

  @Override
  public SpatialFieldDescriptor<T> build() {
    return new SpatialFieldDescriptor<>(bindingClass, fieldName, indexHints, crs);
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldMapper.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import java.nio.ByteBuffer; import java.util.List; import org.apache.commons.lang.ArrayUtils; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField.SpatialIndexFieldOptions; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.jts.geom.Geometry; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Maps an adapter spatial field or fields to a geometry index field, transforming the geometry to * the appropriate CRS if necessary. 
* * @param The class of the adapter spatial field */ public abstract class SpatialFieldMapper extends IndexFieldMapper { private static final Logger LOGGER = LoggerFactory.getLogger(SpatialFieldMapper.class); private CoordinateReferenceSystem adapterCRS = null; private CoordinateReferenceSystem indexCRS = null; private MathTransform transform = null; @Override public Geometry toIndex(List nativeFieldValues) { final Geometry nativeGeometry = getNativeGeometry(nativeFieldValues); try { if (transform != null) { return JTS.transform(nativeGeometry, transform); } } catch (MismatchedDimensionException | TransformException e) { LOGGER.warn( "Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS", e); } return nativeGeometry; } /** * Builds a `Geometry` from the native adapter field values. * * @param nativeFieldValues the adapter field values * @return a `Geometry` that represents the adapter field values */ protected abstract Geometry getNativeGeometry(List nativeFieldValues); @Override public Class indexFieldType() { return Geometry.class; } @Override protected void initFromOptions( final List> inputFieldDescriptors, final IndexFieldOptions options) { indexCRS = GeometryUtils.getDefaultCRS(); adapterCRS = GeometryUtils.getDefaultCRS(); if (options instanceof SpatialIndexFieldOptions) { indexCRS = ((SpatialIndexFieldOptions) options).crs(); } for (FieldDescriptor field : inputFieldDescriptors) { if (field instanceof SpatialFieldDescriptor && ((SpatialFieldDescriptor) field).crs() != null) { adapterCRS = ((SpatialFieldDescriptor) field).crs(); break; } } if (!indexCRS.equals(adapterCRS)) { try { transform = CRS.findMathTransform(adapterCRS, indexCRS, true); } catch (FactoryException e) { LOGGER.warn("Unable to create coordinate reference system transform", e); } } } @Override public void transformFieldDescriptors(final FieldDescriptor[] inputFieldDescriptors) { if (!indexCRS.equals(adapterCRS)) { final String[] 
mappedFields = getAdapterFields(); for (int i = 0; i < inputFieldDescriptors.length; i++) { final FieldDescriptor field = inputFieldDescriptors[i]; if (ArrayUtils.contains(mappedFields, field.fieldName())) { inputFieldDescriptors[i] = new SpatialFieldDescriptorBuilder<>(field.bindingClass()).fieldName( field.fieldName()).crs(indexCRS).build(); } } } } private byte[] indexCRSBytes = null; private byte[] adapterCRSBytes = null; @Override protected int byteLength() { indexCRSBytes = StringUtils.stringToBinary(indexCRS.toWKT()); adapterCRSBytes = StringUtils.stringToBinary(adapterCRS.toWKT()); return super.byteLength() + VarintUtils.unsignedShortByteLength((short) indexCRSBytes.length) + VarintUtils.unsignedShortByteLength((short) adapterCRSBytes.length) + indexCRSBytes.length + adapterCRSBytes.length; } @Override protected void writeBytes(final ByteBuffer buffer) { VarintUtils.writeUnsignedShort((short) indexCRSBytes.length, buffer); buffer.put(indexCRSBytes); VarintUtils.writeUnsignedShort((short) adapterCRSBytes.length, buffer); buffer.put(adapterCRSBytes); super.writeBytes(buffer); } @Override protected void readBytes(final ByteBuffer buffer) { indexCRSBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(indexCRSBytes); adapterCRSBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(adapterCRSBytes); try { indexCRS = CRS.parseWKT(StringUtils.stringFromBinary(indexCRSBytes)); adapterCRS = CRS.parseWKT(StringUtils.stringFromBinary(adapterCRSBytes)); if (!indexCRS.equals(adapterCRS)) { transform = CRS.findMathTransform(adapterCRS, indexCRS, true); } else { transform = null; } } catch (FactoryException e) { throw new RuntimeException( "Unable to decode coordinate reference system for spatial index field mapper."); } super.readBytes(buffer); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalFieldDescriptor.java 
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import java.util.Set; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.store.adapter.BaseFieldDescriptor; /** * An adapter field descriptor to represent temporal fields. * * @param the adapter field type */ public class TemporalFieldDescriptor extends BaseFieldDescriptor { public TemporalFieldDescriptor() {} public TemporalFieldDescriptor( final Class bindingClass, final String fieldName, final Set indexHints) { super(bindingClass, fieldName, indexHints); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalFieldDescriptorBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import static org.locationtech.geowave.core.geotime.store.dimension.TimeField.END_TIME_DIMENSION_HINT;
import static org.locationtech.geowave.core.geotime.store.dimension.TimeField.START_TIME_DIMENSION_HINT;
import static org.locationtech.geowave.core.geotime.store.dimension.TimeField.TIME_DIMENSION_HINT;
import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;

/**
 * A field descriptor builder for adapter fields that contain time information.
 *
 * @param <T> the adapter field type
 */
public class TemporalFieldDescriptorBuilder<T> extends
    FieldDescriptorBuilder<T, TemporalFieldDescriptor<T>, TemporalFieldDescriptorBuilder<T>> {

  public TemporalFieldDescriptorBuilder(final Class<T> bindingClass) {
    super(bindingClass);
  }

  /**
   * Hint that the field is a time instant and should be used for temporal indexing.
   *
   * @return the temporal field descriptor builder
   */
  public TemporalFieldDescriptorBuilder<T> timeIndexHint() {
    return this.indexHint(TIME_DIMENSION_HINT);
  }

  /**
   * Hint that the field is the start of a time range and should be used for temporal indexing.
   * There should be a corresponding end time index hint specified in the schema.
   *
   * @return the temporal field descriptor builder
   */
  public TemporalFieldDescriptorBuilder<T> startTimeIndexHint() {
    return this.indexHint(START_TIME_DIMENSION_HINT);
  }

  /**
   * Hint that the field is the end of a time range and should be used for temporal indexing.
   * There should be a corresponding start time index hint specified in the schema.
   *
   * @return the temporal field descriptor builder
   */
  public TemporalFieldDescriptorBuilder<T> endTimeIndexHint() {
    return this.indexHint(END_TIME_DIMENSION_HINT);
  }

  @Override
  public TemporalFieldDescriptor<T> build() {
    return new TemporalFieldDescriptor<>(bindingClass, fieldName, indexHints);
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalIntervalFieldMapper.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import java.util.List; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.threeten.extra.Interval; /** * Maps an adapter temporal field or fields to an `Interval` index field. * * @param the adapter field type */ public abstract class TemporalIntervalFieldMapper extends IndexFieldMapper { @Override public Class indexFieldType() { return Interval.class; } @Override public void transformFieldDescriptors(final FieldDescriptor[] inputFieldDescriptors) {} @Override protected void initFromOptions( List> inputFieldDescriptors, IndexFieldOptions options) {} } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TemporalLongFieldMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.IndexFieldMapper;
import org.locationtech.geowave.core.store.api.RowBuilder;

/**
 * Maps an adapter temporal field or fields to a {@code Long} (epoch milliseconds) index field.
 *
 * @param <N> the adapter field type
 */
public abstract class TemporalLongFieldMapper<N> extends IndexFieldMapper<N, Long> {

  @Override
  public Class<Long> indexFieldType() {
    return Long.class;
  }

  // Temporal mappings carry no index-specific metadata, so descriptors pass through unchanged.
  @Override
  public void transformFieldDescriptors(final FieldDescriptor<?>[] inputFieldDescriptors) {}

  // No options are needed for temporal long mappings.
  @Override
  protected void initFromOptions(
      final List<FieldDescriptor<?>> inputFieldDescriptors,
      final IndexFieldOptions options) {}

  // A single adapter field represents the instant.
  @Override
  public short adapterFieldCount() {
    return 1;
  }

  /**
   * Maps a {@code Calendar} adapter field to a {@code Long} index field.
   */
  public static class CalendarLongFieldMapper extends TemporalLongFieldMapper<Calendar> {
    @Override
    public Class<Calendar> adapterFieldType() {
      return Calendar.class;
    }

    @Override
    public Long toIndex(final List<Calendar> nativeFieldValues) {
      return nativeFieldValues.get(0).getTimeInMillis();
    }

    @Override
    public void toAdapter(final Long indexFieldValue, final RowBuilder rowBuilder) {
      // NOTE(review): uses the JVM default time zone/locale for the restored Calendar — the
      // instant is preserved, but the zone of the original Calendar is not.
      final Calendar calendar = Calendar.getInstance();
      calendar.setTimeInMillis(indexFieldValue);
      rowBuilder.setField(adapterFields[0], calendar);
    }
  }

  /**
   * Maps a {@code Date} adapter field to a {@code Long} index field.
   */
  public static class DateLongFieldMapper extends TemporalLongFieldMapper<Date> {
    @Override
    public Class<Date> adapterFieldType() {
      return Date.class;
    }

    @Override
    public Long toIndex(final List<Date> nativeFieldValues) {
      return nativeFieldValues.get(0).getTime();
    }

    @Override
    public void toAdapter(final Long indexFieldValue, final RowBuilder rowBuilder) {
      rowBuilder.setField(adapterFields[0], new Date(indexFieldValue));
    }
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TimeInstantFieldMapper.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.api.RowBuilder;
import org.threeten.extra.Interval;
import com.google.common.collect.Sets;

/**
 * Maps a single adapter field that represents an instant in time to an {@code Interval} index
 * field.
 *
 * @param <N> the adapter field type
 */
public abstract class TimeInstantFieldMapper<N> extends TemporalIntervalFieldMapper<N> {

  @Override
  public Interval toIndex(final List<N> nativeFieldValues) {
    return TimeUtils.getInterval(nativeFieldValues.get(0));
  }

  /**
   * Restores the adapter field from the interval; only the interval's start is meaningful for an
   * instant, and it is converted back to the adapter's native temporal type.
   */
  @Override
  public void toAdapter(final Interval indexFieldValue, final RowBuilder rowBuilder) {
    rowBuilder.setField(
        adapterFields[0],
        TimeUtils.getTimeValue(this.adapterFieldType(), indexFieldValue.getStart().toEpochMilli()));
  }

  // A single adapter field represents the instant.
  @Override
  public short adapterFieldCount() {
    return 1;
  }

  /**
   * @return common temporal column names used to auto-detect the mapped field
   */
  @Override
  public Set<String> getLowerCaseSuggestedFieldNames() {
    return Sets.newHashSet("timestamp", "date", "time");
  }

  /**
   * Maps a {@code Calendar} adapter field to an {@code Interval} index field.
   */
  public static class CalendarInstantFieldMapper extends TimeInstantFieldMapper<Calendar> {
    @Override
    public Class<Calendar> adapterFieldType() {
      return Calendar.class;
    }
  }

  /**
   * Maps a {@code Date} adapter field to an {@code Interval} index field.
   */
  public static class DateInstantFieldMapper extends TimeInstantFieldMapper<Date> {
    @Override
    public Class<Date> adapterFieldType() {
      return Date.class;
    }
  }

  /**
   * Maps a {@code Long} (epoch milliseconds) adapter field to an {@code Interval} index field.
   */
  public static class LongInstantFieldMapper extends TimeInstantFieldMapper<Long> {
    @Override
    public Class<Long> adapterFieldType() {
      return Long.class;
    }
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/TimeRangeFieldMapper.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import java.nio.ByteBuffer; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.RowBuilder; import org.threeten.extra.Interval; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; /** * Maps two adapter fields that represent a start and end time to an `Interval` index field. 
* * @param the adapter field type */ public abstract class TimeRangeFieldMapper extends TemporalIntervalFieldMapper { private static Set suggestedStartTimeFieldNames = Sets.newHashSet("starttime", "start", "start_time"); private static Set suggestedEndTimeNames = Sets.newHashSet("endtime", "end", "end_time"); private boolean startFirst = true; @Override public void initFromOptions( final List> inputFieldDescriptors, final IndexFieldOptions options) { if (inputFieldDescriptors.size() != 2) { throw new RuntimeException("Time range field mapper expects exactly 2 fields."); } startFirst = inputFieldDescriptors.get(0).indexHints().contains(TimeField.START_TIME_DIMENSION_HINT) || !inputFieldDescriptors.get(1).indexHints().contains( TimeField.START_TIME_DIMENSION_HINT) || suggestedStartTimeFieldNames.contains( inputFieldDescriptors.get(0).fieldName().toLowerCase()); super.initFromOptions(inputFieldDescriptors, options); } @Override public String[] getIndexOrderedAdapterFields() { if (!startFirst) { return new String[] {adapterFields[1], adapterFields[0]}; } return adapterFields; } @Override public Interval toIndex(List nativeFieldValues) { if (startFirst) { return TimeUtils.getInterval(nativeFieldValues.get(0), nativeFieldValues.get(1)); } else { return TimeUtils.getInterval(nativeFieldValues.get(1), nativeFieldValues.get(0)); } } @Override public void toAdapter(final Interval indexFieldValue, final RowBuilder rowBuilder) { if (startFirst) { rowBuilder.setField( adapterFields[0], TimeUtils.getTimeValue( this.adapterFieldType(), ((Interval) indexFieldValue).getStart().toEpochMilli())); rowBuilder.setField( adapterFields[1], TimeUtils.getTimeValue( this.adapterFieldType(), ((Interval) indexFieldValue).getEnd().toEpochMilli())); } else { rowBuilder.setField( adapterFields[1], TimeUtils.getTimeValue( this.adapterFieldType(), ((Interval) indexFieldValue).getStart().toEpochMilli())); rowBuilder.setField( adapterFields[0], TimeUtils.getTimeValue( this.adapterFieldType(), 
((Interval) indexFieldValue).getEnd().toEpochMilli())); } } @Override public short adapterFieldCount() { return 2; } @Override public Set getLowerCaseSuggestedFieldNames() { return Sets.newHashSet(Iterables.concat(suggestedStartTimeFieldNames, suggestedEndTimeNames)); } @Override protected int byteLength() { return super.byteLength() + 1; } protected void writeBytes(final ByteBuffer buffer) { super.writeBytes(buffer); buffer.put((byte) (startFirst ? 1 : 0)); } protected void readBytes(final ByteBuffer buffer) { super.readBytes(buffer); startFirst = buffer.get() != 0; } /** * Maps two `Calendar` adapter fields to an `Interval` index field. */ public static class CalendarRangeFieldMapper extends TimeRangeFieldMapper { @Override public Class adapterFieldType() { return Calendar.class; } } /** * Maps two `Date` adapter fields to an `Interval` index field. */ public static class DateRangeFieldMapper extends TimeRangeFieldMapper { @Override public Class adapterFieldType() { return Date.class; } } /** * Maps two `Long` adapter fields to an `Interval` index field. */ public static class LongRangeFieldMapper extends TimeRangeFieldMapper { @Override public Class adapterFieldType() { return Long.class; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/GeoWaveSpatialField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveFieldAnnotation; /** * Annotation for spatial GeoWave fields for the {@link BasicDataTypeAdapter}. This annotation * allows a CRS and spatial index hints to be easily defined. */ @Inherited @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @GeoWaveFieldAnnotation(fieldDescriptorBuilder = SpatialAnnotatedFieldDescriptorBuilder.class) public @interface GeoWaveSpatialField { /** * The name to use for the field. */ String name() default ""; /** * Index hints to use for the field. */ String[] indexHints() default {}; /** * The CRS code to use for the field. */ String crs() default ""; /** * If {@code true} this field will be preferred for spatial indices. */ boolean spatialIndexHint() default false; /** * If {@code true} this field will be preferred as the latitude dimension for spatial indices. */ boolean latitudeIndexHint() default false; /** * If {@code true} this field will be preferred as the longitude dimension for spatial indices. */ boolean longitudeIndexHint() default false; } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/GeoWaveTemporalField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveFieldAnnotation; /** * Annotation for temporal GeoWave fields for the {@link BasicDataTypeAdapter}. This annotation * allows temporal index hints to be easily defined. */ @Inherited @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @GeoWaveFieldAnnotation(fieldDescriptorBuilder = TemporalAnnotatedFieldDescriptorBuilder.class) public @interface GeoWaveTemporalField { /** * The name to use for the field. */ String name() default ""; /** * Index hints to use for the field. */ String[] indexHints() default {}; /** * If {@code true} this field will be preferred for temporal indices and treated as a time * instant. */ boolean startTimeIndexHint() default false; /** * If {@code true} this field will be preferred for temporal indices and treated as the start * time. */ boolean endTimeIndexHint() default false; /** * If {@code true} this field will be preferred for temporal indices and treated as the end time. */ boolean timeIndexHint() default false; } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/SpatialAnnotatedFieldDescriptorBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter.annotation; import java.lang.reflect.Field; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.AnnotatedFieldDescriptorBuilder; import org.opengis.referencing.FactoryException; /** * Builds spatial field descriptors for fields annotated with `@GeoWaveSpatialField`. 
*/ public class SpatialAnnotatedFieldDescriptorBuilder implements AnnotatedFieldDescriptorBuilder { @Override public FieldDescriptor buildFieldDescriptor(Field field) { if (field.isAnnotationPresent(GeoWaveSpatialField.class)) { final GeoWaveSpatialField fieldAnnotation = field.getAnnotation(GeoWaveSpatialField.class); final String fieldName; if (fieldAnnotation.name().isEmpty()) { fieldName = field.getName(); } else { fieldName = fieldAnnotation.name(); } final String[] indexHints = fieldAnnotation.indexHints(); final SpatialFieldDescriptorBuilder builder = new SpatialFieldDescriptorBuilder<>(BasicDataTypeAdapter.normalizeClass(field.getType())); for (final String hint : indexHints) { builder.indexHint(new IndexDimensionHint(hint)); } if (!fieldAnnotation.crs().isEmpty()) { try { builder.crs(CRS.decode(fieldAnnotation.crs())); } catch (FactoryException e) { throw new RuntimeException("Unable to decode CRS: " + fieldAnnotation.crs(), e); } } if (fieldAnnotation.spatialIndexHint()) { builder.spatialIndexHint(); } if (fieldAnnotation.latitudeIndexHint()) { builder.latitudeIndexHint(); } if (fieldAnnotation.longitudeIndexHint()) { builder.longitudeIndexHint(); } return builder.fieldName(fieldName).build(); } throw new RuntimeException("Field is missing GeoWaveSpatialField annotation."); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/annotation/TemporalAnnotatedFieldDescriptorBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter.annotation; import java.lang.reflect.Field; import org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.AnnotatedFieldDescriptorBuilder; /** * Builds spatial field descriptors for fields annotated with `@GeoWaveSpatialField`. */ public class TemporalAnnotatedFieldDescriptorBuilder implements AnnotatedFieldDescriptorBuilder { @Override public FieldDescriptor buildFieldDescriptor(Field field) { if (field.isAnnotationPresent(GeoWaveTemporalField.class)) { final GeoWaveTemporalField fieldAnnotation = field.getAnnotation(GeoWaveTemporalField.class); final String fieldName; if (fieldAnnotation.name().isEmpty()) { fieldName = field.getName(); } else { fieldName = fieldAnnotation.name(); } final String[] indexHints = fieldAnnotation.indexHints(); final TemporalFieldDescriptorBuilder builder = new TemporalFieldDescriptorBuilder<>( BasicDataTypeAdapter.normalizeClass(field.getType())); for (final String hint : indexHints) { builder.indexHint(new IndexDimensionHint(hint)); } if (fieldAnnotation.timeIndexHint()) { builder.timeIndexHint(); } if (fieldAnnotation.startTimeIndexHint()) { builder.startTimeIndexHint(); } if (fieldAnnotation.endTimeIndexHint()) { builder.endTimeIndexHint(); } return builder.fieldName(fieldName).build(); } throw new RuntimeException("Field is missing 
GeoWaveTemporalField annotation."); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/ComplexGeometryBinningOption.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.binning; public enum ComplexGeometryBinningOption { USE_CENTROID_ONLY, USE_FULL_GEOMETRY, USE_FULL_GEOMETRY_SCALE_BY_OVERLAP; // is used by python converter public static ComplexGeometryBinningOption fromString(final String code) { for (final ComplexGeometryBinningOption output : ComplexGeometryBinningOption.values()) { if (output.toString().equalsIgnoreCase(code)) { return output; } } return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/GeohashBinningHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.binning; import java.util.HashSet; import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils.GeometryHandler; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import com.github.davidmoten.geo.Coverage; import com.github.davidmoten.geo.GeoHash; import com.github.davidmoten.geo.LatLong; import com.google.common.collect.HashMultimap; class GeohashBinningHelper implements SpatialBinningHelper { public GeohashBinningHelper() { super(); } @Override public ByteArrayConstraints getGeometryConstraints(final Geometry geometry, final int precision) { final GeohashGeometryHandler geometryHandler = new GeohashGeometryHandler(precision); GeometryUtils.visitGeometry(geometry, geometryHandler); // we try to replace all common prefixes with a prefix scan instead of using every individual // hash on the query // this can really help with query performance if (removePrefixes(geometryHandler.hashes)) { return new ExplicitConstraints( 
geometryHandler.hashes.stream().map(str -> StringUtils.stringToBinary(str)).map( bytes -> new ByteArrayRange(bytes, bytes)).toArray(ByteArrayRange[]::new)); } return new ExplicitConstraints( geometryHandler.hashes.stream().map(ByteArray::new).toArray(ByteArray[]::new)); } private static boolean removePrefixes(final Set allHashes) { if (allHashes.isEmpty() || allHashes.iterator().next().isEmpty()) { return false; } final HashMultimap prefixMap = HashMultimap.create(); allHashes.forEach(s -> prefixMap.put(s.substring(0, s.length() - 1), s)); // if there are 32 entries of the same substring that means its prefix is fully covered and we // can remove the 32 and replace with the prefix // need to make sure the set is mutable because we will also try to find prefixes in this set final Set retVal = prefixMap.asMap().entrySet().stream().filter(e -> e.getValue().size() == 32).map( Entry::getKey).collect(Collectors.toCollection(HashSet::new)); if (retVal.isEmpty()) { return false; } retVal.forEach(k -> prefixMap.get(k).forEach(v -> allHashes.remove(v))); removePrefixes(retVal); allHashes.addAll(retVal); return true; } @Override public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) { final GeohashGeometryHandler geometryHandler = new GeohashGeometryHandler(precision); GeometryUtils.visitGeometry(geometry, geometryHandler); return geometryHandler.hashes.stream().map(ByteArray::new).toArray(ByteArray[]::new); } @Override public Geometry getBinGeometry(final ByteArray bin, final int precision) { final double halfWidth = GeoHash.widthDegrees(precision) / 2; final double halfHeight = GeoHash.heightDegrees(precision) / 2; final LatLong ll = GeoHash.decodeHash(bin.getString()); return GeometryUtils.GEOMETRY_FACTORY.toGeometry( new Envelope( ll.getLon() - halfWidth, ll.getLon() + halfWidth, ll.getLat() - halfHeight, ll.getLat() + halfHeight)); } @Override public String binToString(final byte[] binId) { return StringUtils.stringFromBinary(binId); } private 
static class GeohashGeometryHandler implements GeometryHandler { private final int precision; private final Set hashes = new HashSet<>(); private final double halfHeight; private final double halfWidth; public GeohashGeometryHandler(final int precision) { this.precision = precision; halfHeight = GeoHash.heightDegrees(precision) / 2; halfWidth = GeoHash.widthDegrees(precision) / 2; } @Override public void handlePoint(final Point point) { hashes.add(GeoHash.encodeHash(point.getY(), point.getX(), precision)); } @Override public void handleLineString(final LineString lineString) { final double minx = lineString.getEnvelopeInternal().getMinX(); final double maxx = lineString.getEnvelopeInternal().getMaxX(); final double miny = lineString.getEnvelopeInternal().getMinY(); final double maxy = lineString.getEnvelopeInternal().getMaxY(); final Coverage coverage = GeoHash.coverBoundingBox(maxy, minx, miny, maxx, precision); hashes.addAll(coverage.getHashes().stream().filter(geohash -> { final LatLong ll = GeoHash.decodeHash(geohash); return lineString.intersects( GeometryUtils.GEOMETRY_FACTORY.toGeometry( new Envelope( ll.getLon() - halfWidth, ll.getLon() + halfWidth, ll.getLat() - halfHeight, ll.getLat() + halfHeight))); }).collect(Collectors.toList())); } @Override public void handlePolygon(final Polygon polygon) { final double minx = polygon.getEnvelopeInternal().getMinX(); final double maxx = polygon.getEnvelopeInternal().getMaxX(); final double miny = polygon.getEnvelopeInternal().getMinY(); final double maxy = polygon.getEnvelopeInternal().getMaxY(); final Coverage coverage = GeoHash.coverBoundingBox(maxy, minx, miny, maxx, precision); // this probably should be equalsTopo for completeness but considering this is a shortcut for // performance anyways, we use equalsExact which should be faster if (polygon.equalsExact(polygon.getEnvelope())) { hashes.addAll(coverage.getHashes()); } else { hashes.addAll(coverage.getHashes().stream().filter(geohash -> { final LatLong ll = 
GeoHash.decodeHash(geohash); return polygon.intersects( GeometryUtils.GEOMETRY_FACTORY.toGeometry( new Envelope( ll.getLon() - halfWidth, ll.getLon() + halfWidth, ll.getLat() - halfHeight, ll.getLat() + halfHeight))); }).collect(Collectors.toList())); } } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/H3BinningHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.binning; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils.GeometryHandler; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.uber.h3core.H3Core; import com.uber.h3core.LengthUnit; import com.uber.h3core.exceptions.LineUndefinedException; import com.uber.h3core.util.GeoCoord; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; class H3BinningHelper implements SpatialBinningHelper { private static final Logger LOGGER = LoggerFactory.getLogger(H3BinningHelper.class); private static final Object H3_MUTEX = new Object(); private static H3Core h3Core; @Override public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) { final H3GeometryHandler h3Handler = new H3GeometryHandler(precision); GeometryUtils.visitGeometry(geometry, h3Handler); return h3Handler.ids.stream().map(Lexicoders.LONG::toByteArray).map(ByteArray::new).toArray( ByteArray[]::new); } @Override public Geometry getBinGeometry(final ByteArray bin, final int precision) { // 
understanding is that this does not produce a closed loop so we need to add the first point // at the end to close the loop final List coords = h3().h3ToGeoBoundary(Lexicoders.LONG.fromByteArray(bin.getBytes())); coords.add(coords.get(0)); return GeometryUtils.GEOMETRY_FACTORY.createPolygon( coords.stream().map(geoCoord -> new Coordinate(geoCoord.lng, geoCoord.lat)).toArray( Coordinate[]::new)); } @Override public String binToString(final byte[] binId) { return h3().h3ToString(Lexicoders.LONG.fromByteArray(binId)); } @Override public int getBinByteLength(final int precision) { return Long.BYTES; } @SuppressFBWarnings private static H3Core h3() { if (h3Core == null) { synchronized (H3_MUTEX) { if (h3Core == null) { try { h3Core = H3Core.newInstance(); } catch (final IOException e) { LOGGER.error("Unable to load native H3 libraries", e); } } } } return h3Core; } private static class H3GeometryHandler implements GeometryHandler { private final int precision; private final Set ids = new HashSet<>(); // this is just an approximation private static final double KM_PER_DEGREE = 111; private final boolean hasBeenBuffered; public H3GeometryHandler(final int precision) { this(precision, false); } public H3GeometryHandler(final int precision, final boolean hasBeenBuffered) { super(); this.precision = precision; this.hasBeenBuffered = hasBeenBuffered; } @Override public void handlePoint(final Point point) { ids.add(h3().geoToH3(point.getY(), point.getX(), precision)); } private Long coordToH3(final Coordinate coord) { return h3().geoToH3(coord.getY(), coord.getX(), precision); } @Override public void handleLineString(final LineString lineString) { final double edgeLengthDegrees = h3().edgeLength(precision, LengthUnit.km) / KM_PER_DEGREE; internalHandlePolygon((Polygon) lineString.buffer(edgeLengthDegrees)); // this is an under-approximation, but turns out just as poor of an approximation as the above // logic and should be much faster (doing both actually improves accuracy a 
bit, albeit more // expensive) final Coordinate[] coords = lineString.getCoordinates(); if (coords.length > 1) { Coordinate prev = coords[0]; for (int i = 1; i < coords.length; i++) { try { ids.addAll(h3().h3Line(coordToH3(prev), coordToH3(coords[i]))); } catch (final LineUndefinedException e) { LOGGER.error("Unable to add H3 line for " + lineString, e); } prev = coords[i]; } } else if (coords.length == 1) { ids.add(coordToH3(coords[0])); } } private void internalHandlePolygon(final Polygon polygon) { final int numInteriorRings = polygon.getNumInteriorRing(); final List idsToAdd; if (numInteriorRings > 0) { final List> holes = new ArrayList<>(numInteriorRings); for (int i = 0; i < numInteriorRings; i++) { holes.add( Arrays.stream(polygon.getInteriorRingN(i).getCoordinates()).map( c -> new GeoCoord(c.getY(), c.getX())).collect(Collectors.toList())); } idsToAdd = h3().polyfill( Arrays.stream(polygon.getExteriorRing().getCoordinates()).map( c -> new GeoCoord(c.getY(), c.getX())).collect(Collectors.toList()), holes, precision); } else { idsToAdd = h3().polyfill( Arrays.stream(polygon.getExteriorRing().getCoordinates()).map( c -> new GeoCoord(c.getY(), c.getX())).collect(Collectors.toList()), null, precision); } if (idsToAdd.isEmpty()) { // given the approximations involved with H3 this is still a slight possibility, even given // our geometric buffering to circumvent the approximations handlePoint(polygon.getCentroid()); } else { ids.addAll(idsToAdd); } } @Override public void handlePolygon(final Polygon polygon) { // the H3 APIs is an under-approximation - it only returns hexagons whose center is inside the // polygon, *not* all hexagons that intersect the polygon // by buffering the polygon by the approximation of the edge length we can at least get closer // to all the intersections if (hasBeenBuffered) { internalHandlePolygon(polygon); } else { final double edgeLengthDegrees = h3().edgeLength(precision, LengthUnit.km) / KM_PER_DEGREE; final H3GeometryHandler 
handler = new H3GeometryHandler(precision, true); GeometryUtils.visitGeometry(polygon.buffer(edgeLengthDegrees), handler); ids.addAll(handler.ids); } } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/S2BinningHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.binning; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils.GeometryHandler; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import com.google.common.collect.Streams; import com.google.common.geometry.S2Cell; import com.google.common.geometry.S2CellId; import com.google.common.geometry.S2CellUnion; import com.google.common.geometry.S2LatLng; import com.google.common.geometry.S2LatLngRect; import com.google.common.geometry.S2Loop; import com.google.common.geometry.S2PolygonBuilder; import com.google.common.geometry.S2Polyline; import com.google.common.geometry.S2Region; import com.google.common.geometry.S2RegionCoverer; class S2BinningHelper implements SpatialBinningHelper { public S2BinningHelper() { super(); } @Override public ByteArrayConstraints getGeometryConstraints(final Geometry geom, final int precision) { final S2RegionCoverer 
coverer = new S2RegionCoverer(); coverer.setMaxCells(100); // no sense decomposing further than the max precision the stats are binned at coverer.setMaxLevel(precision); final S2CellUnion s2CellUnion = cellCoverage(geom, coverer); return new ExplicitConstraints( Streams.stream(s2CellUnion.iterator()).map( c -> new ByteArrayRange( Lexicoders.LONG.toByteArray(c.rangeMin().id()), Lexicoders.LONG.toByteArray(c.rangeMax().id()))).toArray(ByteArrayRange[]::new)); } @Override public String binToString(final byte[] binId) { final Long id = Lexicoders.LONG.fromByteArray(binId); return new S2CellId(id).toToken(); } private static S2CellUnion cellCoverage(final Geometry geom, final S2RegionCoverer coverer) { // this probably should be equalsTopo for completeness but considering this is a shortcut for // performance anyways, we use equalsExact which should be faster if (geom.equalsExact(geom.getEnvelope())) { final double minx = geom.getEnvelopeInternal().getMinX(); final double maxx = geom.getEnvelopeInternal().getMaxX(); final double miny = geom.getEnvelopeInternal().getMinY(); final double maxy = geom.getEnvelopeInternal().getMaxY(); final S2Region s2Region = new S2LatLngRect(S2LatLng.fromDegrees(miny, minx), S2LatLng.fromDegrees(maxy, maxx)); return coverer.getCovering(s2Region); } else { final S2GeometryHandler geometryHandler = new S2GeometryHandler(coverer); GeometryUtils.visitGeometry(geom, geometryHandler); return geometryHandler.cellUnion; } } @Override public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) { if (geometry instanceof Point) { final Point centroid = geometry.getCentroid(); return new ByteArray[] { new ByteArray( Lexicoders.LONG.toByteArray( S2CellId.fromLatLng( S2LatLng.fromDegrees(centroid.getY(), centroid.getX())).parent( precision).id()))}; } else { return getSpatialBinsComplexGeometry(geometry, precision); } } @Override public Geometry getBinGeometry(final ByteArray bin, final int precision) { final Long id = 
Lexicoders.LONG.fromByteArray(bin.getBytes()); final List coords = IntStream.range(0, 4).mapToObj(i -> new S2Cell(new S2CellId(id)).getVertex(i)).map( S2LatLng::new).map(ll -> new Coordinate(ll.lngDegrees(), ll.latDegrees())).collect( Collectors.toList()); // we need to close it so the first one needs to repeat at the end coords.add(coords.get(0)); return GeometryUtils.GEOMETRY_FACTORY.createPolygon(coords.toArray(new Coordinate[5])); } private static ByteArray[] getSpatialBinsComplexGeometry( final Geometry geometry, final int precision) { final S2RegionCoverer coverer = new S2RegionCoverer(); // for now lets assume 10000 should cover any polygon at the desired precision coverer.setMaxCells(10000); coverer.setMinLevel(precision); coverer.setMaxLevel(precision); final S2CellUnion cellUnion = cellCoverage(geometry, coverer); final ArrayList cellIds = new ArrayList<>(); // because cell unions are automatically normalized (children fully covering a parent get // collapsed into a parent) we need to get the covering at the desired precision so we must // denormalize (this is where memory concerns could come in for abnormally large polygons) cellUnion.denormalize(precision, 1, cellIds); return cellIds.stream().map(S2CellId::id).map(Lexicoders.LONG::toByteArray).map( ByteArray::new).toArray(ByteArray[]::new); } @Override public int getBinByteLength(final int precision) { return Long.BYTES; } private static class S2GeometryHandler implements GeometryHandler { private S2CellUnion cellUnion; private final S2RegionCoverer coverer; public S2GeometryHandler(final S2RegionCoverer coverer) { super(); cellUnion = new S2CellUnion(); this.coverer = coverer; } @Override public void handlePoint(final Point point) { final S2CellUnion newUnion = new S2CellUnion(); final ArrayList cellIds = cellUnion.cellIds(); cellIds.add(S2CellId.fromLatLng(S2LatLng.fromDegrees(point.getY(), point.getX()))); newUnion.initFromCellIds(cellIds); cellUnion = newUnion; } @Override public void 
handleLineString(final LineString lineString) { final S2CellUnion newUnion = new S2CellUnion(); newUnion.getUnion( coverer.getCovering( new S2Polyline( Arrays.stream(lineString.getCoordinates()).map( c -> S2LatLng.fromDegrees(c.getY(), c.getX()).toPoint()).collect( Collectors.toList()))), cellUnion); cellUnion = newUnion; } @Override public void handlePolygon(final Polygon polygon) { // order matters for S2, exterior ring must be counter clockwise and interior must be // clockwise (respecting the right-hand rule) polygon.normalize(); final S2PolygonBuilder bldr = new S2PolygonBuilder(); final int numInteriorRings = polygon.getNumInteriorRing(); if (numInteriorRings > 0) { for (int i = 0; i < numInteriorRings; i++) { final LineString ls = polygon.getInteriorRingN(i); bldr.addLoop( new S2Loop( Arrays.stream(ls.getCoordinates()).map( c -> S2LatLng.fromDegrees(c.getY(), c.getX()).toPoint()).collect( Collectors.toList()))); } } bldr.addLoop( new S2Loop( Arrays.stream(polygon.getExteriorRing().getCoordinates()).map( c -> S2LatLng.fromDegrees(c.getY(), c.getX()).toPoint()).collect( Collectors.toList()))); final S2CellUnion newUnion = new S2CellUnion(); newUnion.getUnion(coverer.getCovering(bldr.assemblePolygon()), cellUnion); cellUnion = newUnion; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/SpatialBinningHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.binning;

import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;
import org.locationtech.jts.geom.Geometry;

/**
 * Package-private delegate contract implemented by each spatial binning scheme (H3, S2, geohash)
 * and fronted by {@link SpatialBinningType}. A "bin" is an opaque byte array identifying one
 * spatial cell at a given precision.
 */
interface SpatialBinningHelper {

  /** Returns the bin ids that the given geometry intersects at the given precision. */
  ByteArray[] getSpatialBins(final Geometry geometry, int precision);

  /**
   * Wraps {@link #getSpatialBins} in explicit query constraints; implementations may override
   * with a cheaper range-based strategy.
   */
  default ByteArrayConstraints getGeometryConstraints(final Geometry geom, final int precision) {
    return new ExplicitConstraints(getSpatialBins(geom, precision));
  }

  /** Reconstructs the cell footprint of a bin as a geometry (inverse of binning). */
  Geometry getBinGeometry(final ByteArray bin, int precision);

  /** Human-readable form of a bin id; defaults to hex when the scheme has no token format. */
  default String binToString(final byte[] binId) {
    return new ByteArray(binId).getHexString();
  }

  /**
   * Byte length of a bin id at the given precision; defaults to {@code precision} itself
   * (one byte per character, as for geohash).
   */
  default int getBinByteLength(final int precision) {
    return precision;
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/binning/SpatialBinningType.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.binning;

import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.jts.geom.Geometry;

/**
 * Public entry point for spatial binning: each constant wires in the helper implementing one
 * binning scheme and delegates every {@link SpatialBinningHelper} operation to it.
 */
public enum SpatialBinningType implements SpatialBinningHelper {
  H3(new H3BinningHelper()), S2(new S2BinningHelper()), GEOHASH(new GeohashBinningHelper());

  // final: enum constants are shared singletons, so the delegate must never be reassigned
  private final SpatialBinningHelper helperDelegate;

  private SpatialBinningType(final SpatialBinningHelper helperDelegate) {
    this.helperDelegate = helperDelegate;
  }

  @Override
  public ByteArray[] getSpatialBins(final Geometry geometry, final int precision) {
    // TODO if geometry is not WGS84 we need to transform it
    return helperDelegate.getSpatialBins(geometry, precision);
  }

  @Override
  public ByteArrayConstraints getGeometryConstraints(final Geometry geom, final int precision) {
    // TODO if geometry is not WGS84 we need to transform it
    return helperDelegate.getGeometryConstraints(geom, precision);
  }

  @Override
  public Geometry getBinGeometry(final ByteArray bin, final int precision) {
    return helperDelegate.getBinGeometry(bin, precision);
  }

  @Override
  public String binToString(final byte[] binId) {
    return helperDelegate.binToString(binId);
  }

  @Override
  public int getBinByteLength(final int precision) {
    return helperDelegate.getBinByteLength(precision);
  }

  // is used by python converter
  public static SpatialBinningType fromString(final String code) {
    for (final SpatialBinningType output : SpatialBinningType.values()) {
      if (output.toString().equalsIgnoreCase(code)) {
        return output;
      }
    }
    return null;
  }
}


================================================
FILE:
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/CommonSpatialOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import javax.annotation.Nullable;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;
import com.beust.jcommander.Parameter;

/**
 * Base JCommander option set shared by the spatial dimensionality types: the index CRS and the
 * geometry encoding precision (bounded, or lossless when full precision is requested).
 */
public abstract class CommonSpatialOptions implements DimensionalityTypeOptions {
  @Parameter(
      names = {"-c", "--crs"},
      required = false,
      description = "The native Coordinate Reference System used within the index. All spatial data will be projected into this CRS for appropriate indexing as needed.")
  protected String crs = GeometryUtils.DEFAULT_CRS_STR;

  @Parameter(
      names = {"-gp", "--geometryPrecision"},
      required = false,
      description = "The maximum precision of the geometry when encoding. Lower precision will save more disk space when encoding. (Between -8 and 7)")
  protected int geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION;

  @Parameter(
      names = {"-fp", "--fullGeometryPrecision"},
      required = false,
      description = "If specified, geometry will be encoded losslessly. Uses more disk space.")
  protected boolean fullGeometryPrecision = false;

  public void setCrs(final String crs) {
    this.crs = crs;
  }

  public String getCrs() {
    return crs;
  }

  /**
   * Sets the bounded precision; a {@code null} argument means "lossless" and flips
   * {@code fullGeometryPrecision} on instead of storing a precision value.
   */
  public void setGeometryPrecision(final @Nullable Integer geometryPrecision) {
    if (geometryPrecision == null) {
      fullGeometryPrecision = true;
    } else {
      fullGeometryPrecision = false;
      this.geometryPrecision = geometryPrecision;
    }
  }

  /** Returns the bounded precision, or {@code null} when lossless encoding was requested. */
  public Integer getGeometryPrecision() {
    if (fullGeometryPrecision) {
      return null;
    } else {
      return geometryPrecision;
    }
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialAttributeIndexProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.AttributeIndex;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.AttributeIndexImpl;
import org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi;
import org.locationtech.jts.geom.Geometry;

/**
 * Provides attribute indices for spatial fields.
 */
public class SpatialAttributeIndexProvider implements AttributeIndexProviderSpi {

  /** Supports any field whose binding class is a JTS {@link Geometry} (or subtype). */
  @Override
  public boolean supportsDescriptor(final FieldDescriptor fieldDescriptor) {
    return Geometry.class.isAssignableFrom(fieldDescriptor.bindingClass());
  }

  /**
   * Builds a spatial attribute index for the given field, honoring the field's CRS when the
   * descriptor carries one (otherwise the default spatial options apply).
   */
  @Override
  public AttributeIndex buildIndex(
      final String indexName,
      final DataTypeAdapter adapter,
      final FieldDescriptor fieldDescriptor) {
    final SpatialOptions options = new SpatialOptions();
    if (fieldDescriptor instanceof SpatialFieldDescriptor) {
      options.setCrs(GeometryUtils.getCrsCode(((SpatialFieldDescriptor) fieldDescriptor).crs()));
    }
    final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(options);
    return new AttributeIndexImpl(
        index.getIndexStrategy(),
        index.getIndexModel(),
        indexName,
        fieldDescriptor.fieldName());
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialDimensionalityTypeProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index; import javax.annotation.Nullable; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimension; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimension; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel; import org.locationtech.geowave.core.geotime.store.dimension.LatitudeField; import org.locationtech.geowave.core.geotime.store.dimension.LongitudeField; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.SpatialIndexUtils; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import 
org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.BasicIndexModel; import org.locationtech.geowave.core.store.index.CustomNameIndex; import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; public class SpatialDimensionalityTypeProvider implements DimensionalityTypeProviderSpi { private static final String DEFAULT_SPATIAL_ID = "SPATIAL_IDX"; public static final int LONGITUDE_BITS = 31; public static final int LATITUDE_BITS = 31; // this is chosen to place metric CRSs always in the same bin public static final double DEFAULT_UNBOUNDED_CRS_INTERVAL = 40075017; public static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS = new NumericDimensionDefinition[] {new LongitudeDefinition(), new LatitudeDefinition(true) // just use the same range for latitude to make square sfc values in // decimal degrees (EPSG:4326) }; @SuppressWarnings("rawtypes") public static NumericDimensionField[] getSpatialFields( final @Nullable Integer geometryPrecision) { return new NumericDimensionField[] { new LongitudeField(geometryPrecision), new LatitudeField(geometryPrecision, true) // just use the same range for latitude to make square sfc values in // decimal degrees (EPSG:4326) }; } @SuppressWarnings("rawtypes") public static NumericDimensionField[] getSpatialTemporalFields( final @Nullable Integer geometryPrecision) { return new NumericDimensionField[] { new LongitudeField(geometryPrecision), new LatitudeField(geometryPrecision, true), new TimeField(Unit.YEAR)}; } public 
SpatialDimensionalityTypeProvider() {} @Override public String getDimensionalityTypeName() { return "spatial"; } @Override public String getDimensionalityTypeDescription() { return "This dimensionality type matches all indices that only require Geometry."; } @Override public SpatialOptions createOptions() { return new SpatialOptions(); } @Override public Index createIndex(final DataStore dataStore, final SpatialOptions options) { return createIndexFromOptions(options); } public static Index createIndexFromOptions(final SpatialOptions options) { NumericDimensionDefinition[] dimensions; boolean isDefaultCRS; String crsCode = null; NumericDimensionField[] fields = null; NumericDimensionField[] fields_temporal = null; final Integer geometryPrecision = options.getGeometryPrecision(); if ((options.crs == null) || options.crs.isEmpty() || options.crs.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR)) { dimensions = SPATIAL_DIMENSIONS; fields = getSpatialFields(geometryPrecision); isDefaultCRS = true; crsCode = "EPSG:4326"; } else { final CoordinateReferenceSystem crs = GeometryUtils.decodeCRS(options.crs); final CoordinateSystem cs = crs.getCoordinateSystem(); isDefaultCRS = false; crsCode = options.crs; dimensions = new NumericDimensionDefinition[cs.getDimension()]; if (options.storeTime) { fields_temporal = new NumericDimensionField[dimensions.length + 1]; for (int d = 0; d < dimensions.length; d++) { final CoordinateSystemAxis csa = cs.getAxis(d); if (!isUnbounded(csa)) { dimensions[d] = new CustomCRSBoundedSpatialDimension( (byte) d, csa.getMinimumValue(), csa.getMaximumValue()); fields_temporal[d] = new CustomCRSSpatialField( (CustomCRSBoundedSpatialDimension) dimensions[d], geometryPrecision, crs); } else { dimensions[d] = new CustomCRSUnboundedSpatialDimension(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d); fields_temporal[d] = new CustomCRSSpatialField( (CustomCRSUnboundedSpatialDimension) dimensions[d], geometryPrecision, crs); } } fields_temporal[dimensions.length] = 
new TimeField(Unit.YEAR); } else { fields = new NumericDimensionField[dimensions.length]; for (int d = 0; d < dimensions.length; d++) { final CoordinateSystemAxis csa = cs.getAxis(d); if (!isUnbounded(csa)) { if (d == 0) { dimensions[d] = new CustomCRSBoundedSpatialDimensionX( csa.getMinimumValue(), csa.getMaximumValue()); fields[d] = new CustomCRSSpatialField( (CustomCRSBoundedSpatialDimensionX) dimensions[d], geometryPrecision, crs); } if (d == 1) { dimensions[d] = new CustomCRSBoundedSpatialDimensionY( csa.getMinimumValue(), csa.getMaximumValue()); fields[d] = new CustomCRSSpatialField( (CustomCRSBoundedSpatialDimensionY) dimensions[d], geometryPrecision, crs); } } else { if (d == 0) { dimensions[d] = new CustomCRSUnboundedSpatialDimensionX(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d); fields[d] = new CustomCRSSpatialField( (CustomCRSUnboundedSpatialDimensionX) dimensions[d], geometryPrecision, crs); } if (d == 1) { dimensions[d] = new CustomCRSUnboundedSpatialDimensionY(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d); fields[d] = new CustomCRSSpatialField( (CustomCRSUnboundedSpatialDimensionY) dimensions[d], geometryPrecision, crs); } } } } } BasicIndexModel indexModel = null; if (isDefaultCRS) { indexModel = new BasicIndexModel( options.storeTime ? getSpatialTemporalFields(geometryPrecision) : getSpatialFields(geometryPrecision)); } else { indexModel = new CustomCrsIndexModel(options.storeTime ? fields_temporal : fields, crsCode); } return new CustomNameIndex( XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy( dimensions, new int[] { // TODO this is only valid for 2D coordinate // systems, again consider the possibility // of being // flexible enough to handle n-dimensions LONGITUDE_BITS, LATITUDE_BITS}, SFCType.HILBERT), indexModel, // TODO append CRS code to ID if its overridden isDefaultCRS ? (options.storeTime ? DEFAULT_SPATIAL_ID + "_TIME" : DEFAULT_SPATIAL_ID) : (options.storeTime ? 
DEFAULT_SPATIAL_ID + "_TIME" : DEFAULT_SPATIAL_ID) + "_" + crsCode.substring(crsCode.indexOf(":") + 1)); } private static boolean isUnbounded(final CoordinateSystemAxis csa) { final double min = csa.getMinimumValue(); final double max = csa.getMaximumValue(); if (!Double.isFinite(max) || !Double.isFinite(min)) { return true; } return false; } public static boolean isSpatial(final Index index) { if (index == null) { return false; } return isSpatial(index.getIndexStrategy()); } public static boolean isSpatial(final NumericIndexStrategy indexStrategy) { if ((indexStrategy == null) || (indexStrategy.getOrderedDimensionDefinitions() == null)) { return false; } final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions(); if (dimensions.length < 2) { return false; } boolean hasLat = false, hasLon = false; for (final NumericDimensionDefinition definition : dimensions) { if (SpatialIndexUtils.isLatitudeDimension(definition)) { hasLat = true; } else if (SpatialIndexUtils.isLongitudeDimension(definition)) { hasLon = true; } } return hasLat && hasLon; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialIndexFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.IndexFilter;

/**
 * Stateless index filter that accepts only indices with spatial dimensions. Having no state, it
 * serializes to nothing.
 */
public class SpatialIndexFilter implements IndexFilter {

  @Override
  public boolean test(Index t) {
    return SpatialIndexUtils.hasSpatialDimensions(t);
  }

  // no state to persist; null is the conventional empty payload here
  @Override
  public byte[] toBinary() {
    return null;
  }

  // nothing to restore
  @Override
  public void fromBinary(byte[] bytes) {}
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import com.beust.jcommander.Parameter;

/**
 * Options for the plain spatial index: the common CRS/precision options plus an optional flag to
 * additionally store temporal values in the index.
 */
public class SpatialOptions extends CommonSpatialOptions {
  @Parameter(
      names = {"--storeTime"},
      required = false,
      description = "The index will store temporal values. This allows it to slightly more efficiently run spatial-temporal queries although if spatial-temporal queries are a common use case, a separate spatial-temporal index is recommended.")
  protected boolean storeTime = false;

  // note: setter is intentionally named without the "set" prefix (existing public API)
  public void storeTime(final boolean storeTime) {
    this.storeTime = storeTime;
  }

  public boolean isStoreTime() {
    return storeTime;
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialTemporalDimensionalityTypeProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index; import java.util.Locale; import javax.annotation.Nullable; import org.apache.commons.lang3.StringUtils; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel; import org.locationtech.geowave.core.geotime.store.dimension.LatitudeField; import org.locationtech.geowave.core.geotime.store.dimension.LongitudeField; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.SpatialIndexUtils; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import 
org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.BasicIndexModel; import org.locationtech.geowave.core.store.index.CustomNameIndex; import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; import com.beust.jcommander.IStringConverter; import com.beust.jcommander.ParameterException; public class SpatialTemporalDimensionalityTypeProvider implements DimensionalityTypeProviderSpi { private static final String DEFAULT_SPATIAL_TEMPORAL_ID_STR = "ST_IDX"; // this is chosen to place metric CRSs always in the same bin public static final double DEFAULT_UNBOUNDED_CRS_INTERVAL = 40075017; // TODO should we use different default IDs for all the different // options, for now lets just use one public static final NumericDimensionDefinition[] SPATIAL_TEMPORAL_DIMENSIONS = new NumericDimensionDefinition[] { new LongitudeDefinition(), new LatitudeDefinition(true), new TimeDefinition(SpatialTemporalOptions.DEFAULT_PERIODICITY)}; @SuppressWarnings("rawtypes") public static NumericDimensionField[] getSpatialTemporalFields( final @Nullable Integer geometryPrecision) { return new NumericDimensionField[] { new LongitudeField(geometryPrecision), new LatitudeField(geometryPrecision, true), new TimeField(SpatialTemporalOptions.DEFAULT_PERIODICITY)}; } public SpatialTemporalDimensionalityTypeProvider() {} @Override public String getDimensionalityTypeName() { return "spatial_temporal"; } @Override public String getDimensionalityTypeDescription() { return "This dimensionality type matches all indices that only require Geometry and Time."; } @Override 
public SpatialTemporalOptions createOptions() { return new SpatialTemporalOptions(); } @Override public Index createIndex(final DataStore dataStore, final SpatialTemporalOptions options) { return createIndexFromOptions(options); } public static Index createIndexFromOptions(final SpatialTemporalOptions options) { NumericDimensionDefinition[] dimensions; NumericDimensionField[] fields = null; CoordinateReferenceSystem crs = null; boolean isDefaultCRS; String crsCode = null; final Integer geometryPrecision = options.getGeometryPrecision(); if ((options.crs == null) || options.crs.isEmpty() || options.crs.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR)) { dimensions = SPATIAL_TEMPORAL_DIMENSIONS; fields = getSpatialTemporalFields(geometryPrecision); isDefaultCRS = true; crsCode = "EPSG:4326"; } else { crs = GeometryUtils.decodeCRS(options.crs); final CoordinateSystem cs = crs.getCoordinateSystem(); isDefaultCRS = false; crsCode = options.crs; dimensions = new NumericDimensionDefinition[cs.getDimension() + 1]; fields = new NumericDimensionField[dimensions.length]; for (int d = 0; d < (dimensions.length - 1); d++) { final CoordinateSystemAxis csa = cs.getAxis(d); if (!isUnbounded(csa)) { if (d == 0) { dimensions[d] = new CustomCRSBoundedSpatialDimensionX(csa.getMinimumValue(), csa.getMaximumValue()); fields[d] = new CustomCRSSpatialField( (CustomCRSBoundedSpatialDimensionX) dimensions[d], geometryPrecision, crs); } if (d == 1) { dimensions[d] = new CustomCRSBoundedSpatialDimensionY(csa.getMinimumValue(), csa.getMaximumValue()); fields[d] = new CustomCRSSpatialField( (CustomCRSBoundedSpatialDimensionY) dimensions[d], geometryPrecision, crs); } } else { if (d == 0) { dimensions[d] = new CustomCRSUnboundedSpatialDimensionX(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d); fields[d] = new CustomCRSSpatialField( (CustomCRSUnboundedSpatialDimensionX) dimensions[d], geometryPrecision, crs); } if (d == 1) { dimensions[d] = new 
CustomCRSUnboundedSpatialDimensionY(DEFAULT_UNBOUNDED_CRS_INTERVAL, (byte) d); fields[d] = new CustomCRSSpatialField( (CustomCRSUnboundedSpatialDimensionY) dimensions[d], geometryPrecision, crs); } } } dimensions[dimensions.length - 1] = new TimeDefinition(options.periodicity); fields[dimensions.length - 1] = new TimeField(options.periodicity); } BasicIndexModel indexModel = null; if (isDefaultCRS) { indexModel = new BasicIndexModel(fields); } else { indexModel = new CustomCrsIndexModel(fields, crsCode); } String combinedArrayID; if (isDefaultCRS) { combinedArrayID = DEFAULT_SPATIAL_TEMPORAL_ID_STR + "_" + options.bias + "_" + options.periodicity; } else { combinedArrayID = DEFAULT_SPATIAL_TEMPORAL_ID_STR + "_" + (crsCode.substring(crsCode.indexOf(":") + 1)) + "_" + options.bias + "_" + options.periodicity; } final String combinedId = combinedArrayID; return new CustomNameIndex( XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy( dimensions, new int[] { options.bias.getSpatialPrecision(), options.bias.getSpatialPrecision(), options.bias.getTemporalPrecision()}, SFCType.HILBERT, options.maxDuplicates), indexModel, combinedId); } private static boolean isUnbounded(final CoordinateSystemAxis csa) { final double min = csa.getMinimumValue(); final double max = csa.getMaximumValue(); if (!Double.isFinite(max) || !Double.isFinite(min)) { return true; } return false; } public static enum Bias { TEMPORAL, BALANCED, SPATIAL; // converter that will be used later public static Bias fromString(final String code) { for (final Bias output : Bias.values()) { if (output.toString().equalsIgnoreCase(code)) { return output; } } return null; } public int getSpatialPrecision() { switch (this) { case SPATIAL: return 25; case TEMPORAL: return 10; case BALANCED: default: return 20; } } public int getTemporalPrecision() { switch (this) { case SPATIAL: return 10; case TEMPORAL: return 40; case BALANCED: default: return 20; } } } public static class BiasConverter implements 
IStringConverter { @Override public Bias convert(final String value) { final Bias convertedValue = Bias.fromString(value); if (convertedValue == null) { throw new ParameterException( "Value " + value + "can not be converted to an index bias. " + "Available values are: " + StringUtils.join(Bias.values(), ", ").toLowerCase(Locale.ENGLISH)); } return convertedValue; } } public static class UnitConverter implements IStringConverter { @Override public Unit convert(final String value) { final Unit convertedValue = Unit.fromString(value); if (convertedValue == null) { throw new ParameterException( "Value " + value + "can not be converted to Unit. " + "Available values are: " + StringUtils.join(Unit.values(), ", ").toLowerCase(Locale.ENGLISH)); } return convertedValue; } } public static boolean isSpatialTemporal(final Index index) { if (index == null) { return false; } return isSpatialTemporal(index.getIndexStrategy()); } public static boolean isSpatialTemporal(final NumericIndexStrategy indexStrategy) { if ((indexStrategy == null) || (indexStrategy.getOrderedDimensionDefinitions() == null)) { return false; } final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions(); if (dimensions.length < 3) { return false; } boolean hasLat = false, hasLon = false, hasTime = false; for (final NumericDimensionDefinition definition : dimensions) { if (definition instanceof TimeDefinition) { hasTime = true; } else if (SpatialIndexUtils.isLatitudeDimension(definition)) { hasLat = true; } else if (SpatialIndexUtils.isLongitudeDimension(definition)) { hasLon = true; } } return hasTime && hasLat && hasLon; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/SpatialTemporalOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias;
import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.BiasConverter;
import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.UnitConverter;
import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;
import com.beust.jcommander.Parameter;

/**
 * JCommander-backed options for configuring a spatial-temporal index: temporal binning period,
 * spatial/temporal precision bias, and the duplicate cap per dimension range.
 */
public class SpatialTemporalOptions extends CommonSpatialOptions {
  // default temporal bin size; NOTE(review): not final, so it is reassignable process-wide —
  // confirm nothing mutates it
  protected static Unit DEFAULT_PERIODICITY = Unit.YEAR;

  // temporal bin size for the time dimension of the index
  @Parameter(
      names = {"--period"},
      required = false,
      description = "The periodicity of the temporal dimension. Because time is continuous, it is binned at this interval.",
      converter = UnitConverter.class)
  protected Unit periodicity = DEFAULT_PERIODICITY;

  // controls how SFC precision bits are split between space and time
  @Parameter(
      names = {"--bias"},
      required = false,
      description = "The bias of the spatial-temporal index. There can be more precision given to time or space if necessary.",
      converter = BiasConverter.class)
  protected Bias bias = Bias.BALANCED;

  // -1 indicates the default duplicate behavior (see description)
  @Parameter(
      names = {"--maxDuplicates"},
      required = false,
      description = "The max number of duplicates per dimension range. The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because its 2 dimensions, and line/poly time range data would be 8).")
  protected long maxDuplicates = -1;

  public void setPeriodicity(final Unit periodicity) {
    this.periodicity = periodicity;
  }

  public Unit getPeriodicity() {
    return periodicity;
  }

  public void setBias(final Bias bias) {
    this.bias = bias;
  }

  public Bias getBias() {
    return bias;
  }

  public void setMaxDuplicates(final long maxDuplicates) {
    this.maxDuplicates = maxDuplicates;
  }

  public long getMaxDuplicates() {
    return maxDuplicates;
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/TemporalAttributeIndexProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import java.util.Calendar;
import java.util.Date;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.AttributeIndex;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.AttributeIndexImpl;
import org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi;

/**
 * Provides attribute indices for temporal fields.
 */
public class TemporalAttributeIndexProvider implements AttributeIndexProviderSpi {
  /**
   * @return true when the field binds to a {@link Calendar} or {@link Date} (sub)type
   */
  @Override
  public boolean supportsDescriptor(final FieldDescriptor fieldDescriptor) {
    return Calendar.class.isAssignableFrom(fieldDescriptor.bindingClass())
        || Date.class.isAssignableFrom(fieldDescriptor.bindingClass());
  }

  /**
   * Builds an attribute index over the given temporal field, reusing the strategy and model of a
   * temporal index configured without time-range support (single time instants only).
   */
  @Override
  public AttributeIndex buildIndex(
      final String indexName,
      final DataTypeAdapter adapter,
      final FieldDescriptor fieldDescriptor) {
    final TemporalOptions options = new TemporalOptions();
    // attribute values are point-in-time, so the simpler no-range strategy applies
    options.setNoTimeRanges(true);
    final Index index = TemporalDimensionalityTypeProvider.createIndexFromOptions(options);
    return new AttributeIndexImpl(
        index.getIndexStrategy(),
        index.getIndexModel(),
        indexName,
        fieldDescriptor.fieldName());
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/TemporalDimensionalityTypeProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional
information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index; import java.util.Locale; import org.apache.commons.lang3.StringUtils; import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeIndexStrategy; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexFactory; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.BasicIndexModel; import org.locationtech.geowave.core.store.index.CustomNameIndex; import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi; import com.beust.jcommander.IStringConverter; import com.beust.jcommander.ParameterException; public class TemporalDimensionalityTypeProvider implements DimensionalityTypeProviderSpi { private static final String DEFAULT_TEMPORAL_ID_STR = "TIME_IDX"; public static final NumericDimensionDefinition[] TEMPORAL_DIMENSIONS = new NumericDimensionDefinition[] { new 
TimeDefinition(SpatialTemporalOptions.DEFAULT_PERIODICITY)}; public static final NumericDimensionField[] TEMPORAL_FIELDS = new NumericDimensionField[] {new TimeField(SpatialTemporalOptions.DEFAULT_PERIODICITY)}; public TemporalDimensionalityTypeProvider() {} @Override public String getDimensionalityTypeName() { return "temporal"; } @Override public String getDimensionalityTypeDescription() { return "This dimensionality type matches all indices that only require Time."; } @Override public TemporalOptions createOptions() { return new TemporalOptions(); } @Override public Index createIndex(final DataStore dataStore, final TemporalOptions options) { return createIndexFromOptions(options); } public static Index createIndexFromOptions(final TemporalOptions options) { if (!options.noTimeRanges) { final NumericDimensionDefinition[] dimensions = TEMPORAL_DIMENSIONS; final NumericDimensionField[] fields = TEMPORAL_FIELDS; dimensions[dimensions.length - 1] = new TimeDefinition(options.periodicity); fields[dimensions.length - 1] = new TimeField(options.periodicity); final BasicIndexModel indexModel = new BasicIndexModel(fields); final String combinedArrayID = DEFAULT_TEMPORAL_ID_STR + "_" + options.periodicity; return new CustomNameIndex( XZHierarchicalIndexFactory.createFullIncrementalTieredStrategy( dimensions, new int[] {63}, SFCType.HILBERT, options.maxDuplicates), indexModel, combinedArrayID); } final BasicIndexModel indexModel = new BasicIndexModel( new NumericDimensionField[] { new BasicNumericDimensionField<>(TimeField.DEFAULT_FIELD_ID, Long.class)}); return new CustomNameIndex(new SimpleTimeIndexStrategy(), indexModel, DEFAULT_TEMPORAL_ID_STR); } public static class UnitConverter implements IStringConverter { @Override public Unit convert(final String value) { final Unit convertedValue = Unit.fromString(value); if (convertedValue == null) { throw new ParameterException( "Value " + value + "can not be converted to Unit. 
" + "Available values are: " + StringUtils.join(Unit.values(), ", ").toLowerCase(Locale.ENGLISH)); } return convertedValue; } } public static boolean isTemporal(final Index index) { if (index == null) { return false; } return isTemporal(index.getIndexStrategy()); } public static boolean isTemporal(final NumericIndexStrategy indexStrategy) { if ((indexStrategy == null) || (indexStrategy.getOrderedDimensionDefinitions() == null)) { return false; } final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions(); if (dimensions.length < 1) { return false; } for (final NumericDimensionDefinition definition : dimensions) { if ((definition instanceof TimeDefinition) || (definition instanceof SimpleTimeDefinition)) { return true; } } return false; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/TemporalOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index;

import org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider.UnitConverter;
import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;
import org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;
import com.beust.jcommander.Parameter;

/**
 * JCommander-backed options for configuring a temporal index: binning period, whether time-range
 * support can be dropped for efficiency, and the duplicate cap per dimension range.
 */
public class TemporalOptions implements DimensionalityTypeOptions {
  // default temporal bin size; NOTE(review): not final, so it is reassignable process-wide —
  // confirm nothing mutates it
  protected static Unit DEFAULT_PERIODICITY = Unit.YEAR;

  // temporal bin size for the time dimension of the index
  @Parameter(
      names = {"--period"},
      required = false,
      description = "The periodicity of the temporal dimension. Because time is continuous, it is binned at this interval.",
      converter = UnitConverter.class)
  protected Unit periodicity = DEFAULT_PERIODICITY;

  // when true, a simpler instant-only strategy is used instead of the range-capable one
  @Parameter(
      names = {"--noTimeRange"},
      required = false,
      description = "The time index can be more efficient if time ranges don't need to be supported.")
  protected boolean noTimeRanges = false;

  // -1 indicates the default duplicate behavior (see description)
  @Parameter(
      names = {"--maxDuplicates"},
      required = false,
      description = "The max number of duplicates per dimension range. The default is 2 per range (for example lines and polygon timestamp data would be up to 4 because its 2 dimensions, and line/poly time range data would be 8).")
  protected long maxDuplicates = -1;

  public long getMaxDuplicates() {
    return maxDuplicates;
  }

  public Unit getPeriodicity() {
    return periodicity;
  }

  // inverse of the noTimeRanges flag, exposed positively for readability
  public boolean isSupportTimeRanges() {
    return !noTimeRanges;
  }

  public void setPeriodicity(final Unit periodicity) {
    this.periodicity = periodicity;
  }

  public void setNoTimeRanges(final boolean noTimeRanges) {
    this.noTimeRanges = noTimeRanges;
  }

  public void setMaxDuplicates(final long maxDuplicates) {
    this.maxDuplicates = maxDuplicates;
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/api/SpatialIndexBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.api; import javax.annotation.Nullable; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.BaseIndexBuilder; public class SpatialIndexBuilder extends BaseIndexBuilder { private final SpatialOptions options; public SpatialIndexBuilder() { super(); options = new SpatialOptions(); } public SpatialIndexBuilder setIncludeTimeInCommonIndexModel(final boolean storeTime) { options.storeTime(storeTime); return this; } public SpatialIndexBuilder setGeometryPrecision(@Nullable final Integer precision) { options.setGeometryPrecision(precision); return this; } public SpatialIndexBuilder setCrs(final String crs) { options.setCrs(crs); return this; } @Override public Index createIndex() { return createIndex(SpatialDimensionalityTypeProvider.createIndexFromOptions(options)); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/api/SpatialTemporalIndexBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.api; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.Bias; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.BaseIndexBuilder; public class SpatialTemporalIndexBuilder extends BaseIndexBuilder { private final SpatialTemporalOptions options; public SpatialTemporalIndexBuilder() { options = new SpatialTemporalOptions(); } public SpatialTemporalIndexBuilder setBias(final Bias bias) { options.setBias(bias); return this; } public SpatialTemporalIndexBuilder setPeriodicity(final Unit periodicity) { options.setPeriodicity(periodicity); return this; } public SpatialTemporalIndexBuilder setMaxDuplicates(final long maxDuplicates) { options.setMaxDuplicates(maxDuplicates); return this; } public SpatialTemporalIndexBuilder setCrs(final String crs) { options.setCrs(crs); return this; } @Override public Index createIndex() { return createIndex(SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(options)); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/api/TemporalIndexBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.api; import org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.TemporalOptions; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.BaseIndexBuilder; public class TemporalIndexBuilder extends BaseIndexBuilder { private final TemporalOptions options; public TemporalIndexBuilder() { options = new TemporalOptions(); } public TemporalIndexBuilder setSupportsTimeRanges(final boolean supportsTimeRanges) { options.setNoTimeRanges(!supportsTimeRanges); return this; } public TemporalIndexBuilder setPeriodicity(final Unit periodicity) { options.setPeriodicity(periodicity); return this; } public TemporalIndexBuilder setMaxDuplicates(final long maxDuplicates) { options.setMaxDuplicates(maxDuplicates); return this; } @Override public Index createIndex() { return createIndex(TemporalDimensionalityTypeProvider.createIndexFromOptions(options)); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/LatitudeDefinition.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.dimension;

import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;

/**
 * The Latitude Definition class is a convenience class used to define a dimension which is
 * associated with the Y axis on a Cartesian plane.
 *
 * Minimum bounds = -90 and maximum bounds = 90
 */
public class LatitudeDefinition extends BasicDimensionDefinition {

  /**
   * Convenience constructor used to construct a simple latitude dimension object which sits on a
   * Cartesian plane.
   */
  public LatitudeDefinition() {
    this(false);
  }

  /**
   * Convenience constructor used to construct a simple latitude dimension object which sits on a
   * Cartesian plane. You can pass in a flag to use half the range if you want square SFC IDs in
   * decimal degree latitudes and longitudes
   */
  public LatitudeDefinition(final boolean useHalfRange) {
    // when useHalfRange is set the dimension range is widened to [-180, 180] so latitude spans
    // the same extent as longitude (latitude then occupies half of the range)
    super(useHalfRange ? -180 : -90, useHalfRange ? 180 : 90);
  }

  @Override
  protected double clamp(final double x) {
    // continue to clamp values between -90 and 90 regardless of whether
    // we're using half the range
    return clamp(x, -90, 90);
  }

  @Override
  public byte[] toBinary() {
    // single-byte encoding: 0 = normal [-90, 90] range, 1 = widened [-180, 180] range
    return new byte[] {(byte) (((min > -180) && (max < 180)) ? 0 : 1)};
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    // defaults to the normal range when no payload is present
    if ((bytes != null) && (bytes.length > 0)) {
      if (bytes[0] == (byte) 1) {
        // this implies we just want to use half the range
        min = -180;
        max = 180;
      }
    }
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/LongitudeDefinition.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.dimension;

import org.locationtech.geowave.core.index.FloatCompareUtils;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.numeric.NumericData;

/**
 * The Longitude Definition class is a convenience class used to define a dimension which is
 * associated with the X axis on a Cartesian plane.
 *
 * Minimum bounds = -180 and maximum bounds = 180
 */
public class LongitudeDefinition extends BasicDimensionDefinition {

  /**
   * Convenience constructor used to construct a longitude dimension object which sits on a
   * Cartesian plane.
   */
  public LongitudeDefinition() {
    super(-180, 180);
  }

  /**
   * Method is used to normalize the ranges on a Cartesian plane. If the values are outside of the
   * bounds [ -180, 180 ], two Bin ranges might be created to account for the possible date line
   * crossing.
   *
   * @param range the numeric range of our data set
   * @return new BinRange[] object
   */
  @Override
  public BinRange[] getNormalizedRanges(final NumericData range) {
    // null input yields no ranges rather than an NPE
    if (range == null) {
      return new BinRange[0];
    }
    // if the range is a single value, clamp at -180, 180
    if (FloatCompareUtils.checkDoublesEqual(range.getMin(), range.getMax())) {
      return super.getNormalizedRanges(range);
    }
    // if its a range, treat values outside of (-180,180) as possible date
    // line crossing
    final double normalizedMin = getNormalizedLongitude(range.getMin());
    final double normalizedMax = getNormalizedLongitude(range.getMax());
    // If the normalized max is less than normalized min, the range
    // crosses the date line
    // also, special case min=0, max=-1 as this is used within JTS as the
    // envelope for empty geometry and we don't want empty geometry
    // interpreted as a dateline crossing
    if ((normalizedMax < normalizedMin)
        && !((FloatCompareUtils.checkDoublesEqual(normalizedMax, -1)
            && (FloatCompareUtils.checkDoublesEqual(normalizedMin, 0))))) {
      // split into two bins, one on each side of the dateline
      return new BinRange[] {
          new BinRange(-180, normalizedMax),
          new BinRange(normalizedMin, 180)};
    }
    return new BinRange[] {new BinRange(normalizedMin, normalizedMax)};
  }

  /**
   * Normalizes a longitude value
   *
   * @param lon value to normalize
   * @return a normalized longitude value
   */
  public static double getNormalizedLongitude(final double lon) {
    // values already in [-180, 180] are returned unchanged
    if ((lon <= 180) && (lon >= -180)) {
      return lon;
    }
    // the sign of the mod should be the sign of the dividend, but just in
    // case guarantee a mod on a positive dividend and subtract 180
    final double offsetLon = lon + 180;
    return (((Math.ceil(Math.abs(offsetLon) / 360) * 360) + offsetLon) % 360) - 180;
  }

  @Override
  public byte[] toBinary() {
    // essentially all that is needed is the class name for reflection
    return new byte[] {};
  }

  @Override
  public void fromBinary(final byte[] bytes) {}
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/SimpleTimeDefinition.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.dimension;

import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;

/** Time dimension spanning the full long range, for instant-only (no range) time indexing. */
public class SimpleTimeDefinition extends BasicDimensionDefinition {
  public SimpleTimeDefinition() {
    // full long range: epoch millis of any representable instant fall within bounds
    super(Long.MIN_VALUE, Long.MAX_VALUE);
  }

  @Override
  public byte[] toBinary() {
    // stateless: the class name alone is sufficient for reflection-based persistence
    return new byte[0];
  }

  @Override
  public void fromBinary(final byte[] bytes) {}
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/SimpleTimeIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved.
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.dimension;

import org.locationtech.geowave.core.index.simple.SimpleLongIndexStrategy;

/**
 * Index strategy for instant-only temporal indexing: a plain long (epoch-millis) strategy over a
 * {@link SimpleTimeDefinition} spanning the full long range.
 */
public class SimpleTimeIndexStrategy extends SimpleLongIndexStrategy {

  public SimpleTimeIndexStrategy() {
    super(new SimpleTimeDefinition());
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/TemporalBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.dimension; import java.nio.ByteBuffer; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.TimeZone; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.dimension.bin.BinValue; import org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.threeten.extra.Interval; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * This class is useful for establishing a consistent binning strategy using a unit of time. Each * bin will then be defined by the boundaries of that unit within the timezone given in the * constructor. So if the unit is year and the data spreads across 2011-2013, the bins will be 2011, * 2012, and 2013. The unit chosen should represent a much more significant range than the average * query range (at least 20x larger) for efficiency purposes. So if the average query is for a 24 * hour period, the unit should not be a day, but could be perhaps a month or a year (depending on * the temporal extent of the dataset). 
*/ public class TemporalBinningStrategy implements IndexBinningStrategy { public static enum Unit { MINUTE(Calendar.MINUTE), HOUR(Calendar.HOUR_OF_DAY), DAY(Calendar.DAY_OF_MONTH), WEEK(Calendar.WEEK_OF_YEAR), MONTH(Calendar.MONTH), YEAR(Calendar.YEAR), DECADE(-1); // java.util.Calendar does not define a field number for decade // use -1 since that value is unused private final int calendarEnum; private Unit(final int calendarEnum) { this.calendarEnum = calendarEnum; } public int toCalendarEnum() { return calendarEnum; } public static Unit getUnit(final int calendarEnum) { for (final Unit u : values()) { if (u.calendarEnum == calendarEnum) { return u; } } throw new IllegalArgumentException( "Calendar enum '" + calendarEnum + "' not found as a valid unit "); } // converter that will be used later public static Unit fromString(final String code) { for (final Unit output : Unit.values()) { if (output.toString().equalsIgnoreCase(code)) { return output; } } return null; } } protected static final long MILLIS_PER_DAY = 86400000L; private static final NumberFormat TWO_DIGIT_NUMBER = NumberFormat.getIntegerInstance(); { TWO_DIGIT_NUMBER.setMinimumIntegerDigits(2); TWO_DIGIT_NUMBER.setMaximumIntegerDigits(2); } private Unit unit; private String timezone; public TemporalBinningStrategy() {} public TemporalBinningStrategy(final Unit unit) { this(unit, "GMT"); } public TemporalBinningStrategy(final Unit unit, final String timezone) { this.unit = unit; this.timezone = timezone; } @Override public double getBinMin() { return 0; } @Override public double getBinMax() { return getBinSizeMillis() - 1; } /** Method used to bin a raw date in milliseconds to a binned value of the Binning Strategy. 
*/ @Override public BinValue getBinnedValue(final double value) { // convert to a calendar and subtract the epoch for the bin final Calendar epochCal = Calendar.getInstance(TimeZone.getTimeZone(timezone)); epochCal.setTimeInMillis((long) value); setToEpoch(epochCal); // use the value to get the bin ID (although the epoch should work fine // too) final Calendar valueCal = Calendar.getInstance(TimeZone.getTimeZone(timezone)); valueCal.setTimeInMillis((long) value); return new BinValue( getBinId(valueCal), valueCal.getTimeInMillis() - epochCal.getTimeInMillis()); } private long getBinSizeMillis() { long binSizeMillis = MILLIS_PER_DAY; // use the max possible value for that unit as the bin size switch (unit) { case DECADE: binSizeMillis *= 3653; break; case YEAR: default: binSizeMillis *= 366; break; case MONTH: binSizeMillis *= 31; break; case WEEK: binSizeMillis *= 7; break; case DAY: break; case HOUR: binSizeMillis /= 24; break; case MINUTE: binSizeMillis /= 1440; break; } return binSizeMillis; } @SuppressFBWarnings( value = {"SF_SWITCH_FALLTHROUGH", "SF_SWITCH_NO_DEFAULT"}, justification = "Fallthrough intentional for time parsing; default case is provided") protected void setToEpoch(final Calendar value) { // reset appropriate values to 0 based on the unit switch (unit) { case DECADE: value.set(Calendar.YEAR, ((value.get(Calendar.YEAR) / 10) * 10)); // don't break so that the other fields are also set to the // minimum case YEAR: default: value.set(Calendar.MONTH, value.getActualMinimum(Calendar.MONTH)); // don't break so that the other fields are also set to the // minimum case MONTH: value.set(Calendar.DAY_OF_MONTH, value.getActualMinimum(Calendar.DAY_OF_MONTH)); // don't break so that the other fields are also set to the // minimum case DAY: value.set(Calendar.HOUR_OF_DAY, value.getActualMinimum(Calendar.HOUR_OF_DAY)); // don't break so that the other fields are also set to the // minimum case HOUR: value.set(Calendar.MINUTE, 
value.getActualMinimum(Calendar.MINUTE)); // don't break so that the other fields are also set to the // minimum case MINUTE: value.set(Calendar.SECOND, value.getActualMinimum(Calendar.SECOND)); value.set(Calendar.MILLISECOND, value.getActualMinimum(Calendar.MILLISECOND)); break; // special handling for week case WEEK: value.set(Calendar.DAY_OF_WEEK, value.getActualMinimum(Calendar.DAY_OF_WEEK)); value.set(Calendar.HOUR_OF_DAY, value.getActualMinimum(Calendar.HOUR_OF_DAY)); value.set(Calendar.MINUTE, value.getActualMinimum(Calendar.MINUTE)); value.set(Calendar.SECOND, value.getActualMinimum(Calendar.SECOND)); value.set(Calendar.MILLISECOND, value.getActualMinimum(Calendar.MILLISECOND)); } } @Override public int getFixedBinIdSize() { switch (unit) { case YEAR: default: return 4; case MONTH: return 7; case WEEK: return 7; case DAY: return 10; case HOUR: return 13; case MINUTE: return 16; } } public byte[] getBinId(final long millis) { final Calendar valueCal = Calendar.getInstance(TimeZone.getTimeZone(timezone)); valueCal.setTimeInMillis(millis); return getBinId(valueCal); } private byte[] getBinId(final Calendar value) { // this is assuming we want human-readable bin ID's but alternatively we // could consider returning a more compressed representation switch (unit) { case YEAR: default: return StringUtils.stringToBinary(Integer.toString(value.get(Calendar.YEAR))); case MONTH: return StringUtils.stringToBinary( (Integer.toString(value.get(Calendar.YEAR)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH)))); case WEEK: return StringUtils.stringToBinary( Integer.toString(value.getWeekYear()) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.WEEK_OF_YEAR))); case DAY: return StringUtils.stringToBinary( (Integer.toString(value.get(Calendar.YEAR)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH)))); case HOUR: return StringUtils.stringToBinary( (Integer.toString(value.get(Calendar.YEAR)) 
+ "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.HOUR_OF_DAY)))); case MINUTE: return StringUtils.stringToBinary( (Integer.toString(value.get(Calendar.YEAR)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.HOUR_OF_DAY)) + "_" + TWO_DIGIT_NUMBER.format(value.get(Calendar.MINUTE)))); } } @SuppressFBWarnings( value = {"SF_SWITCH_FALLTHROUGH", "SF_SWITCH_NO_DEFAULT"}, justification = "Fallthrough intentional for time parsing") private Calendar getStartEpoch(final byte[] binId) { final String str = StringUtils.stringFromBinary(binId); final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(timezone)); switch (unit) { case MINUTE: final int minute = Integer.parseInt(str.substring(14, 16)); cal.set(Calendar.MINUTE, minute); case HOUR: final int hour = Integer.parseInt(str.substring(11, 13)); cal.set(Calendar.HOUR_OF_DAY, hour); case DAY: final int day = Integer.parseInt(str.substring(8, 10)); cal.set(Calendar.DAY_OF_MONTH, day); case MONTH: final int month = Integer.parseInt(str.substring(5, 7)); cal.set(Calendar.MONTH, month); case YEAR: default: final int year = Integer.parseInt(str.substring(0, 4)); cal.set(Calendar.YEAR, year); break; // do not automatically fall-through to decade parsing case DECADE: int decade = Integer.parseInt(str.substring(0, 4)); decade = (decade / 10) * 10; // int division will truncate ones cal.set(Calendar.YEAR, decade); break; // special handling for week case WEEK: final int yr = Integer.parseInt(str.substring(0, 4)); final int weekOfYear = Integer.parseInt(str.substring(5, 7)); cal.setWeekDate(yr, weekOfYear, cal.getActualMinimum(Calendar.DAY_OF_WEEK)); break; } setToEpoch(cal); return cal; } private Calendar getEndExclusive(final Calendar startOfEpoch) { final Calendar endExclusive 
= Calendar.getInstance(TimeZone.getTimeZone(timezone)); endExclusive.setTime(startOfEpoch.getTime()); switch (unit) { case MINUTE: endExclusive.add(Calendar.MINUTE, 1); return endExclusive; case HOUR: endExclusive.add(Calendar.HOUR_OF_DAY, 1); return endExclusive; case DAY: endExclusive.add(Calendar.DAY_OF_MONTH, 1); return endExclusive; case MONTH: endExclusive.add(Calendar.MONTH, 1); return endExclusive; case DECADE: endExclusive.add(Calendar.YEAR, 10); return endExclusive; case WEEK: endExclusive.add(Calendar.WEEK_OF_YEAR, 1); return endExclusive; case YEAR: default: endExclusive.add(Calendar.YEAR, 1); return endExclusive; } } public BinRange[] getNormalizedRanges(final Interval range) { return getNormalizedRanges(range.getStart().toEpochMilli(), range.getEnd().toEpochMilli()); } private BinRange[] getNormalizedRanges(final long min, final long max) { final Calendar startEpoch = Calendar.getInstance(TimeZone.getTimeZone(timezone)); final long binSizeMillis = getBinSizeMillis(); // initialize the epoch to the range min and then reset appropriate // values to 0 based on the units startEpoch.setTimeInMillis(min); setToEpoch(startEpoch); // now make sure all bin definitions between the start and end bins // are covered final long startEpochMillis = startEpoch.getTimeInMillis(); long epochIterator = startEpochMillis; final List bins = new ArrayList<>(); // track this, so that we can easily declare a range to be the full // extent and use the information to perform a more efficient scan boolean firstBin = (min != startEpochMillis); boolean lastBin = false; do { // because not every year has 366 days, and not every month has 31 // days we need to reset next epoch to the actual epoch final Calendar nextEpochCal = Calendar.getInstance(TimeZone.getTimeZone(timezone)); // set it to a value in the middle of the bin just to be sure (for // example if the bin size does not get to the next epoch as is // the case when units are days and the timezone accounts for // daylight 
savings time) nextEpochCal.setTimeInMillis(epochIterator + (long) (binSizeMillis * 1.5)); setToEpoch(nextEpochCal); final long nextEpoch = nextEpochCal.getTimeInMillis(); final long maxOfBin = nextEpoch - 1; final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(timezone)); cal.setTimeInMillis(epochIterator); long startMillis, endMillis; boolean fullExtent; if (max <= maxOfBin) { lastBin = true; endMillis = max; // its questionable whether we use fullExtent = (max == maxOfBin); } else { endMillis = maxOfBin; fullExtent = !firstBin; } if (firstBin) { startMillis = min; firstBin = false; } else { startMillis = epochIterator; } // we have the millis for range, but to normalize for this bin we // need to subtract the epoch of the bin bins.add( new BinRange( getBinId(cal), startMillis - epochIterator, endMillis - epochIterator, fullExtent)); epochIterator = nextEpoch; // iterate until we reach our end epoch } while (!lastBin); return bins.toArray(new BinRange[bins.size()]); } @Override public BinRange[] getNormalizedRanges(final NumericData range) { if ((range == null) || (range.getMax() < range.getMin())) { return new BinRange[] {}; } return getNormalizedRanges(range.getMin().longValue(), range.getMax().longValue()); } @Override public byte[] toBinary() { final byte[] timeZone = StringUtils.stringToBinary(timezone); final ByteBuffer binary = ByteBuffer.allocate(timezone.length() + VarintUtils.signedIntByteLength(unit.calendarEnum)); VarintUtils.writeSignedInt(unit.calendarEnum, binary); binary.put(timeZone); return binary.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final int unitCalendarEnum = VarintUtils.readSignedInt(buffer); final byte[] timeZoneName = new byte[buffer.remaining()]; buffer.get(timeZoneName); unit = Unit.getUnit(unitCalendarEnum); timezone = StringUtils.stringFromBinary(timeZoneName); } @Override public int hashCode() { final int prime = 31; int result = 1; final String 
className = getClass().getName(); result = (prime * result) + ((className == null) ? 0 : className.hashCode()); result = (prime * result) + ((timezone == null) ? 0 : timezone.hashCode()); result = (prime * result) + ((unit == null) ? 0 : unit.calendarEnum); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TemporalBinningStrategy other = (TemporalBinningStrategy) obj; if (timezone == null) { if (other.timezone != null) { return false; } } else if (!timezone.equals(other.timezone)) { return false; } if (unit == null) { if (other.unit != null) { return false; } } else if (unit.calendarEnum != other.unit.calendarEnum) { return false; } return true; } @Override public NumericRange getDenormalizedRanges(final BinRange binnedRange) { final Calendar startofEpoch = getStartEpoch(binnedRange.getBinId()); final long startOfEpochMillis = startofEpoch.getTimeInMillis(); final long minMillis = startOfEpochMillis + (long) binnedRange.getNormalizedMin(); final long maxMillis = startOfEpochMillis + (long) binnedRange.getNormalizedMax(); return new NumericRange(minMillis, maxMillis); } public Interval getInterval(final byte[] binId) { final Calendar startOfEpoch = getStartEpoch(binId); return Interval.of(startOfEpoch.toInstant(), getEndExclusive(startOfEpoch).toInstant()); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/index/dimension/TimeDefinition.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.dimension; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; /** * The Time Definition class is a convenience class used to define a dimension which is associated * with a time dimension. */ public class TimeDefinition extends UnboundedDimensionDefinition { public TimeDefinition() { super(); } /** * Constructor used to create a new Unbounded Binning Strategy based upon a temporal binning * strategy of the unit parameter. The unit can be of DAY, MONTH, or YEAR. * * @param unit an enumeration of temporal units (DAY, MONTH, or YEAR) */ public TimeDefinition(final Unit unit) { super(new TemporalBinningStrategy(unit)); } /** * Constructor used to create a new Unbounded Binning Strategy based upon a generic binning * strategy. 
* * @param binningStrategy a object which defines the bins */ public TimeDefinition(final IndexBinningStrategy binningStrategy) { super(binningStrategy); } @Override public NumericData getFullRange() { return new NumericRange(0, System.currentTimeMillis() + 1); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/GeotoolsFeatureDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public interface GeotoolsFeatureDataAdapter extends DataTypeAdapter { SimpleFeatureType getFeatureType(); TimeDescriptors getTimeDescriptors(); boolean hasTemporalConstraints(); void setNamespace(final String namespaceURI); @Override default InternalDataAdapter asInternalAdapter(final short internalAdapterId) { return new InternalGeotoolsDataAdapterWrapper<>(this, internalAdapterId); } @Override default InternalDataAdapter asInternalAdapter( final short internalAdapterId, final VisibilityHandler visibilityHandler) { return new InternalGeotoolsDataAdapterWrapper<>(this, internalAdapterId, visibilityHandler); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/InternalGeotoolsDataAdapterWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class InternalGeotoolsDataAdapterWrapper extends InternalDataAdapterImpl implements InternalGeotoolsFeatureDataAdapter { public InternalGeotoolsDataAdapterWrapper() { super(); } public InternalGeotoolsDataAdapterWrapper( final GeotoolsFeatureDataAdapter adapter, final short adapterId) { super(adapter, adapterId); } public InternalGeotoolsDataAdapterWrapper( final GeotoolsFeatureDataAdapter adapter, final short adapterId, final VisibilityHandler visibilityHandler) { super(adapter, adapterId, visibilityHandler); } @Override public SimpleFeatureType getFeatureType() { return ((GeotoolsFeatureDataAdapter) adapter).getFeatureType(); } @Override public TimeDescriptors getTimeDescriptors() { return ((GeotoolsFeatureDataAdapter) adapter).getTimeDescriptors(); } @Override public boolean hasTemporalConstraints() { return ((GeotoolsFeatureDataAdapter) adapter).hasTemporalConstraints(); } @Override public void setNamespace(final String namespaceURI) { ((GeotoolsFeatureDataAdapter) adapter).setNamespace(namespaceURI); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/InternalGeotoolsFeatureDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.opengis.feature.simple.SimpleFeature; public interface InternalGeotoolsFeatureDataAdapter extends InternalDataAdapter, GeotoolsFeatureDataAdapter { } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/BaseCustomCRSSpatialDimension.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; class BaseCustomCRSSpatialDimension { protected byte axis; protected BaseCustomCRSSpatialDimension() {} protected BaseCustomCRSSpatialDimension(final byte axis) { this.axis = axis; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + axis; return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final BaseCustomCRSSpatialDimension other = (BaseCustomCRSSpatialDimension) obj; if (axis != other.axis) { return false; } return true; } public byte[] addAxisToBinary(final byte[] parentBinary) { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // serialize axis final byte[] retVal = new byte[parentBinary.length + 1]; System.arraycopy(parentBinary, 0, retVal, 0, parentBinary.length); retVal[parentBinary.length] = axis; return retVal; } public byte[] getAxisFromBinaryAndRemove(final byte[] bytes) { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // deserialize axis final byte[] parentBinary = new byte[bytes.length - 1]; System.arraycopy(bytes, 0, parentBinary, 0, parentBinary.length); axis = bytes[parentBinary.length]; return parentBinary; } public byte getAxis() { return axis; } } ================================================ FILE: 
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSBoundedSpatialDimension.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition; public class CustomCRSBoundedSpatialDimension extends BasicDimensionDefinition implements CustomCRSSpatialDimension { private BaseCustomCRSSpatialDimension baseCustomCRS; public CustomCRSBoundedSpatialDimension() {} public CustomCRSBoundedSpatialDimension(final byte axis, final double min, final double max) { super(min, max); baseCustomCRS = new BaseCustomCRSSpatialDimension(axis); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomCRSBoundedSpatialDimension other = (CustomCRSBoundedSpatialDimension) obj; if (baseCustomCRS == null) { if (other.baseCustomCRS != null) { return false; } } else if (!baseCustomCRS.equals(other.baseCustomCRS)) { return false; } return true; } @Override public byte[] toBinary() { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // serialize axis return baseCustomCRS.addAxisToBinary(super.toBinary()); } @Override public void fromBinary(final byte[] bytes) { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // deserialize axis baseCustomCRS = new BaseCustomCRSSpatialDimension(); super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes)); } @Override public byte getAxis() { return baseCustomCRS.getAxis(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSBoundedSpatialDimensionX.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; public class CustomCRSBoundedSpatialDimensionX extends CustomCRSBoundedSpatialDimension { public CustomCRSBoundedSpatialDimensionX() {} public CustomCRSBoundedSpatialDimensionX(final double min, final double max) { super((byte) 0, min, max); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSBoundedSpatialDimensionY.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; public class CustomCRSBoundedSpatialDimensionY extends CustomCRSBoundedSpatialDimension { public CustomCRSBoundedSpatialDimensionY() {} public CustomCRSBoundedSpatialDimensionY(final double min, final double max) { super((byte) 1, min, max); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSSpatialDimension.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; public interface CustomCRSSpatialDimension extends NumericDimensionDefinition { public byte getAxis(); } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSSpatialField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import java.util.Set; import javax.annotation.Nullable; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.crs.CoordinateReferenceSystem; import com.google.common.collect.Sets; public class CustomCRSSpatialField extends SpatialField { public CustomCRSSpatialField() {} public CustomCRSSpatialField( final CustomCRSSpatialDimension baseDefinition, final @Nullable Integer geometryPrecision, final @Nullable CoordinateReferenceSystem crs) { super(baseDefinition, geometryPrecision, crs); } @Override public NumericData getNumericData(final Geometry geometry) { // TODO if this can be generalized to n-dimensional that would be better if (((CustomCRSSpatialDimension) baseDefinition).getAxis() == 0) { return GeometryUtils.xRangeFromGeometry(geometry); } return GeometryUtils.yRangeFromGeometry(geometry); } @Override public Set getDimensionHints() { if (((CustomCRSSpatialDimension) baseDefinition).getAxis() == 0) { return Sets.newHashSet(SpatialField.LONGITUDE_DIMENSION_HINT); } return Sets.newHashSet(SpatialField.LATITUDE_DIMENSION_HINT); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSUnboundedSpatialDimension.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy; public class CustomCRSUnboundedSpatialDimension extends UnboundedDimensionDefinition implements CustomCRSSpatialDimension { private BaseCustomCRSSpatialDimension baseCustomCRS; public CustomCRSUnboundedSpatialDimension() { super(); } public CustomCRSUnboundedSpatialDimension(final double interval, final byte axis) { super(new BasicBinningStrategy(interval)); baseCustomCRS = new BaseCustomCRSSpatialDimension(axis); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomCRSUnboundedSpatialDimension other = (CustomCRSUnboundedSpatialDimension) obj; if (baseCustomCRS == null) { if (other.baseCustomCRS != null) { return false; } } else if (!baseCustomCRS.equals(other.baseCustomCRS)) { return false; } return true; } @Override public byte[] toBinary() { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // serialize axis return baseCustomCRS.addAxisToBinary(super.toBinary()); } @Override public void fromBinary(final byte[] bytes) { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // deserialize axis baseCustomCRS = new BaseCustomCRSSpatialDimension(); super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes)); } @Override public byte getAxis() { return baseCustomCRS.getAxis(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSUnboundedSpatialDimensionX.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy; public class CustomCRSUnboundedSpatialDimensionX extends UnboundedDimensionDefinition implements CustomCRSSpatialDimension { private BaseCustomCRSSpatialDimension baseCustomCRS; public CustomCRSUnboundedSpatialDimensionX() { super(); } public CustomCRSUnboundedSpatialDimensionX(final double interval, final byte axis) { super(new BasicBinningStrategy(interval)); baseCustomCRS = new BaseCustomCRSSpatialDimension(axis); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomCRSUnboundedSpatialDimensionX other = (CustomCRSUnboundedSpatialDimensionX) obj; if (baseCustomCRS == null) { if (other.baseCustomCRS != null) { return false; } } else if (!baseCustomCRS.equals(other.baseCustomCRS)) { return false; } return true; } @Override public byte[] toBinary() { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // serialize axis return baseCustomCRS.addAxisToBinary(super.toBinary()); } @Override public void fromBinary(final byte[] bytes) { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // deserialize axis baseCustomCRS = new BaseCustomCRSSpatialDimension(); super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes)); } @Override public byte getAxis() { return baseCustomCRS.getAxis(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCRSUnboundedSpatialDimensionY.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy; public class CustomCRSUnboundedSpatialDimensionY extends UnboundedDimensionDefinition implements CustomCRSSpatialDimension { private BaseCustomCRSSpatialDimension baseCustomCRS; public CustomCRSUnboundedSpatialDimensionY() { super(); } public CustomCRSUnboundedSpatialDimensionY(final double interval, final byte axis) { super(new BasicBinningStrategy(interval)); baseCustomCRS = new BaseCustomCRSSpatialDimension(axis); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + ((baseCustomCRS == null) ? 
0 : baseCustomCRS.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomCRSUnboundedSpatialDimensionY other = (CustomCRSUnboundedSpatialDimensionY) obj; if (baseCustomCRS == null) { if (other.baseCustomCRS != null) { return false; } } else if (!baseCustomCRS.equals(other.baseCustomCRS)) { return false; } return true; } @Override public byte[] toBinary() { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // serialize axis return baseCustomCRS.addAxisToBinary(super.toBinary()); } @Override public void fromBinary(final byte[] bytes) { // TODO future issue to investigate performance improvements associated // with excessive array/object allocations // deserialize axis baseCustomCRS = new BaseCustomCRSSpatialDimension(); super.fromBinary(baseCustomCRS.getAxisFromBinaryAndRemove(bytes)); } @Override public byte getAxis() { return baseCustomCRS.getAxis(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/CustomCrsIndexModel.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.BasicIndexModel; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is a concrete implementation of a common index model. Data adapters will map their * adapter specific fields to these fields that are common for a given index. This way distributable * filters will not need to handle any adapter-specific transformation, but can use the common index * fields. 
*/ public class CustomCrsIndexModel extends BasicIndexModel { private static final Logger LOGGER = LoggerFactory.getLogger(CustomCrsIndexModel.class); private String crsCode; private CoordinateReferenceSystem crs; public CustomCrsIndexModel() {} public CustomCrsIndexModel(final NumericDimensionField[] dimensions, final String crsCode) { init(dimensions); this.crsCode = crsCode; } public CoordinateReferenceSystem getCrs() { if (crs == null) { try { crs = CRS.decode(crsCode, true); } catch (final FactoryException e) { LOGGER.warn("Unable to decode indexed crs", e); } } return crs; } public String getCrsCode() { return crsCode; } @Override public void init(final NumericDimensionField[] dimensions) { super.init(dimensions); } @Override public int hashCode() { final int prime = 31; int result = 1; final String className = getClass().getName(); result = (prime * result) + ((className == null) ? 0 : className.hashCode()); result = (prime * result) + Arrays.hashCode(dimensions); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomCrsIndexModel other = (CustomCrsIndexModel) obj; return Arrays.equals(dimensions, other.dimensions); } @Override public byte[] toBinary() { final byte[] crsCodeBinary = StringUtils.stringToBinary(crsCode); int byteBufferLength = VarintUtils.unsignedIntByteLength(dimensions.length) + VarintUtils.unsignedIntByteLength(crsCodeBinary.length) + crsCodeBinary.length; final List dimensionBinaries = new ArrayList<>(dimensions.length); for (final NumericDimensionField dimension : dimensions) { final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension); byteBufferLength += (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length); dimensionBinaries.add(dimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); 
VarintUtils.writeUnsignedInt(dimensions.length, buf); VarintUtils.writeUnsignedInt(crsCodeBinary.length, buf); for (final byte[] dimensionBinary : dimensionBinaries) { VarintUtils.writeUnsignedInt(dimensionBinary.length, buf); buf.put(dimensionBinary); } buf.put(crsCodeBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); final int crsCodeLength = VarintUtils.readUnsignedInt(buf); dimensions = new NumericDimensionField[numDimensions]; for (int i = 0; i < numDimensions; i++) { final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); dimensions[i] = (NumericDimensionField) PersistenceUtils.fromBinary(dim); } final byte[] codeBytes = ByteArrayUtils.safeRead(buf, crsCodeLength); crsCode = StringUtils.stringFromBinary(codeBytes); init(dimensions); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/LatitudeField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import java.util.Set; import javax.annotation.Nullable; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.jts.geom.Geometry; import com.google.common.collect.Sets; /** * This field can be used as a EPSG:4326 latitude dimension within GeoWave. It can utilize JTS * geometry as the underlying spatial object for this dimension. 
*/ public class LatitudeField extends SpatialField { public LatitudeField() {} public LatitudeField(final @Nullable Integer geometryPrecision, final boolean useHalfRange) { this(new LatitudeDefinition(useHalfRange), geometryPrecision); } public LatitudeField(final @Nullable Integer geometryPrecision) { this(geometryPrecision, false); } public LatitudeField( final NumericDimensionDefinition baseDefinition, final @Nullable Integer geometryPrecision) { super(baseDefinition, geometryPrecision); } @Override public NumericData getNumericData(final Geometry geometry) { return GeometryUtils.yRangeFromGeometry(geometry); } @Override public Set getDimensionHints() { return Sets.newHashSet(SpatialField.LATITUDE_DIMENSION_HINT); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/LongitudeField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import java.util.Set; import javax.annotation.Nullable; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.jts.geom.Geometry; import com.google.common.collect.Sets; /** * This field can be used as a EPSG:4326 longitude dimension within GeoWave. It can utilize JTS * geometry as the underlying spatial object for this dimension. */ public class LongitudeField extends SpatialField { public LongitudeField() {} public LongitudeField(final @Nullable Integer geometryPrecision) { this(new LongitudeDefinition(), geometryPrecision); } public LongitudeField( final NumericDimensionDefinition baseDefinition, final @Nullable Integer geometryPrecision) { super(baseDefinition, geometryPrecision); } @Override public NumericData getNumericData(final Geometry geometry) { return GeometryUtils.xRangeFromGeometry(geometry); } @Override public Set getDimensionHints() { return Sets.newHashSet(SpatialField.LONGITUDE_DIMENSION_HINT); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/SpatialField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import java.nio.ByteBuffer; import javax.annotation.Nullable; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.IndexFieldMapper.IndexFieldOptions; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** A base class for EPSG:4326 latitude/longitude fields that use JTS geometry */ public abstract class SpatialField implements NumericDimensionField { private static final Logger LOGGER = LoggerFactory.getLogger(SpatialField.class); public static final String DEFAULT_GEOMETRY_FIELD_NAME = 
"default_geom_dimension"; public static final IndexDimensionHint LONGITUDE_DIMENSION_HINT = new IndexDimensionHint("LONGITUDE"); public static final IndexDimensionHint LATITUDE_DIMENSION_HINT = new IndexDimensionHint("LATITUDE"); protected NumericDimensionDefinition baseDefinition; private FieldReader geometryReader; private FieldWriter geometryWriter; private Integer geometryPrecision; private CoordinateReferenceSystem crs = GeometryUtils.getDefaultCRS(); protected SpatialField() { this(null, null, null); } protected SpatialField(@Nullable final Integer geometryPrecision) { this(null, geometryPrecision, null); } public SpatialField( final NumericDimensionDefinition baseDefinition, final @Nullable Integer geometryPrecision) { this(baseDefinition, geometryPrecision, null); } public SpatialField( final NumericDimensionDefinition baseDefinition, final @Nullable Integer geometryPrecision, final @Nullable CoordinateReferenceSystem crs) { if (crs != null) { this.crs = crs; } this.baseDefinition = baseDefinition; this.geometryPrecision = geometryPrecision; final GeometrySerializationProvider serialization = new GeometrySerializationProvider(geometryPrecision); geometryReader = serialization.getFieldReader(); geometryWriter = serialization.getFieldWriter(); } public CoordinateReferenceSystem getCRS() { return crs; } public Integer getGeometryPrecision() { return geometryPrecision; } @Override public IndexFieldOptions getIndexFieldOptions() { return new SpatialIndexFieldOptions(crs); } @Override public Class getFieldClass() { return Geometry.class; } @Override public NumericData getFullRange() { return baseDefinition.getFullRange(); } @Override public NumericRange getDenormalizedRange(final BinRange range) { return new NumericRange(range.getNormalizedMin(), range.getNormalizedMax()); } @Override public double getRange() { return baseDefinition.getRange(); } @Override public int getFixedBinIdSize() { return 0; } @Override public NumericRange getBounds() { return 
baseDefinition.getBounds(); } @Override public double normalize(final double value) { return baseDefinition.normalize(value); } @Override public double denormalize(final double value) { return baseDefinition.denormalize(value); } @Override public BinRange[] getNormalizedRanges(final NumericData range) { return baseDefinition.getNormalizedRanges(range); } @Override public String getFieldName() { return DEFAULT_GEOMETRY_FIELD_NAME; } @Override public FieldWriter getWriter() { return geometryWriter; } @Override public FieldReader getReader() { return geometryReader; } @Override public NumericDimensionDefinition getBaseDefinition() { return baseDefinition; } @Override public byte[] toBinary() { final byte[] dimensionBinary = PersistenceUtils.toBinary(baseDefinition); final byte[] crsBinary = StringUtils.stringToBinary(CRS.toSRS(crs)); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedShortByteLength((short) dimensionBinary.length) + dimensionBinary.length + 1 + crsBinary.length); VarintUtils.writeUnsignedShort((short) dimensionBinary.length, buf); buf.put(dimensionBinary); if (geometryPrecision == null) { buf.put(Byte.MAX_VALUE); } else { buf.put((byte) geometryPrecision.intValue()); } buf.put(crsBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] dimensionBinary = new byte[VarintUtils.readUnsignedShort(buf)]; buf.get(dimensionBinary); baseDefinition = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dimensionBinary); final byte precision = buf.get(); if (precision == Byte.MAX_VALUE) { geometryPrecision = null; } else { geometryPrecision = Integer.valueOf(precision); } final GeometrySerializationProvider serialization = new GeometrySerializationProvider(geometryPrecision); geometryReader = serialization.getFieldReader(); geometryWriter = serialization.getFieldWriter(); final byte[] crsBinary = new byte[buf.remaining()]; buf.get(crsBinary); try { this.crs 
= CRS.decode(StringUtils.stringFromBinary(crsBinary), true); } catch (FactoryException e) { LOGGER.warn("Unable to decode index field CRS"); this.crs = GeometryUtils.getDefaultCRS(); } } @Override public int hashCode() { final int prime = 31; int result = 1; final String className = getClass().getName(); result = (prime * result) + ((className == null) ? 0 : className.hashCode()); result = (prime * result) + ((baseDefinition == null) ? 0 : baseDefinition.hashCode()); result = (prime * result) + ((geometryPrecision == null) ? 0 : geometryPrecision.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SpatialField other = (SpatialField) obj; if (baseDefinition == null) { if (other.baseDefinition != null) { return false; } } else if (!baseDefinition.equals(other.baseDefinition)) { return false; } if (geometryPrecision == null) { if (other.geometryPrecision != null) { return false; } } else if (!geometryPrecision.equals(other.geometryPrecision)) { return false; } return true; } public static class SpatialIndexFieldOptions implements IndexFieldOptions { private final CoordinateReferenceSystem indexCRS; public SpatialIndexFieldOptions(final CoordinateReferenceSystem indexCRS) { this.indexCRS = indexCRS; } public CoordinateReferenceSystem crs() { return this.indexCRS; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/dimension/TimeField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.dimension; import java.nio.ByteBuffer; import java.util.Set; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.numeric.NumericValue; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.threeten.extra.Interval; import com.google.common.collect.Sets; /** * This field definition can be used for temporal data (either as a time range or a single instant * in time). 
*/ public class TimeField implements NumericDimensionField { public static final String DEFAULT_FIELD_ID = "default_time_dimension"; public static final IndexDimensionHint TIME_DIMENSION_HINT = new IndexDimensionHint("TIME"); public static final IndexDimensionHint START_TIME_DIMENSION_HINT = new IndexDimensionHint("START_TIME"); public static final IndexDimensionHint END_TIME_DIMENSION_HINT = new IndexDimensionHint("END_TIME"); private NumericDimensionDefinition baseDefinition; private final FieldReader reader; private final FieldWriter writer; private String fieldName; public TimeField() { final IntervalSerializationProvider serializationProvider = new IntervalSerializationProvider(); reader = serializationProvider.getFieldReader(); writer = serializationProvider.getFieldWriter(); fieldName = DEFAULT_FIELD_ID; } public TimeField(final Unit timeUnit) { this(timeUnit, DEFAULT_FIELD_ID); } public TimeField(final Unit timeUnit, final String fieldName) { this(new TimeDefinition(timeUnit), fieldName); } @Override public NumericData getFullRange() { return new NumericRange(0, System.currentTimeMillis() + 1); } public TimeField(final NumericDimensionDefinition baseDefinition, final String fieldName) { this.baseDefinition = baseDefinition; final IntervalSerializationProvider serializationProvider = new IntervalSerializationProvider(); reader = serializationProvider.getFieldReader(); writer = serializationProvider.getFieldWriter(); this.fieldName = fieldName; } @Override public double normalize(final double value) { return baseDefinition.normalize(value); } @Override public double denormalize(final double value) { return baseDefinition.denormalize(value); } @Override public BinRange[] getNormalizedRanges(final NumericData index) { return baseDefinition.getNormalizedRanges(index); } @Override public NumericRange getDenormalizedRange(final BinRange range) { return baseDefinition.getDenormalizedRange(range); } @Override public int getFixedBinIdSize() { return 
baseDefinition.getFixedBinIdSize(); } @Override public double getRange() { return baseDefinition.getRange(); } @Override public NumericRange getBounds() { return baseDefinition.getBounds(); } @Override public NumericData getNumericData(final Interval dataElement) { if (dataElement.getStart().equals(dataElement.getEnd())) { return new NumericValue(dataElement.getStart().toEpochMilli()); } return new NumericRange( dataElement.getStart().toEpochMilli(), dataElement.getEnd().toEpochMilli()); } @Override public String getFieldName() { return fieldName; } @Override public FieldWriter getWriter() { return writer; } @Override public FieldReader getReader() { return reader; } @Override public NumericDimensionDefinition getBaseDefinition() { return baseDefinition; } @Override public byte[] toBinary() { final byte[] dimensionBinary = PersistenceUtils.toBinary(baseDefinition); final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName); final ByteBuffer buf = ByteBuffer.allocate( dimensionBinary.length + fieldNameBytes.length + VarintUtils.unsignedIntByteLength(fieldNameBytes.length)); VarintUtils.writeUnsignedInt(fieldNameBytes.length, buf); buf.put(fieldNameBytes); buf.put(dimensionBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int fieldNameLength = VarintUtils.readUnsignedInt(buf); final byte[] fieldNameBinary = ByteArrayUtils.safeRead(buf, fieldNameLength); fieldName = StringUtils.stringFromBinary(fieldNameBinary); final byte[] dimensionBinary = new byte[buf.remaining()]; buf.get(dimensionBinary); baseDefinition = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dimensionBinary); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((baseDefinition == null) ? 0 : baseDefinition.hashCode()); result = (prime * result) + ((fieldName == null) ? 
0 : fieldName.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TimeField other = (TimeField) obj; if (baseDefinition == null) { if (other.baseDefinition != null) { return false; } } else if (!baseDefinition.equals(other.baseDefinition)) { return false; } if (fieldName == null) { if (other.fieldName != null) { return false; } } else if (!fieldName.equals(other.fieldName)) { return false; } return true; } @Override public Class getFieldClass() { return Interval.class; } @Override public Set getDimensionHints() { return Sets.newHashSet(TIME_DIMENSION_HINT, START_TIME_DIMENSION_HINT, END_TIME_DIMENSION_HINT); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/CalendarArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.field; import java.util.Calendar; import org.locationtech.geowave.core.geotime.store.field.CalendarSerializationProvider.CalendarReader; import org.locationtech.geowave.core.geotime.store.field.CalendarSerializationProvider.CalendarWriter; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class CalendarArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new CalendarArrayReader(); } @Override public FieldWriter getFieldWriter() { return new CalendarArrayWriter(); } private static class CalendarArrayReader implements FieldReader { @Override public Calendar[] readField(final byte[] fieldData) { return new ArrayReader<>(new CalendarReader()).readField(fieldData); } } private static class CalendarArrayWriter extends VariableSizeObjectArrayWriter { public CalendarArrayWriter() { super(new CalendarWriter()); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/CalendarSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.field;

import java.nio.ByteBuffer;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;

/**
 * Field serialization provider (SPI) for {@link Calendar} values. Calendars are stored as their
 * GMT epoch-millis, varint-encoded; a null calendar round-trips through an empty byte array.
 */
public class CalendarSerializationProvider implements FieldSerializationProviderSpi {
  @Override
  public FieldReader getFieldReader() {
    return new CalendarReader();
  }

  @Override
  public FieldWriter getFieldWriter() {
    return new CalendarWriter();
  }

  protected static class CalendarReader implements FieldReader {
    /** Decodes a varint epoch-millis value into a GMT calendar; empty/null bytes yield null. */
    @Override
    public Calendar readField(final byte[] fieldData) {
      if ((fieldData == null) || (fieldData.length == 0)) {
        return null;
      }
      return toGmtCalendar(VarintUtils.readTime(ByteBuffer.wrap(fieldData)));
    }

    /**
     * Version-aware decode: data older than the current serialization version was written as a
     * fixed-width big-endian long rather than a varint.
     */
    @Override
    public Calendar readField(final byte[] fieldData, final byte serializationVersion) {
      if ((fieldData == null) || (fieldData.length == 0)) {
        return null;
      }
      if (serializationVersion >= FieldUtils.SERIALIZATION_VERSION) {
        return readField(fieldData);
      }
      // legacy (pre-varint) layout
      return toGmtCalendar(ByteBuffer.wrap(fieldData).getLong());
    }

    // shared by both decode paths: wrap epoch millis in a GMT-zoned Calendar
    private static Calendar toGmtCalendar(final long epochMillis) {
      final Calendar result = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
      result.setTime(new Date(epochMillis));
      return result;
    }
  }

  protected static class CalendarWriter implements FieldWriter {
    /** Encodes the calendar's GMT epoch-millis as a varint; null encodes as empty bytes. */
    @Override
    public byte[] writeField(final Calendar cal) {
      if (cal == null) {
        return new byte[] {};
      }
      final long gmtMillis = TimeUtils.calendarToGMTMillis(cal);
      final ByteBuffer out = ByteBuffer.allocate(VarintUtils.timeByteLength(gmtMillis));
      VarintUtils.writeTime(gmtMillis, out);
      return out.array();
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/DateArraySerializationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.field; import java.nio.ByteBuffer; import java.util.Date; import org.locationtech.geowave.core.geotime.store.field.DateSerializationProvider.DateReader; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class DateArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new DateArrayReader(); } @Override public FieldWriter getFieldWriter() { return new DateArrayWriter(); } // @see LongArraySerializationProvider.LongArrayReader private static class DateArrayReader implements FieldReader { @Override public Date[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } final ByteBuffer buff = ByteBuffer.wrap(fieldData); final int count = VarintUtils.readUnsignedInt(buff); ByteArrayUtils.verifyBufferSize(buff, count); final Date[] result = new Date[count]; for (int i = 0; i < count; i++) { if (buff.get() > 0) { result[i] = new Date(VarintUtils.readTime(buff)); } else { result[i] = null; } } return result; } @Override public Date[] readField(final byte[] fieldData, final byte serializationVersion) { if (serializationVersion < 
FieldUtils.SERIALIZATION_VERSION) { return new ArrayReader<>(new DateReader()).readField(fieldData, serializationVersion); } else { return readField(fieldData); } } } // @see LongArraySerializationProvider.LongArrayWriter private static class DateArrayWriter implements FieldWriter { @Override public byte[] writeField(final Date[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length); for (final Date value : fieldValue) { bytes++; if (value != null) { bytes += VarintUtils.timeByteLength(value.getTime()); } } final ByteBuffer buf = ByteBuffer.allocate(bytes); VarintUtils.writeUnsignedInt(fieldValue.length, buf); for (final Date value : fieldValue) { if (value == null) { buf.put((byte) 0x0); } else { buf.put((byte) 0x1); VarintUtils.writeTime(value.getTime(), buf); } } return buf.array(); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/DateSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.field;

import java.nio.ByteBuffer;
import java.util.Date;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;

/**
 * Field serialization provider (SPI) for {@link Date} values, stored as varint-encoded epoch
 * millis; a null date round-trips through an empty byte array.
 */
public class DateSerializationProvider implements FieldSerializationProviderSpi {
  @Override
  public FieldReader getFieldReader() {
    return new DateReader();
  }

  @Override
  public FieldWriter getFieldWriter() {
    return new DateWriter();
  }

  protected static class DateReader implements FieldReader {
    /** Decodes a varint epoch-millis value; empty/null bytes yield null. */
    @Override
    public Date readField(final byte[] fieldData) {
      if ((fieldData == null) || (fieldData.length == 0)) {
        return null;
      }
      return new Date(VarintUtils.readTime(ByteBuffer.wrap(fieldData)));
    }

    /**
     * Version-aware decode: data older than the current serialization version was written as a
     * fixed-width big-endian long rather than a varint.
     */
    @Override
    public Date readField(final byte[] fieldData, final byte serializationVersion) {
      if ((fieldData == null) || (fieldData.length == 0)) {
        return null;
      }
      if (serializationVersion >= FieldUtils.SERIALIZATION_VERSION) {
        return readField(fieldData);
      }
      // legacy (pre-varint) layout
      return new Date(ByteBuffer.wrap(fieldData).getLong());
    }
  }

  protected static class DateWriter implements FieldWriter {
    /** Encodes the date's epoch-millis as a varint; null encodes as empty bytes. */
    @Override
    public byte[] writeField(final Date fieldData) {
      if (fieldData == null) {
        return new byte[] {};
      }
      final long epochMillis = fieldData.getTime();
      final ByteBuffer out = ByteBuffer.allocate(VarintUtils.timeByteLength(epochMillis));
      VarintUtils.writeTime(epochMillis, out);
      return out.array();
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/GeometryArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.field;

import org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider.GeometryReader;
import org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider.GeometryWriter;
import org.locationtech.geowave.core.store.data.field.ArrayReader;
import org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;
import org.locationtech.geowave.core.store.data.field.FieldWriter;
import org.locationtech.jts.geom.Geometry;

/**
 * Field serialization provider (SPI) for {@code Geometry[]} values. Per-element encoding is
 * delegated to {@link GeometryReader} / {@link GeometryWriter}; the default geometry precision
 * of those delegates is used (no precision is passed here).
 */
public class GeometryArraySerializationProvider implements FieldSerializationProviderSpi {
  @Override
  public FieldReader getFieldReader() {
    return new GeometryArrayReader();
  }

  @Override
  public FieldWriter getFieldWriter() {
    return new GeometryArrayWriter();
  }

  /** Array reader that decodes each element with a {@link GeometryReader}. */
  private static class GeometryArrayReader extends ArrayReader {
    public GeometryArrayReader() {
      super(new GeometryReader());
    }
  }

  /** Array writer that encodes each variable-size element with a {@link GeometryWriter}. */
  private static class GeometryArrayWriter extends VariableSizeObjectArrayWriter {
    public GeometryArrayWriter() {
      super(new GeometryWriter());
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/GeometrySerializationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.field; import javax.annotation.Nullable; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.jts.geom.Geometry; public class GeometrySerializationProvider implements FieldSerializationProviderSpi { private Integer geometryPrecision; public GeometrySerializationProvider() { geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION; } public GeometrySerializationProvider(@Nullable final Integer geometryPrecision) { super(); this.geometryPrecision = geometryPrecision; } @Override public FieldReader getFieldReader() { return new GeometryReader(geometryPrecision); } @Override public FieldWriter getFieldWriter() { return new GeometryWriter(geometryPrecision); } protected static class GeometryReader implements FieldReader { private Integer geometryPrecision; public GeometryReader() { geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION; } public GeometryReader(@Nullable final Integer geometryPrecision) { this.geometryPrecision = geometryPrecision; } public void setPrecision(@Nullable final Integer geometryPrecision) { this.geometryPrecision = geometryPrecision; } @Override public Geometry readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 1)) { return null; } return GeometryUtils.geometryFromBinary( fieldData, 
geometryPrecision, FieldUtils.SERIALIZATION_VERSION); } @Override public Geometry readField(final byte[] fieldData, final byte serializationVersion) { if ((fieldData == null) || (fieldData.length < 1)) { return null; } return GeometryUtils.geometryFromBinary(fieldData, geometryPrecision, serializationVersion); } } protected static class GeometryWriter implements FieldWriter { private Integer geometryPrecision; public GeometryWriter() { geometryPrecision = GeometryUtils.MAX_GEOMETRY_PRECISION; } public GeometryWriter(@Nullable final Integer geometryPrecision) { this.geometryPrecision = geometryPrecision; } public void setPrecision(@Nullable final Integer geometryPrecision) { this.geometryPrecision = geometryPrecision; } @Override public byte[] writeField(final Geometry fieldValue) { if (fieldValue == null) { return new byte[] {}; } return GeometryUtils.geometryToBinary(fieldValue, geometryPrecision); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/IntervalArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.field;

import org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalReader;
import org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalWriter;
import org.locationtech.geowave.core.store.data.field.ArrayReader;
import org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;
import org.locationtech.geowave.core.store.data.field.FieldWriter;
import org.threeten.extra.Interval;

/**
 * Field serialization provider (SPI) for {@code Interval[]} values (ThreeTen-Extra
 * {@link Interval}). Per-element encoding is delegated to {@link IntervalReader} /
 * {@link IntervalWriter}.
 */
public class IntervalArraySerializationProvider implements FieldSerializationProviderSpi {
  @Override
  public FieldReader getFieldReader() {
    return new IntervalArrayReader();
  }

  @Override
  public FieldWriter getFieldWriter() {
    return new IntervalArrayWriter();
  }

  /** Array reader that decodes each element with an {@link IntervalReader}. */
  private static class IntervalArrayReader extends ArrayReader {
    public IntervalArrayReader() {
      super(new IntervalReader());
    }
  }

  /** Array writer that encodes each variable-size element with an {@link IntervalWriter}. */
  private static class IntervalArrayWriter extends VariableSizeObjectArrayWriter {
    public IntervalArrayWriter() {
      super(new IntervalWriter());
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/field/IntervalSerializationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.field; import java.nio.ByteBuffer; import java.time.Instant; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.threeten.extra.Interval; public class IntervalSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new IntervalReader(); } @Override public FieldWriter getFieldWriter() { return new IntervalWriter(); } public static class IntervalReader implements FieldReader { @Override public Interval readField(final byte[] fieldData) { Interval retVal; // this is less generic than using the persistable interface but is a // little better for performance final ByteBuffer buf = ByteBuffer.wrap(fieldData); final Instant value = Instant.ofEpochMilli(VarintUtils.readTime(buf)); if (buf.hasRemaining()) { retVal = Interval.of(value, Instant.ofEpochMilli(VarintUtils.readTime(buf))); } else { retVal = Interval.of(value, value); } return retVal; } } public static class IntervalWriter implements FieldWriter { @Override public byte[] writeField(final Interval fieldData) { if (fieldData == null) { return new byte[] {}; } if (fieldData.isEmpty()) { final long millis = fieldData.getStart().toEpochMilli(); final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.timeByteLength(millis)); VarintUtils.writeTime(millis, buf); return buf.array(); } else { final long startMillis = 
fieldData.getStart().toEpochMilli(); final long endMillis = fieldData.getEnd().toEpochMilli(); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.timeByteLength(startMillis) + VarintUtils.timeByteLength(endMillis)); VarintUtils.writeTime(startMillis, buf); VarintUtils.writeTime(endMillis, buf); return buf.array(); } } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/AbstractVectorConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.util.List; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.opengis.filter.Filter; abstract public class AbstractVectorConstraints implements AdapterAndIndexBasedQueryConstraints, QueryConstraints { protected T delegateConstraints; protected 
AbstractVectorConstraints() {} public AbstractVectorConstraints(final T delegateConstraints) { super(); this.delegateConstraints = delegateConstraints; } @Override public byte[] toBinary() { return delegateConstraints.toBinary(); } @Override public List createFilters(final Index index) { return delegateConstraints.createFilters(index); } @Override public void fromBinary(final byte[] bytes) { delegateConstraints = newConstraints(); delegateConstraints.fromBinary(bytes); } abstract protected T newConstraints(); @Override public List getIndexConstraints(final Index index) { return delegateConstraints.getIndexConstraints(index); } abstract protected boolean isSupported( final Index index, final GeotoolsFeatureDataAdapter adapter); abstract protected Filter getFilter(GeotoolsFeatureDataAdapter adapter, Index index); @Override public QueryConstraints createQueryConstraints( final InternalDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping) { final InternalGeotoolsFeatureDataAdapter gtAdapter = IndexOptimizationUtils.unwrapGeotoolsFeatureDataAdapter(adapter); if (gtAdapter != null) { if (!isSupported(index, gtAdapter)) { final Filter filter = getFilter(gtAdapter, index); if (filter == null) { return null; } return new ExplicitCQLQuery(delegateConstraints, filter, gtAdapter, indexMapping); } } // otherwise just unwrap this return delegateConstraints; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/BaseVectorQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import org.locationtech.geowave.core.store.query.BaseQuery; public interface BaseVectorQueryBuilder, R extends BaseVectorQueryBuilder> { } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitCQLQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.filter.CQLQueryFilter; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.constraints.TypeConstraintQuery; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.opengis.filter.Filter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ExplicitCQLQuery implements QueryConstraints, TypeConstraintQuery { private static final Logger LOGGER = LoggerFactory.getLogger(ExplicitCQLQuery.class); private QueryConstraints baseQuery; private CQLQueryFilter filter; private Filter cqlFilter; public ExplicitCQLQuery() {} public ExplicitCQLQuery( final QueryConstraints baseQuery, final Filter filter, final InternalGeotoolsFeatureDataAdapter adapter, final AdapterToIndexMapping indexMapping) { // TODO consider ensuring the baseQuery amd the filter are in the // coordinate reference system of the adapter // only if the query has spatial 
predicate(s) this.baseQuery = baseQuery; cqlFilter = filter; this.filter = new CQLQueryFilter(filter, adapter, indexMapping); } @Override public List createFilters(final Index index) { List queryFilters; // note, this assumes the CQL filter covers the baseQuery which *should* // be a safe assumption, otherwise we need to add the // baseQuery.createFilters to the list of query filters queryFilters = new ArrayList<>(); if (filter != null) { queryFilters = new ArrayList<>(queryFilters); queryFilters.add(filter); } return queryFilters; } @Override public List getIndexConstraints(final Index index) { if (baseQuery != null) { return baseQuery.getIndexConstraints(index); } return Collections.emptyList(); } @Override public byte[] toBinary() { byte[] baseQueryBytes; if (baseQuery != null) { baseQueryBytes = PersistenceUtils.toBinary(baseQuery); } else { // base query can be null, no reason to log a warning baseQueryBytes = new byte[] {}; } final byte[] filterBytes; if (filter != null) { filterBytes = filter.toBinary(); } else { LOGGER.warn("Filter is null"); filterBytes = new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate( filterBytes.length + baseQueryBytes.length + VarintUtils.unsignedIntByteLength(filterBytes.length)); VarintUtils.writeUnsignedInt(filterBytes.length, buf); buf.put(filterBytes); buf.put(baseQueryBytes); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int filterBytesLength = VarintUtils.readUnsignedInt(buf); if (filterBytesLength > 0) { final byte[] filterBytes = ByteArrayUtils.safeRead(buf, filterBytesLength); filter = new CQLQueryFilter(); filter.fromBinary(filterBytes); } else { LOGGER.warn("CQL filter is empty bytes"); filter = null; } final int baseQueryBytesLength = buf.remaining(); if (baseQueryBytesLength > 0) { final byte[] baseQueryBytes = ByteArrayUtils.safeRead(buf, baseQueryBytesLength); try { baseQuery = (QueryConstraints) 
PersistenceUtils.fromBinary(baseQueryBytes); } catch (final Exception e) { throw new IllegalArgumentException("Unable to read base query from binary", e); } } else { // base query can be null, no reason to log a warning baseQuery = null; } } @Override public String getTypeName() { return filter.getTypeName(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitSpatialQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.List; import java.util.Map; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.TWKBReader; import org.locationtech.geowave.core.geotime.util.TWKBWriter; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.io.ParseException; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; 
import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The Spatial Query class represents a query in two dimensions. The constraint that is applied * represents an intersection operation on the query geometry. */ public class ExplicitSpatialQuery extends BasicQueryByClass { private static final Logger LOGGER = LoggerFactory.getLogger(ExplicitSpatialQuery.class); private static class CrsCache { Geometry geometry; Map> constraintsPerIndexId; public CrsCache( final Geometry geometry, final Map> constraintsPerIndexId) { this.geometry = geometry; this.constraintsPerIndexId = constraintsPerIndexId; } } private Geometry queryGeometry; private String crsCode; private CompareOperation compareOp = CompareOperation.INTERSECTS; private BasicQueryCompareOperation nonSpatialCompareOp = BasicQueryCompareOperation.INTERSECTS; private final Map crsCodeCache = new HashMap<>(); private CoordinateReferenceSystem crs; /** * Convenience constructor used to construct a SpatialQuery object that has an X and Y dimension * (axis). 
* * @param queryGeometry spatial geometry of the query */ public ExplicitSpatialQuery(final Geometry queryGeometry) { this(GeometryUtils.basicConstraintsFromGeometry(queryGeometry), queryGeometry); } public ExplicitSpatialQuery(final ConstraintsByClass constraints, final Geometry queryGeometry) { this(constraints, queryGeometry, (String) null); } public ExplicitSpatialQuery( final ConstraintsByClass constraints, final Geometry queryGeometry, final String crsCode) { this( constraints, queryGeometry, crsCode, CompareOperation.INTERSECTS, BasicQueryCompareOperation.INTERSECTS); } public ExplicitSpatialQuery(final Geometry queryGeometry, final String crsCode) { this( GeometryUtils.basicConstraintsFromGeometry(queryGeometry), queryGeometry, crsCode, CompareOperation.INTERSECTS, BasicQueryCompareOperation.INTERSECTS); } /** * Convenience constructor used to construct a SpatialQuery object that has an X and Y dimension * (axis). * * @param queryGeometry spatial geometry of the query * @param compareOp the compare operation to use */ public ExplicitSpatialQuery(final Geometry queryGeometry, final CompareOperation compareOp) { this(GeometryUtils.basicConstraintsFromGeometry(queryGeometry), queryGeometry, compareOp); } /** * Convenience constructor can be used when you already have linear constraints for the query. The * queryGeometry and compareOp is used for fine grained post filtering. * * @param constraints linear constraints * @param queryGeometry spatial geometry of the query * @param compareOp the compare operation to use */ public ExplicitSpatialQuery( final ConstraintsByClass constraints, final Geometry queryGeometry, final CompareOperation compareOp) { this(constraints, queryGeometry, compareOp, BasicQueryCompareOperation.INTERSECTS); } public ExplicitSpatialQuery( final Geometry queryGeometry, final String crsCode, final CompareOperation compareOp) { this( GeometryUtils.basicConstraintsFromGeometry(queryGeometry), queryGeometry, crsCode, compareOp == null ? 
CompareOperation.INTERSECTS : compareOp, BasicQueryCompareOperation.INTERSECTS); } /** * Convenience constructor can be used when you already have linear constraints for the query. The * queryGeometry and compareOp is used for fine grained post filtering. * * @param constraints linear constraints * @param queryGeometry spatial geometry of the query * @param compareOp predicate associated query geometry * @param nonSpatialCompareOp predicate associated non-spatial fields (i.e Time) */ public ExplicitSpatialQuery( final ConstraintsByClass constraints, final Geometry queryGeometry, final CompareOperation compareOp, final BasicQueryCompareOperation nonSpatialCompareOp) { this( constraints, queryGeometry, null, compareOp == null ? CompareOperation.INTERSECTS : compareOp, nonSpatialCompareOp); } public ExplicitSpatialQuery( final ConstraintsByClass constraints, final Geometry queryGeometry, final String crsCode, final CompareOperation compareOp, final BasicQueryCompareOperation nonSpatialCompareOp) { super(constraints, nonSpatialCompareOp); this.crsCode = crsCode; this.queryGeometry = queryGeometry; this.compareOp = compareOp; this.nonSpatialCompareOp = nonSpatialCompareOp; } public ExplicitSpatialQuery() { super(); } /** @return queryGeometry the spatial geometry of the SpatialQuery object */ public Geometry getQueryGeometry() { return queryGeometry; } public String getCrsCode() { return crsCode; } public CoordinateReferenceSystem getCrs() { return crs; } @Override protected QueryFilter createQueryFilter( final MultiDimensionalNumericData constraints, final NumericDimensionField[] orderedConstrainedDimensionFields, final NumericDimensionField[] unconstrainedDimensionDefinitions, final Index index) { return new SpatialQueryFilter( constraints, orderedConstrainedDimensionFields, unconstrainedDimensionDefinitions, internalGetGeometry(index), compareOp, nonSpatialCompareOp); } protected Geometry internalGetGeometry(final Index index) { final String indexCrsStr = 
getCrs(index.getIndexModel()); CrsCache cache = crsCodeCache.get(indexCrsStr); if (cache != null) { return cache.geometry; } cache = transformToIndex(indexCrsStr, index); crsCodeCache.put(indexCrsStr, cache); return cache.geometry; } @Override public List getIndexConstraints(final Index index) { final String indexCrsStr = getCrs(index.getIndexModel()); CrsCache cache = crsCodeCache.get(indexCrsStr); if (cache != null) { List indexConstraints = cache.constraintsPerIndexId.get(index.getName()); if (indexConstraints == null) { if (GeometryUtils.crsMatches(crsCode, indexCrsStr) || (queryGeometry == null)) { indexConstraints = super.getIndexConstraints(index); } else { indexConstraints = indexConstraintsFromGeometry(cache.geometry, index); } cache.constraintsPerIndexId.put(index.getName(), indexConstraints); } return indexConstraints; } cache = transformToIndex(indexCrsStr, index); crsCodeCache.put(indexCrsStr, cache); return cache.constraintsPerIndexId.get(index.getName()); } private CrsCache transformToIndex(final String indexCrsStr, final Index index) { if (GeometryUtils.crsMatches(crsCode, indexCrsStr) || (queryGeometry == null)) { final List constraints = super.getIndexConstraints(index); final Map> constraintsPerIndexId = new HashMap<>(); constraintsPerIndexId.put(index.getName(), constraints); return new CrsCache(queryGeometry, constraintsPerIndexId); } else { if (crs == null) { if ((crsCode == null) || crsCode.isEmpty()) { crsCode = GeometryUtils.DEFAULT_CRS_STR; } try { crs = CRS.decode(crsCode, true); } catch (final FactoryException e) { LOGGER.warn("Unable to decode spatial query crs", e); } } CoordinateReferenceSystem indexCrs; if (GeometryUtils.isDefaultCrs(indexCrsStr)) { indexCrs = GeometryUtils.getDefaultCRS(); } else { indexCrs = ((CustomCrsIndexModel) index.getIndexModel()).getCrs(); } try { final MathTransform transform = CRS.findMathTransform(crs, indexCrs, true); // transform geometry final Geometry indexCrsQueryGeometry = 
JTS.transform(queryGeometry, transform); final List indexConstraints = indexConstraintsFromGeometry(indexCrsQueryGeometry, index); final Map> constraintsPerIndexId = new HashMap<>(); constraintsPerIndexId.put(index.getName(), indexConstraints); return new CrsCache(indexCrsQueryGeometry, constraintsPerIndexId); } catch (final FactoryException e) { LOGGER.warn("Unable to create coordinate reference system transform", e); } catch (MismatchedDimensionException | TransformException e) { LOGGER.warn("Unable to transform query geometry into index CRS", e); } } final List constraints = super.getIndexConstraints(index); final Map> constraintsPerIndexId = new HashMap<>(); constraintsPerIndexId.put(index.getName(), constraints); return new CrsCache(queryGeometry, constraintsPerIndexId); } private static List indexConstraintsFromGeometry( final Geometry geom, final Index index) { return GeometryUtils.basicConstraintsFromGeometry(geom).getIndexConstraints(index); } private static String getCrs(final CommonIndexModel indexModel) { if (indexModel instanceof CustomCrsIndexModel) { if (GeometryUtils.isDefaultCrs(((CustomCrsIndexModel) indexModel).getCrs())) { return null; } return GeometryUtils.getCrsCode(((CustomCrsIndexModel) indexModel).getCrs()); } return null; } @Override public byte[] toBinary() { final byte[] crsBinary = GeometryUtils.isDefaultCrs(crsCode) ? 
new byte[0] : StringUtils.stringToBinary(crsCode); final byte[] superBinary = super.toBinary(); final byte[] geometryBinary = new TWKBWriter().write(queryGeometry); final ByteBuffer buf = ByteBuffer.allocate( superBinary.length + geometryBinary.length + crsBinary.length + VarintUtils.unsignedIntByteLength(compareOp.ordinal()) + VarintUtils.unsignedIntByteLength(nonSpatialCompareOp.ordinal()) + VarintUtils.unsignedIntByteLength(crsBinary.length) + VarintUtils.unsignedIntByteLength(superBinary.length)); VarintUtils.writeUnsignedInt(compareOp.ordinal(), buf); VarintUtils.writeUnsignedInt(nonSpatialCompareOp.ordinal(), buf); VarintUtils.writeUnsignedInt(crsBinary.length, buf); VarintUtils.writeUnsignedInt(superBinary.length, buf); buf.put(crsBinary); buf.put(superBinary); buf.put(geometryBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); compareOp = CompareOperation.values()[VarintUtils.readUnsignedInt(buf)]; nonSpatialCompareOp = BasicQueryCompareOperation.values()[VarintUtils.readUnsignedInt(buf)]; final int crsBinaryLength = VarintUtils.readUnsignedInt(buf); final int superBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] crsBinary = ByteArrayUtils.safeRead(buf, crsBinaryLength); crsCode = crsBinary.length > 0 ? StringUtils.stringFromBinary(crsBinary) : null; final byte[] superBinary = ByteArrayUtils.safeRead(buf, superBinaryLength); super.fromBinary(superBinary); try { queryGeometry = new TWKBReader().read(buf); } catch (final ParseException e) { LOGGER.warn("Unable to read query geometry as well-known binary", e); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitSpatialTemporalQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.util.ArrayList; import java.util.Date; import java.util.List; import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation; import org.locationtech.jts.geom.Geometry; import org.threeten.extra.Interval; /** * The Spatial Temporal Query class represents a query in three dimensions. The constraint that is * applied represents an intersection operation on the query geometry AND a date range intersection * based on startTime and endTime. 
*/ public class ExplicitSpatialTemporalQuery extends ExplicitSpatialQuery { public ExplicitSpatialTemporalQuery() {} public ExplicitSpatialTemporalQuery( final Date startTime, final Date endTime, final Geometry queryGeometry) { super(createSpatialTemporalConstraints(startTime, endTime, queryGeometry), queryGeometry); } public ExplicitSpatialTemporalQuery( final Date startTime, final Date endTime, final Geometry queryGeometry, final String crsCode) { super( createSpatialTemporalConstraints(startTime, endTime, queryGeometry), queryGeometry, crsCode); } public ExplicitSpatialTemporalQuery( final TemporalConstraints constraints, final Geometry queryGeometry) { super(createSpatialTemporalConstraints(constraints, queryGeometry), queryGeometry); } public ExplicitSpatialTemporalQuery( final TemporalConstraints constraints, final Geometry queryGeometry, final String crsCode) { super(createSpatialTemporalConstraints(constraints, queryGeometry), queryGeometry, crsCode); } /** * If more then on polygon is supplied in the geometry, then the range of time is partnered with * each polygon constraint. 
Note: By default we are using same compareOp for 1D Time filtering as * the compareOp of the Spatial query by calling getBaseCompareOp() * * @param startTime * @param endTime * @param queryGeometry * @param compareOp */ public ExplicitSpatialTemporalQuery( final Date startTime, final Date endTime, final Geometry queryGeometry, final CompareOperation compareOp) { super( createSpatialTemporalConstraints(startTime, endTime, queryGeometry), queryGeometry, compareOp, compareOp.getBaseCompareOp()); } public ExplicitSpatialTemporalQuery( final Interval[] intervals, final Geometry queryGeometry, final String crsCode, final CompareOperation compareOp) { super( createSpatialTemporalConstraints(intervals, queryGeometry), queryGeometry, crsCode, compareOp, // it seems like temporal should always use intersection and not // inherit from the spatial compare op BasicQueryCompareOperation.INTERSECTS); } /** * Applies the set of temporal constraints to the boundaries of the provided polygon. If a * multi-polygon is provided, then all matching combinations between temporal ranges and polygons * are explored. 
* * @param constraints * @param queryGeometry * @param compareOp */ public ExplicitSpatialTemporalQuery( final TemporalConstraints constraints, final Geometry queryGeometry, final CompareOperation compareOp) { super(createSpatialTemporalConstraints(constraints, queryGeometry), queryGeometry, compareOp); } public static ConstraintSet createConstraints( final TemporalRange temporalRange, final boolean isDefault) { return new ConstraintSet( new ConstraintData( new NumericRange( temporalRange.getStartTime().getTime(), temporalRange.getEndTime().getTime()), isDefault), TimeDefinition.class, SimpleTimeDefinition.class); } public static ConstraintsByClass createConstraints( final TemporalConstraints temporalConstraints, final boolean isDefault) { final List constraints = new ArrayList<>(); for (final TemporalRange range : temporalConstraints.getRanges()) { constraints.add( new ConstraintSet( new ConstraintData( new NumericRange(range.getStartTime().getTime(), range.getEndTime().getTime()), isDefault), TimeDefinition.class, SimpleTimeDefinition.class)); } return new ConstraintsByClass(constraints); } public static ConstraintsByClass createConstraints( final Interval[] intervals, final boolean isDefault) { final List constraints = new ArrayList<>(); for (final Interval range : intervals) { constraints.add( new ConstraintSet( new ConstraintData( new NumericRange( range.getStart().toEpochMilli(), // intervals are intended to be exclusive on the end so this adjusts for // exclusivity Math.max(range.getEnd().toEpochMilli() - 1, range.getStart().toEpochMilli())), isDefault), TimeDefinition.class, SimpleTimeDefinition.class)); } return new ConstraintsByClass(constraints); } /** * Supports multi-polygons and multiple temporal bounds. Creates all matchings between polygon and * temporal bounds. 
* * @param startTime * @param endTime * @param queryGeometry * @return */ private static ConstraintsByClass createSpatialTemporalConstraints( final TemporalConstraints temporalConstraints, final Geometry queryGeometry) { final ConstraintsByClass geoConstraints = GeometryUtils.basicConstraintsFromGeometry(queryGeometry); final ConstraintsByClass timeConstraints = createConstraints(temporalConstraints, false); return geoConstraints.merge(timeConstraints); } /** * Supports multi-polygons and multiple temporal bounds. Creates all matchings between polygon and * temporal bounds. * * @param startTime * @param endTime * @param queryGeometry * @return */ private static ConstraintsByClass createSpatialTemporalConstraints( final Interval[] intervals, final Geometry queryGeometry) { final ConstraintsByClass geoConstraints = GeometryUtils.basicConstraintsFromGeometry(queryGeometry); final ConstraintsByClass timeConstraints = createConstraints(intervals, false); return geoConstraints.merge(timeConstraints); } /** * Supports multi-polygons. Applies 'temporal bounds' to each geometric constraint. * * @param startTime * @param endTime * @param queryGeometry * @return */ private static ConstraintsByClass createSpatialTemporalConstraints( final Date startTime, final Date endTime, final Geometry queryGeometry) { final ConstraintsByClass geoConstraints = GeometryUtils.basicConstraintsFromGeometry(queryGeometry); return geoConstraints.merge( new ConstraintsByClass( new ConstraintSet( new ConstraintData(new NumericRange(startTime.getTime(), endTime.getTime()), false), TimeDefinition.class, SimpleTimeDefinition.class))); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ExplicitTemporalQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.threeten.extra.Interval; /** * The Spatial Temporal Query class represents a query in three dimensions. The constraint that is * applied represents an intersection operation on the query geometry AND a date range intersection * based on startTime and endTime. 
*/ public class ExplicitTemporalQuery extends BasicQueryByClass { public ExplicitTemporalQuery(final Interval[] intervals) { super(createTemporalConstraints(intervals)); } public ExplicitTemporalQuery(final TemporalConstraints contraints) { super(createTemporalConstraints(contraints)); } public ExplicitTemporalQuery() { super(); } private static ConstraintsByClass createTemporalConstraints( final TemporalConstraints temporalConstraints) { final List constraints = new ArrayList<>(); for (final TemporalRange range : temporalConstraints.getRanges()) { constraints.add( new ConstraintSet( new ConstraintData( new NumericRange(range.getStartTime().getTime(), range.getEndTime().getTime()), false), TimeDefinition.class, SimpleTimeDefinition.class)); } return new ConstraintsByClass(constraints); } private static ConstraintsByClass createTemporalConstraints(final Interval[] intervals) { final List constraints = new ArrayList<>(); for (final Interval range : intervals) { constraints.add( new ConstraintSet( new ConstraintData( new NumericRange( range.getStart().toEpochMilli(), // intervals are intended to be exclusive on the end so this adjusts for // exclusivity Math.max(range.getEnd().toEpochMilli() - 1, range.getStart().toEpochMilli())), false), TimeDefinition.class, SimpleTimeDefinition.class)); } return new ConstraintsByClass(constraints); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/IndexOnlySpatialQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.jts.geom.Geometry; public class IndexOnlySpatialQuery extends ExplicitSpatialQuery { public IndexOnlySpatialQuery() { super(); } public IndexOnlySpatialQuery(final ConstraintsByClass constraints, final Geometry queryGeometry) { super(constraints, queryGeometry); } public IndexOnlySpatialQuery(final Geometry queryGeometry) { super(queryGeometry); } public IndexOnlySpatialQuery(final Geometry queryGeometry, final String crsCode) { super(queryGeometry, crsCode); } @Override protected QueryFilter createQueryFilter( final MultiDimensionalNumericData constraints, final NumericDimensionField[] orderedConstrainedDimensionFields, final NumericDimensionField[] unconstrainedDimensionDefinitions, final Index index) { // this will ignore fine grained filters and just use the row ID in the // index return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/OptimalCQLQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.util.ArrayList; import java.util.Collection; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.geotime.util.ExtractAttributesFilter; import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor; import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult; import org.locationtech.geowave.core.geotime.util.ExtractTimeFilterVisitor; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils.GeoConstraintsWrapper; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import 
org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.Filter; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class OptimalCQLQuery implements AdapterAndIndexBasedQueryConstraints, QueryConstraints { private static final Logger LOGGER = LoggerFactory.getLogger(OptimalCQLQuery.class); public static QueryConstraints createOptimalQuery( final String cql, final InternalGeotoolsFeatureDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping) throws CQLException { return createOptimalQuery(cql, adapter, index, indexMapping, null); } public static QueryConstraints createOptimalQuery( final String cql, final InternalGeotoolsFeatureDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping, final BasicQueryByClass baseQuery) throws CQLException { return createOptimalQuery( cql, adapter, CompareOperation.INTERSECTS, index, indexMapping, baseQuery); } public static QueryConstraints createOptimalQuery( final String cql, final InternalGeotoolsFeatureDataAdapter adapter, final CompareOperation geoCompareOp, final Index index, final AdapterToIndexMapping indexMapping, final BasicQueryByClass baseQuery) throws CQLException { final Filter cqlFilter = ECQL.toFilter(cql); return createOptimalQuery(cqlFilter, adapter, geoCompareOp, index, indexMapping, baseQuery); } public static QueryConstraints createOptimalQuery( final Filter cqlFilter, final InternalGeotoolsFeatureDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping) { return createOptimalQuery(cqlFilter, adapter, index, indexMapping, null); } public static QueryConstraints createOptimalQuery( final Filter cqlFilter, final 
InternalGeotoolsFeatureDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping, final BasicQueryByClass baseQuery) { return createOptimalQuery( cqlFilter, adapter, CompareOperation.INTERSECTS, index, indexMapping, baseQuery); } public static QueryConstraints createOptimalQuery( final Filter cqlFilter, final InternalGeotoolsFeatureDataAdapter adapter, final CompareOperation geoCompareOp, final Index index, final AdapterToIndexMapping indexMapping, BasicQueryByClass baseQuery) { final ExtractAttributesFilter attributesVisitor = new ExtractAttributesFilter(); final Object obj = cqlFilter.accept(attributesVisitor, null); final Collection attrs; if ((obj != null) && (obj instanceof Collection)) { attrs = (Collection) obj; } else { attrs = new ArrayList<>(); } // assume the index can't handle spatial or temporal constraints if its // null final boolean isSpatial = IndexOptimizationUtils.hasAtLeastSpatial(index); final boolean isTemporal = IndexOptimizationUtils.hasTime(index, adapter); if (isSpatial) { final String geomName = adapter.getFeatureType().getGeometryDescriptor().getLocalName(); attrs.remove(geomName); } if (isTemporal) { final TimeDescriptors timeDescriptors = adapter.getTimeDescriptors(); if (timeDescriptors != null) { final AttributeDescriptor timeDesc = timeDescriptors.getTime(); if (timeDesc != null) { attrs.remove(timeDesc.getLocalName()); } final AttributeDescriptor startDesc = timeDescriptors.getStartRange(); if (startDesc != null) { attrs.remove(startDesc.getLocalName()); } final AttributeDescriptor endDesc = timeDescriptors.getEndRange(); if (endDesc != null) { attrs.remove(endDesc.getLocalName()); } } } if (baseQuery == null) { final CoordinateReferenceSystem indexCRS = GeometryUtils.getIndexCrs(index); // there is only space and time final ExtractGeometryFilterVisitorResult geometryAndCompareOp = ExtractGeometryFilterVisitor.getConstraints( cqlFilter, indexCRS, 
adapter.getFeatureType().getGeometryDescriptor().getLocalName()); final TemporalConstraintsSet timeConstraintSet = new ExtractTimeFilterVisitor(adapter.getTimeDescriptors()).getConstraints(cqlFilter); if (geometryAndCompareOp != null) { final Geometry geometry = geometryAndCompareOp.getGeometry(); final GeoConstraintsWrapper geoConstraints = GeometryUtils.basicGeoConstraintsWrapperFromGeometry(geometry); ConstraintsByClass constraints = geoConstraints.getConstraints(); final CompareOperation extractedCompareOp = geometryAndCompareOp.getCompareOp(); if ((timeConstraintSet != null) && !timeConstraintSet.isEmpty()) { // determine which time constraints are associated with an // indexable // field final TemporalConstraints temporalConstraints = TimeUtils.getTemporalConstraintsForDescriptors( adapter.getTimeDescriptors(), timeConstraintSet); // convert to constraints final ConstraintsByClass timeConstraints = ExplicitSpatialTemporalQuery.createConstraints(temporalConstraints, false); constraints = geoConstraints.getConstraints().merge(timeConstraints); } // TODO: this actually doesn't boost performance much, if at // all, and one key is missing - the query geometry has to be // topologically equivalent to its envelope and the ingested // geometry has to be topologically equivalent to its envelope // this could be kept as a statistic on ingest, but considering // it doesn't boost performance it may not be worthwhile // pursuing // if (geoConstraints.isConstraintsMatchGeometry() && // CompareOperation.INTERSECTS.equals(geoCompareOp)) { // baseQuery = new BasicQuery( // constraints); // } // else { // we have to assume the geometry was transformed to the feature // type's CRS, but SpatialQuery assumes the default CRS if not // specified, so specify a CRS if necessary if (GeometryUtils.getDefaultCRS().equals(indexCRS)) { baseQuery = new ExplicitSpatialQuery(constraints, geometry, extractedCompareOp); } else { baseQuery = new ExplicitSpatialQuery( constraints, geometry, 
GeometryUtils.getCrsCode(indexCRS), extractedCompareOp, BasicQueryCompareOperation.INTERSECTS); } // ExtractGeometryFilterVisitor sets predicate to NULL when CQL // expression // involves multiple dissimilar geometric relationships (i.e. // "CROSSES(...) AND TOUCHES(...)") // In which case, baseQuery is not sufficient to represent CQL // expression. // By setting Exact flag to false we are forcing CQLQuery to // represent CQL expression but use // linear constraint from baseQuery if (extractedCompareOp == null) { baseQuery.setExact(false); } // } } else if ((timeConstraintSet != null) && !timeConstraintSet.isEmpty()) { // determine which time constraints are associated with an // indexable // field final TemporalConstraints temporalConstraints = TimeUtils.getTemporalConstraintsForDescriptors( adapter.getTimeDescriptors(), timeConstraintSet); baseQuery = new ExplicitTemporalQuery(temporalConstraints); } } // if baseQuery completely represents CQLQuery expression then use that if (attrs.isEmpty() && (baseQuery != null) && baseQuery.isExact()) { return baseQuery; } else { // baseQuery is passed to CQLQuery just to extract out linear // constraints only return new ExplicitCQLQuery(baseQuery, cqlFilter, adapter, indexMapping); } } private Filter filter; public OptimalCQLQuery() {} public OptimalCQLQuery(final Filter filter) { this.filter = filter; } @Override public QueryConstraints createQueryConstraints( final InternalDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping) { final InternalGeotoolsFeatureDataAdapter gtAdapter = IndexOptimizationUtils.unwrapGeotoolsFeatureDataAdapter(adapter); if (gtAdapter != null) { return createOptimalQuery(filter, gtAdapter, index, indexMapping); } LOGGER.error("Adapter is not a geotools feature adapter. 
Cannot apply CQL filter."); return null; } @Override public byte[] toBinary() { byte[] filterBytes; if (filter == null) { LOGGER.warn("CQL filter is null"); filterBytes = new byte[] {}; } else { filterBytes = StringUtils.stringToBinary(ECQL.toCQL(filter)); } return filterBytes; } @Override public void fromBinary(final byte[] bytes) { GeometryUtils.initClassLoader(); if (bytes.length > 0) { final String cql = StringUtils.stringFromBinary(bytes); try { filter = ECQL.toFilter(cql); } catch (final Exception e) { throw new IllegalArgumentException(cql, e); } } else { LOGGER.warn("CQL filter is empty bytes"); filter = null; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/ScaledTemporalRange.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.io.Serializable; import java.util.Calendar; import java.util.Date; import java.util.TimeZone; public class ScaledTemporalRange implements Serializable { private static final long serialVersionUID = 1L; private static long MILLIS_PER_DAY = 86400000; private static long DEFAULT_TIME_RANGE = 365L * MILLIS_PER_DAY; // one year private Date startTime = null; private Date endTime = null; // Default to lat bounds private double minVal = 0.0; private double maxVal = 180.0; private long timeRange = DEFAULT_TIME_RANGE; private double timeScale; private final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT")); public ScaledTemporalRange() { updateTimeScale(); } public void setTimeRange(final Date startTime, final Date endTime) { this.startTime = startTime; this.endTime = endTime; updateTimeScale(); } public void setTimeRange(final long millis) { timeRange = millis; startTime = null; endTime = null; updateTimeScale(); } public void setValueRange(final double minVal, final double maxVal) { this.minVal = minVal; this.maxVal = maxVal; updateTimeScale(); } public void setTimeScale(final double timeScale) { this.timeScale = timeScale; } private void updateTimeScale() { timeScale = (maxVal - minVal) / getTimeRangeMillis(); } public double getTimeScale() { return timeScale; } public long getTimeRangeMillis() { if ((startTime == null) || (endTime == null)) { return timeRange; } return endTime.getTime() - startTime.getTime(); } public double timeToValue(final Date time) { final long deltaTime = time.getTime() - getTimeMin(); return minVal + (deltaTime * 
timeScale); } public Date valueToTime(final double timeVal) { final long timeMillis = (long) (timeVal / timeScale) + getTimeMin(); cal.setTimeInMillis(timeMillis); return cal.getTime(); } private long getTimeMin() { if (startTime != null) { return startTime.getTime(); } return 0L; } public Date getStartTime() { return startTime; } public void setStartTime(final Date startTime) { this.startTime = startTime; } public Date getEndTime() { return endTime; } public void setEndTime(final Date endTime) { this.endTime = endTime; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/SpatialQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.filter.Filter; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; 
import org.slf4j.LoggerFactory; public class SpatialQuery extends AbstractVectorConstraints { private static final Logger LOGGER = LoggerFactory.getLogger(SpatialQuery.class); public SpatialQuery() { super(); } public SpatialQuery(final ExplicitSpatialQuery delegateConstraints) { super(delegateConstraints); } @Override protected ExplicitSpatialQuery newConstraints() { return new ExplicitSpatialQuery(); } @Override protected boolean isSupported(final Index index, final GeotoolsFeatureDataAdapter adapter) { return IndexOptimizationUtils.hasAtLeastSpatial(index); } @Override protected Filter getFilter(final GeotoolsFeatureDataAdapter adapter, final Index index) { return getFilter(adapter, index, delegateConstraints); } protected static Filter getFilter( final GeotoolsFeatureDataAdapter adapter, final Index index, final ExplicitSpatialQuery delegateConstraints) { final GeometryDescriptor geomDesc = adapter.getFeatureType().getGeometryDescriptor(); final CoordinateReferenceSystem indexCrs = GeometryUtils.getIndexCrs(index); return GeometryUtils.geometryToSpatialOperator( transformToAdapter(indexCrs, delegateConstraints), geomDesc.getLocalName(), indexCrs); } private static Geometry transformToAdapter( final CoordinateReferenceSystem adapterCrs, final ExplicitSpatialQuery delegateConstraints) { final Geometry queryGeometry = delegateConstraints.getQueryGeometry(); if (adapterCrs == null) { return queryGeometry; } final String indexCrsStr = GeometryUtils.getCrsCode(adapterCrs); if (indexCrsStr == null) { return queryGeometry; } if (GeometryUtils.crsMatches(delegateConstraints.getCrsCode(), indexCrsStr) || (queryGeometry == null)) { return queryGeometry; } else { CoordinateReferenceSystem crs = delegateConstraints.getCrs(); if (crs == null) { String crsCode = delegateConstraints.getCrsCode(); if ((crsCode == null) || crsCode.isEmpty()) { crsCode = GeometryUtils.DEFAULT_CRS_STR; } try { crs = CRS.decode(crsCode, true); } catch (final FactoryException e) { 
LOGGER.warn("Unable to decode spatial query crs", e); } } try { final MathTransform transform = CRS.findMathTransform(crs, adapterCrs, true); // transform geometry return JTS.transform(queryGeometry, transform); } catch (final FactoryException e) { LOGGER.warn("Unable to create coordinate reference system transform", e); } catch (MismatchedDimensionException | TransformException e) { LOGGER.warn("Unable to transform query geometry into index CRS", e); } } return queryGeometry; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/SpatialTemporalConstraintsBuilderImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.time.Instant; import java.util.Date; import org.apache.commons.lang3.ArrayUtils; import org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.query.constraints.EverythingQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.threeten.extra.Interval; public class SpatialTemporalConstraintsBuilderImpl implements SpatialTemporalConstraintsBuilder { private static final Logger LOGGER = LoggerFactory.getLogger(SpatialTemporalConstraintsBuilderImpl.class); private String crsCode; private Geometry geometry; private CompareOperation spatialCompareOp; private Interval[] timeRanges = new Interval[0]; @Override public SpatialTemporalConstraintsBuilder noSpatialConstraints() { geometry = null; crsCode = null; spatialCompareOp = null; return this; } @Override public SpatialTemporalConstraintsBuilder bboxConstraints( final double minX, final double maxX, final double minY, final double maxY) { this.geometry = GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(minX, maxX, minY, maxY)); return this; } @Override public SpatialTemporalConstraintsBuilder spatialConstraints(final Geometry geometry) { this.geometry = geometry; return this; } 
@Override public SpatialTemporalConstraintsBuilder spatialConstraintsCrs(final String crsCode) { this.crsCode = crsCode; return this; } @Override public SpatialTemporalConstraintsBuilder spatialConstraintsCompareOperation( final CompareOperation spatialCompareOp) { this.spatialCompareOp = spatialCompareOp; return this; } @Override public SpatialTemporalConstraintsBuilder noTemporalConstraints() { timeRanges = new Interval[0]; return this; } @Override public SpatialTemporalConstraintsBuilder addTimeRange(final Date startTime, final Date endTime) { return addTimeRange( Interval.of( Instant.ofEpochMilli(startTime.getTime()), Instant.ofEpochMilli(endTime.getTime()))); } @Override public SpatialTemporalConstraintsBuilder addTimeRange(final Interval timeRange) { timeRanges = ArrayUtils.add(timeRanges, timeRange); return this; } @Override public SpatialTemporalConstraintsBuilder setTimeRanges(final Interval[] timeRanges) { if (timeRanges == null) { this.timeRanges = new Interval[0]; } this.timeRanges = timeRanges; return this; } @Override public QueryConstraints build() { if ((crsCode != null) && (geometry == null)) { LOGGER.warn( "CRS code `" + crsCode + "` cannot be applied without a geometry. Ignoring CRS."); } if ((spatialCompareOp != null) && (geometry == null)) { LOGGER.warn( "Spatial compare operator `" + spatialCompareOp.name() + "` cannot be applied without a geometry. 
Ignoring compare operator."); } if (geometry != null) { // its at least spatial if (timeRanges.length > 0) { // its spatial-temporal return new SpatialTemporalQuery( new ExplicitSpatialTemporalQuery(timeRanges, geometry, crsCode, spatialCompareOp)); } return new SpatialQuery(new ExplicitSpatialQuery(geometry, crsCode, spatialCompareOp)); } else if (timeRanges.length > 0) { // its temporal only return new TemporalQuery(new ExplicitTemporalQuery(timeRanges)); } return new EverythingQuery(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/SpatialTemporalQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import org.geotools.factory.CommonFactoryFinder; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.store.api.Index; import org.opengis.filter.Filter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SpatialTemporalQuery extends AbstractVectorConstraints { private static final Logger LOGGER = LoggerFactory.getLogger(SpatialTemporalQuery.class); public SpatialTemporalQuery() { super(); } public SpatialTemporalQuery(final ExplicitSpatialTemporalQuery delegateConstraints) { super(delegateConstraints); } @Override protected ExplicitSpatialTemporalQuery newConstraints() { return new ExplicitSpatialTemporalQuery(); } @Override protected boolean 
isSupported(final Index index, final GeotoolsFeatureDataAdapter adapter) {
    // Supported only when the index carries both a time component for this adapter
    // and at least a spatial component.
    return IndexOptimizationUtils.hasTime(index, adapter)
        && IndexOptimizationUtils.hasAtLeastSpatial(index);
  }

  // Builds the conjunction of the spatial and temporal filters; returns null (with a
  // warning) when either component does not apply to the feature type.
  @Override
  protected Filter getFilter(final GeotoolsFeatureDataAdapter adapter, final Index index) {
    final Filter spatialFilter = SpatialQuery.getFilter(adapter, index, delegateConstraints);
    if (spatialFilter == null) {
      LOGGER.warn("Spatial filter does not apply to type '" + adapter.getTypeName() + "'");
      return null;
    }
    final Filter temporalFilter = TemporalQuery.getFilter(adapter, delegateConstraints);
    if (temporalFilter == null) {
      LOGGER.warn("Temporal filter does not apply to type '" + adapter.getTypeName() + "'");
      return null;
    }
    return CommonFactoryFinder.getFilterFactory2().and(spatialFilter, temporalFilter);
  }
}


================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalConstraints.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Date; import java.util.LinkedList; import java.util.List; import org.locationtech.geowave.core.index.VarintUtils; public class TemporalConstraints { private LinkedList constraints = new LinkedList<>(); private String name; public static final TemporalRange FULL_RANGE = new TemporalRange(TemporalRange.START_TIME, TemporalRange.END_TIME); public TemporalConstraints() {} public String getName() { return name; } public void empty() { constraints.clear(); } public TemporalConstraints(final String name) { this.name = name; } public TemporalConstraints(final List ranges, final String name) { constraints.addAll(ranges); this.name = name; } public TemporalConstraints(final TemporalRange range, final String name) { constraints.add(range); this.name = name; } public void replaceWithIntersections(final TemporalConstraints constraints) { this.constraints = TemporalConstraints.findIntersections(this, constraints).constraints; } public void replaceWithMerged(final TemporalConstraints constraints) { this.constraints = TemporalConstraints.merge(this, constraints).constraints; } public void add(final TemporalRange range) { int pos = 0; TemporalRange nextNeighbor = null; for (final TemporalRange aRange : constraints) { nextNeighbor = aRange; if (nextNeighbor.getStartTime().after(range.getStartTime())) { break; } else if (nextNeighbor.getEndTime().after(range.getStartTime()) || nextNeighbor.getEndTime().equals(range.getStartTime())) { if (range.getEndTime().before(nextNeighbor.getEndTime())) { // subsummed 
return; } else { // replaced with larger range constraints.set(pos, new TemporalRange(nextNeighbor.getStartTime(), range.getEndTime())); return; } } pos++; } if ((nextNeighbor != null) && nextNeighbor.getStartTime().before(range.getEndTime())) { constraints.add( pos, new TemporalRange( range.getStartTime(), TemporalConstraints.max(nextNeighbor.getEndTime(), range.getEndTime()))); } else { constraints.add(pos, range); } } public static final Date max(final Date one, final Date two) { return one.before(two) ? two : one; } public static final Date min(final Date one, final Date two) { return one.before(two) ? one : two; } public Date getMinOr(final Date min, final int exclusivityIncrement) { return (constraints.isEmpty()) ? min : exclusivityIncrement == 0 ? constraints.getFirst().getStartTime() : new Date(constraints.getFirst().getStartTime().getTime() + exclusivityIncrement); } public Date getMaxOr(final Date max, final int exclusivityIncrement) { return (constraints.isEmpty()) ? max : exclusivityIncrement == 0 ? constraints.getLast().getEndTime() : new Date(constraints.getLast().getEndTime().getTime() + exclusivityIncrement); } public boolean isEmpty() { return constraints.isEmpty(); } public TemporalRange getEndRange() { return (constraints.isEmpty()) ? FULL_RANGE : constraints.getLast(); } public TemporalRange getStartRange() { return (constraints.isEmpty()) ? FULL_RANGE : constraints.getFirst(); } public List getRanges() { return constraints == null ? 
Collections.emptyList() : constraints; } public static final TemporalConstraints findIntersections( final TemporalConstraints sideL, final TemporalConstraints sideR) { if (sideL.constraints.isEmpty()) { return sideR; } if (sideR.constraints.isEmpty()) { return sideL; } final TemporalConstraints newSet = new TemporalConstraints(sideL.name); for (final TemporalRange lRange : sideL.constraints) { for (final TemporalRange rRange : sideR.constraints) { if (lRange.getEndTime().before(rRange.getStartTime()) || rRange.getEndTime().before(lRange.getStartTime())) { continue; } newSet.add( new TemporalRange( max(lRange.getStartTime(), rRange.getStartTime()), min(lRange.getEndTime(), rRange.getEndTime()))); } } return newSet; } public static final TemporalConstraints merge( final TemporalConstraints left, final TemporalConstraints right) { if (left.isEmpty()) { return right; } if (right.isEmpty()) { return left; } final TemporalConstraints newSetOfRanges = new TemporalConstraints(left.name); newSetOfRanges.constraints.addAll(left.constraints); for (final TemporalRange range : right.constraints) { newSetOfRanges.add(range); } return newSetOfRanges; } public byte[] toBinary() { int bufferSize = VarintUtils.unsignedIntByteLength(constraints.size()); for (final TemporalRange range : constraints) { bufferSize += range.getBufferSize(); } final ByteBuffer buffer = ByteBuffer.allocate(bufferSize); VarintUtils.writeUnsignedInt(constraints.size(), buffer); for (final TemporalRange range : constraints) { range.toBinary(buffer); } return buffer.array(); } public void fromBinary(final byte[] data) { final ByteBuffer buffer = ByteBuffer.wrap(data); final int s = VarintUtils.readUnsignedInt(buffer); for (int i = 0; i < s; i++) { final TemporalRange range = new TemporalRange(); range.fromBinary(buffer); add(range); } } @Override public String toString() { return "TemporalConstraints [constraints=" + constraints + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; 
result = (prime * result) + ((constraints == null) ? 0 : constraints.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TemporalConstraints other = (TemporalConstraints) obj; if (constraints == null) { if (other.constraints != null) { return false; } } else if (!constraints.equals(other.constraints)) { return false; } return true; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalConstraintsSet.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; /** Constraints per each property name referenced in a query. */ public class TemporalConstraintsSet { final Map constraintsSet = new HashMap<>(); private boolean exact = true; public TemporalConstraintsSet() {} public boolean hasConstraintsForRange(final String startName, final String endName) { return constraintsSet.containsKey(startName + "_" + endName); } public void setExact(final boolean exact) { this.exact = exact; } public boolean isExact() { return exact; } public TemporalConstraints getConstraintsForRange(final String startName, final String endName) { final String rangeName = startName + "_" + endName; if (constraintsSet.containsKey(rangeName)) { return constraintsSet.get(rangeName); } else { final TemporalConstraints constraints = new TemporalConstraints(rangeName); constraintsSet.put(rangeName, constraints); return constraints; } } public TemporalConstraints getConstraintsFor(final String fieldName) { if (constraintsSet.containsKey(fieldName)) { return constraintsSet.get(fieldName); } else { final TemporalConstraints constraints = new TemporalConstraints(fieldName); constraintsSet.put(fieldName, constraints); return constraints; } } public void removeConstraints(final String... names) { for (final String name : names) { constraintsSet.remove(name); } } public void removeAllConstraintsExcept(final String... 
names) { final Map newConstraintsSet = new HashMap<>(); for (final String name : names) { final TemporalConstraints constraints = constraintsSet.get(name); if (constraints != null) { newConstraintsSet.put(name, constraints); } } constraintsSet.clear(); constraintsSet.putAll(newConstraintsSet); } public boolean hasConstraintsFor(final String propertyName) { return (propertyName != null) && constraintsSet.containsKey(propertyName); } public Set> getSet() { return constraintsSet.entrySet(); } public boolean isEmpty() { if (constraintsSet.isEmpty()) { return true; } boolean isEmpty = true; for (final Entry entry : getSet()) { isEmpty &= entry.getValue().isEmpty(); } return isEmpty; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; import org.geotools.factory.CommonFactoryFinder; import org.locationtech.geowave.core.geotime.index.api.TemporalIndexBuilder; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.opengis.filter.Filter; public class TemporalQuery extends AbstractVectorConstraints { public TemporalQuery() { super(); } public TemporalQuery(final ExplicitTemporalQuery delegateConstraints) { super(delegateConstraints); } @Override protected ExplicitTemporalQuery newConstraints() { return new ExplicitTemporalQuery(); } @Override protected boolean isSupported(final Index index, final GeotoolsFeatureDataAdapter adapter) { return IndexOptimizationUtils.hasTime(index, adapter); } @Override protected Filter getFilter(final GeotoolsFeatureDataAdapter adapter, final Index index) { return getFilter(adapter, delegateConstraints); } protected static Filter getFilter( final GeotoolsFeatureDataAdapter adapter, final QueryConstraints delegateConstraints) { final List constraints = delegateConstraints.getIndexConstraints(new TemporalIndexBuilder().createIndex()); if (adapter.getTimeDescriptors().getTime() != null) { return constraintsToFilter( constraints, data -> TimeUtils.toDuringFilter( data.getMinValuesPerDimension()[0].longValue(), 
data.getMaxValuesPerDimension()[0].longValue(), adapter.getTimeDescriptors().getTime().getLocalName())); } else if ((adapter.getTimeDescriptors().getStartRange() != null) && (adapter.getTimeDescriptors().getEndRange() != null)) { return constraintsToFilter( constraints, data -> TimeUtils.toFilter( data.getMinValuesPerDimension()[0].longValue(), data.getMaxValuesPerDimension()[0].longValue(), adapter.getTimeDescriptors().getStartRange().getLocalName(), adapter.getTimeDescriptors().getEndRange().getLocalName())); } return null; } private static Filter constraintsToFilter( final List constraints, final Function dataToFilter) { if (!constraints.isEmpty()) { final List filters = constraints.stream().map(dataToFilter).collect(Collectors.toList()); if (filters.size() > 1) { return CommonFactoryFinder.getFilterFactory2().or(filters); } else { return filters.get(0); } } else { return null; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/TemporalRange.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query;

import java.nio.ByteBuffer;
import java.util.Date;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.numeric.NumericData;

/**
 * A single [startTime, endTime] interval, defaulting to the full representable range
 * (epoch 0 through Long.MAX_VALUE millis).
 */
public class TemporalRange {
  private Date startTime;
  private Date endTime;
  public static final Date START_TIME = new Date(0);
  public static final Date END_TIME = new Date(Long.MAX_VALUE);

  public TemporalRange() {
    startTime = START_TIME;
    endTime = END_TIME;
  }

  public TemporalRange(final Date startTime, final Date endTime) {
    super();
    this.startTime = startTime;
    this.endTime = endTime;
  }

  public Date getStartTime() {
    return startTime;
  }

  public Date getEndTime() {
    return endTime;
  }

  public void setStartTime(final Date startTime) {
    this.startTime = startTime;
  }

  public void setEndTime(final Date endTime) {
    this.endTime = endTime;
  }

  /** True when the given instant lies within this range, endpoints inclusive. */
  public boolean isWithin(final Date time) {
    return (startTime.before(time) || startTime.equals(time))
        && (endTime.equals(time) || endTime.after(time));
  }

  // NOTE(review): uses strict inequalities throughout, so ranges that merely touch at an
  // endpoint do not count, and a query range that strictly contains [startTime, endTime]
  // is not matched by any of the three clauses — presumably intentional overlap
  // semantics, but worth confirming against callers.
  public boolean isWithin(final NumericData timeRange) {
    final double st = startTime.getTime();
    final double et = endTime.getTime();
    final double rst = timeRange.getMin();
    final double ret = timeRange.getMax();
    return (((st < rst) && (et > rst)) || ((st < ret) && (et > ret)) || ((st < rst) && (et > ret)));
  }

  /**
   * Returns the overlap of the two ranges; a disjoint pair yields the degenerate
   * [START_TIME, START_TIME] sentinel.
   */
  public TemporalRange intersect(final TemporalRange range) {
    final Date start = startTime.after(range.getStartTime()) ? startTime : range.getStartTime();
    final Date end = endTime.before(range.getEndTime()) ? endTime : range.getEndTime();
    if (start.after(end)) {
      return new TemporalRange(START_TIME, START_TIME);
    }
    return new TemporalRange(start, end);
  }

  /** Returns the smallest range covering both inputs (same degenerate sentinel on error). */
  public TemporalRange union(final TemporalRange range) {
    final Date start = startTime.before(range.getStartTime()) ? startTime : range.getStartTime();
    final Date end = endTime.after(range.getEndTime()) ? endTime : range.getEndTime();
    if (start.after(end)) {
      return new TemporalRange(START_TIME, START_TIME);
    }
    return new TemporalRange(start, end);
  }

  /** Serializes both endpoints as varint-encoded times into the supplied buffer. */
  public void toBinary(final ByteBuffer buffer) {
    VarintUtils.writeTime(startTime.getTime(), buffer);
    VarintUtils.writeTime(endTime.getTime(), buffer);
  }

  public byte[] toBinary() {
    final ByteBuffer buf = ByteBuffer.allocate(getBufferSize());
    toBinary(buf);
    return buf.array();
  }

  public void fromBinary(final ByteBuffer buffer) {
    startTime = new Date(VarintUtils.readTime(buffer));
    endTime = new Date(VarintUtils.readTime(buffer));
  }

  public void fromBinary(final byte[] data) {
    final ByteBuffer buf = ByteBuffer.wrap(data);
    fromBinary(buf);
  }

  @Override
  public String toString() {
    return "TemporalRange [startTime=" + startTime + ", endTime=" + endTime + "]";
  }

  // Exact serialized size of the two varint-encoded endpoints.
  protected final int getBufferSize() {
    return VarintUtils.timeByteLength(startTime.getTime())
        + VarintUtils.timeByteLength(endTime.getTime());
  }

  /** True when this range spans the entire representable interval. */
  public boolean isInfinity() {
    return (startTime.getTime() == 0) && (endTime.getTime() == END_TIME.getTime());
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + ((endTime == null) ? 0 : endTime.hashCode());
    result = (prime * result) + ((startTime == null) ? 0 : startTime.hashCode());
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final TemporalRange other = (TemporalRange) obj;
    if (endTime == null) {
      if (other.endTime != null) {
        return false;
      }
    } else if (!endTime.equals(other.endTime)) {
      return false;
    }
    if (startTime == null) {
      if (other.startTime != null) {
        return false;
      }
    } else if (!startTime.equals(other.startTime)) {
      return false;
    }
    return true;
  }
}


================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/VectorQueryBuilderImpl.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.store.query.QueryBuilderImpl; import org.opengis.feature.simple.SimpleFeature; public class VectorQueryBuilderImpl extends QueryBuilderImpl implements VectorQueryBuilder { } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/VectorQueryConstraintsFactoryImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryConstraintsFactory; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraintsFactoryImpl; import org.opengis.filter.Filter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class VectorQueryConstraintsFactoryImpl extends QueryConstraintsFactoryImpl implements VectorQueryConstraintsFactory { private static final Logger LOGGER = LoggerFactory.getLogger(OptimalCQLQuery.class); public static final VectorQueryConstraintsFactoryImpl SINGLETON_INSTANCE = new VectorQueryConstraintsFactoryImpl(); @Override public SpatialTemporalConstraintsBuilder spatialTemporalConstraints() { return new SpatialTemporalConstraintsBuilderImpl(); } // these cql expressions should always attempt to use // CQLQuery.createOptimalQuery() which requires adapter and index @Override public QueryConstraints cqlConstraints(final String cqlExpression) { GeometryUtils.initClassLoader(); try { final Filter cqlFilter = ECQL.toFilter(cqlExpression); return new OptimalCQLQuery(cqlFilter); } catch (final CQLException e) { LOGGER.error("Unable to parse CQL expresion", e); } return null; } @Override public QueryConstraints filterConstraints(final Filter filter) { return new 
OptimalCQLQuery(filter); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/BaseOptimalVectorAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.aggregate.AdapterAndIndexBasedAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public abstract class BaseOptimalVectorAggregation

implements AdapterAndIndexBasedAggregation { private static final Logger LOGGER = LoggerFactory.getLogger(BaseOptimalVectorAggregation.class); protected FieldNameParam fieldNameParam; public BaseOptimalVectorAggregation() {} public BaseOptimalVectorAggregation(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override public P getParameters() { return (P) fieldNameParam; } @Override public void setParameters(final P parameters) { if (parameters instanceof FieldNameParam) { fieldNameParam = (FieldNameParam) parameters; } } @Override public Aggregation createAggregation( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { GeotoolsFeatureDataAdapter gtAdapter; if (adapter instanceof GeotoolsFeatureDataAdapter) { gtAdapter = (GeotoolsFeatureDataAdapter) adapter; } else if ((adapter instanceof InternalDataAdapter) && (((InternalDataAdapter) adapter).getAdapter() instanceof GeotoolsFeatureDataAdapter)) { gtAdapter = (GeotoolsFeatureDataAdapter) ((InternalDataAdapter) adapter).getAdapter(); } else { LOGGER.error( "Unable to perform aggregation on non-geotools feature adapter '" + adapter.getTypeName() + "'"); return null; } if ((fieldNameParam == null) || isCommonIndex(index, gtAdapter)) { return createCommonIndexAggregation(); } return createAggregation(); } protected abstract boolean isCommonIndex(Index index, GeotoolsFeatureDataAdapter adapter); protected abstract Aggregation createCommonIndexAggregation(); protected abstract Aggregation createAggregation(); } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/BoundingBoxAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.jts.geom.Envelope; public abstract class BoundingBoxAggregation

implements Aggregation { protected double minX = Double.MAX_VALUE; protected double minY = Double.MAX_VALUE; protected double maxX = -Double.MAX_VALUE; protected double maxY = -Double.MAX_VALUE; @Override public P getParameters() { return null; } @Override public void setParameters(final P parameters) {} public boolean isSet() { if (minX > maxX || minY > maxY) { return false; } return true; } @Override public Envelope getResult() { if (!isSet()) { return new Envelope(); } return new Envelope(minX, maxX, minY, maxY); } @Override public Envelope merge(final Envelope result1, final Envelope result2) { if (result1.isNull()) { return result2; } else if (result2.isNull()) { return result1; } final double minX = Math.min(result1.getMinX(), result2.getMinX()); final double minY = Math.min(result1.getMinY(), result2.getMinY()); final double maxX = Math.max(result1.getMaxX(), result2.getMaxX()); final double maxY = Math.max(result1.getMaxY(), result2.getMaxY()); return new Envelope(minX, maxX, minY, maxY); } @Override public byte[] resultToBinary(final Envelope result) { final ByteBuffer buffer = ByteBuffer.allocate(Double.BYTES * 4); buffer.putDouble(minX); buffer.putDouble(minY); buffer.putDouble(maxX); buffer.putDouble(maxY); return buffer.array(); } @Override public Envelope resultFromBinary(final byte[] binary) { final ByteBuffer buffer = ByteBuffer.wrap(binary); final double minX = buffer.getDouble(); final double minY = buffer.getDouble(); final double maxX = buffer.getDouble(); final double maxY = buffer.getDouble(); if (minX > maxX || minY > maxY) { // The Envelope implementation will swap min and max if min is greater than max, use a null // Envelope in this case to avoid an invalid result. 
return new Envelope(); } return new Envelope(minX, maxX, minY, maxY); } @Override public void clearResult() { minX = Double.MAX_VALUE; minY = Double.MAX_VALUE; maxX = -Double.MAX_VALUE; maxY = -Double.MAX_VALUE; } @Override public void aggregate(final DataTypeAdapter adapter, final T entry) { final Envelope env = getEnvelope(adapter, entry); aggregate(env); } protected void aggregate(final Envelope env) { if ((env != null) && !env.isNull()) { minX = Math.min(minX, env.getMinX()); minY = Math.min(minY, env.getMinY()); maxX = Math.max(maxX, env.getMaxX()); maxY = Math.max(maxY, env.getMaxY()); } } protected abstract Envelope getEnvelope(final DataTypeAdapter adapter, final T entry); } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/CommonIndexBoundingBoxAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; public class CommonIndexBoundingBoxAggregation

extends BoundingBoxAggregation implements CommonIndexAggregation { @Override protected Envelope getEnvelope( final DataTypeAdapter adapter, final CommonIndexedPersistenceEncoding entry) { final Object v = entry.getCommonData().getValue(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME); if ((v != null) && (v instanceof Geometry)) { return ((Geometry) v).getEnvelopeInternal(); } return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/CommonIndexTimeRangeAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation; import org.threeten.extra.Interval; public class CommonIndexTimeRangeAggregation

extends TimeRangeAggregation implements CommonIndexAggregation { @Override protected Interval getInterval(final CommonIndexedPersistenceEncoding entry) { return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/OptimalVectorBoundingBoxAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.jts.geom.Envelope; public class OptimalVectorBoundingBoxAggregation

extends BaseOptimalVectorAggregation { public OptimalVectorBoundingBoxAggregation() {} public OptimalVectorBoundingBoxAggregation(final FieldNameParam fieldNameParam) { super(fieldNameParam); } @Override protected boolean isCommonIndex(final Index index, final GeotoolsFeatureDataAdapter adapter) { return fieldNameParam.getFieldName().equals( adapter.getFeatureType().getGeometryDescriptor().getLocalName()) && IndexOptimizationUtils.hasAtLeastSpatial(index); } @Override protected Aggregation createCommonIndexAggregation() { return (Aggregation) new CommonIndexBoundingBoxAggregation

(); } @Override protected Aggregation createAggregation() { return (Aggregation) new VectorBoundingBoxAggregation(fieldNameParam); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/OptimalVectorTimeRangeAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.IndexOptimizationUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.threeten.extra.Interval; public class OptimalVectorTimeRangeAggregation

extends BaseOptimalVectorAggregation { public OptimalVectorTimeRangeAggregation() {} public OptimalVectorTimeRangeAggregation(final FieldNameParam fieldNameParam) { super(fieldNameParam); } @Override protected boolean isCommonIndex(final Index index, final GeotoolsFeatureDataAdapter adapter) { // because field name param doesn't allow for multiple, ranges cannot be // set, field name param can be null in which case it can use a range, // or if field name is non-nul it must use a timestamp return ((fieldNameParam == null) || ((adapter.getTimeDescriptors().getTime() != null) && fieldNameParam.getFieldName().equals( adapter.getTimeDescriptors().getTime().getLocalName()))) && IndexOptimizationUtils.hasTime(index, adapter); } @Override protected Aggregation createCommonIndexAggregation() { return (Aggregation) new CommonIndexTimeRangeAggregation

(); } @Override protected Aggregation createAggregation() { return (Aggregation) new VectorTimeRangeAggregation(fieldNameParam); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import java.nio.ByteBuffer; import org.locationtech.geowave.core.geotime.binning.ComplexGeometryBinningOption; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.BinningStrategy; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; /** * This strategy uses a spatial binning type (such as H3, S2, GeoHash) on geometry fields to bin * data. * * @param The type of the entry. The geometry inside of it is queried, and the geohash of that * geometry is used as the bin. */ public abstract class SpatialBinningStrategy implements BinningStrategy { protected String geometryFieldName; /** * The precision/resolution/length used by the binning strategy (it usually is equivalent to * character length). */ protected int precision; protected ComplexGeometryBinningOption complexGeometryBinning; protected SpatialBinningType type; public SpatialBinningStrategy() {} /** * Use the given precision to bin objects * * @param type The type (such as S3, H2, or GeoHash) * @param precision The Geohash precision to calculate bins. 
* @param useCentroidOnly for complex geometry such as lines and polygons whether to just * aggregate one hash value based on the centroid or to apply the aggregation to all * overlapping centroids * @param geometryFieldName the field name for the geometry to bin by */ public SpatialBinningStrategy( final SpatialBinningType type, final int precision, final boolean useCentroidOnly, final String geometryFieldName) { this.type = type; this.precision = precision; // for now scaling by weight isn't wired into aggregations so don't expose that option through // the constructor yet, although at some point it would make some sense to add it this.complexGeometryBinning = useCentroidOnly ? ComplexGeometryBinningOption.USE_CENTROID_ONLY : ComplexGeometryBinningOption.USE_FULL_GEOMETRY; this.geometryFieldName = geometryFieldName; } /** * Extract the geometry from the entry. * * @param entry The entry that will be binned using this strategy. * @return The geometry object in the entry, or null if no geometry is found. */ abstract Geometry getGeometry(final DataTypeAdapter adapter, T entry); /** * @return The precision that is used when calculating bins for entries. */ public int getPrecision() { return precision; } /** * calculates appropriate bins for a given entry. GeohashBinningStrategy only ever bins into * singleton-arrays. * * @param entry An entry to bin, utilizing its' geohash. * @return a length-1 array of the bin that this entry can be placed into. `null` if no Geometry * was found in the entry. */ @Override public ByteArray[] getBins( final DataTypeAdapter adapter, final I entry, final GeoWaveRow... 
rows) { final Geometry geometry = getGeometry((DataTypeAdapter) adapter, (T) entry); if (geometry == null) { return null; } if (ComplexGeometryBinningOption.USE_CENTROID_ONLY.equals(complexGeometryBinning)) { final Point centroid = geometry.getCentroid(); return type.getSpatialBins(centroid, precision); } return type.getSpatialBins(geometry, precision); } @Override public byte[] toBinary() { final byte[] fieldNameBytes = geometryFieldName == null ? new byte[0] : StringUtils.stringToBinary(geometryFieldName); final ByteBuffer buf = ByteBuffer.allocate( fieldNameBytes.length + +VarintUtils.unsignedIntByteLength(fieldNameBytes.length) + VarintUtils.unsignedIntByteLength(type.ordinal()) + VarintUtils.unsignedIntByteLength(precision) + VarintUtils.unsignedIntByteLength(complexGeometryBinning.ordinal())); VarintUtils.writeUnsignedInt(type.ordinal(), buf); VarintUtils.writeUnsignedInt(precision, buf); VarintUtils.writeUnsignedInt(complexGeometryBinning.ordinal(), buf); VarintUtils.writeUnsignedInt(fieldNameBytes.length, buf); buf.put(fieldNameBytes); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); this.type = SpatialBinningType.values()[VarintUtils.readUnsignedInt(buf)]; this.precision = VarintUtils.readUnsignedInt(buf); this.complexGeometryBinning = ComplexGeometryBinningOption.values()[VarintUtils.readUnsignedInt(buf)]; final byte[] fieldNameBytes = new byte[VarintUtils.readUnsignedInt(buf)]; if (fieldNameBytes.length > 0) { buf.get(fieldNameBytes); this.geometryFieldName = StringUtils.stringFromBinary(fieldNameBytes); } else { this.geometryFieldName = null; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialCommonIndexedBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import java.nio.ByteBuffer; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.jts.geom.Geometry; /** * A GeohashBinningStrategy that bins CommonIndexedPersistenceEncoding values. * * @see SpatialBinningStrategy */ public class SpatialCommonIndexedBinningStrategy extends SpatialBinningStrategy { /** * Create a binning strategy using a small number of bins. Usage of this method is not * recommended, if you are to use this, it should be through serialization. */ public SpatialCommonIndexedBinningStrategy() { this(SpatialBinningType.S2, 3, true); } public SpatialCommonIndexedBinningStrategy( final SpatialBinningType type, final int precision, final boolean useCentroidOnly) { this(type, precision, useCentroidOnly, SpatialField.DEFAULT_GEOMETRY_FIELD_NAME); } /** * @param type S2, H3, or GeoHash * @param precision the resolution/length of the hash * @param useCentroidOnly desired behavior for complex geometry such as lines and polygons whether * to just aggregate one hash value based on the centroid or to apply the aggregation to * all overlapping centroids * @param geometryFieldName The field name of the geometry used in a given * CommonIndexedPersistenceEncoding entry. 
For more documentation on this behavior, see * {@link SpatialBinningStrategy#GeohashBinningStrategy(int) new * GeohashBinningStrategy(int)}. */ public SpatialCommonIndexedBinningStrategy( final SpatialBinningType type, final int precision, final boolean useCentroidOnly, final String geometryFieldName) { super(type, precision, useCentroidOnly, geometryFieldName); } @Override public Geometry getGeometry( final DataTypeAdapter adapter, final CommonIndexedPersistenceEncoding entry) { final PersistentDataset data = entry.getCommonData(); final Object geometryValue = data.getValue(geometryFieldName); if (geometryValue instanceof Geometry) { return ((Geometry) geometryValue); } else { return null; } } @Override public byte[] toBinary() { final byte[] fieldName = geometryFieldName.getBytes(StringUtils.getGeoWaveCharset()); return ByteBuffer.allocate(4 + 4 + fieldName.length).putInt(getPrecision()).putInt( fieldName.length).put(fieldName).array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer bb = ByteBuffer.wrap(bytes); precision = bb.getInt(); final int fieldLen = bb.getInt(); final byte[] fieldBytes = new byte[fieldLen]; bb.get(fieldBytes); geometryFieldName = new String(fieldBytes, StringUtils.getGeoWaveCharset()); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialFieldBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.jts.geom.Geometry; public class SpatialFieldBinningStrategy extends SpatialBinningStrategy { /** * Create a binning strategy using a small number of bins. Usage of this method is not * recommended, if you are to use this, it should be through serialization. */ public SpatialFieldBinningStrategy() { this(SpatialBinningType.S2, 3, true, null); } /** * @param type S2, H3, or GeoHash * @param precision the resolution/length of the hash * @param useCentroidOnly desired behavior for complex geometry such as lines and polygons whether * to just aggregate one hash value based on the centroid or to apply the aggregation to * all overlapping centroids * @param geometryFieldName the geometry field to bin on */ public SpatialFieldBinningStrategy( final SpatialBinningType type, final int precision, final boolean useCentroidOnly, final String geometryFieldName) { super(type, precision, useCentroidOnly, geometryFieldName); } @Override Geometry getGeometry(DataTypeAdapter adapter, T entry) { final Object obj = adapter.getFieldValue(entry, geometryFieldName); if (obj != null && obj instanceof Geometry) { return (Geometry) obj; } return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/SpatialSimpleFeatureBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; /** * A GeohashBinningStrategy that bins SimpleFeature values. * * @see SpatialBinningStrategy */ public class SpatialSimpleFeatureBinningStrategy extends SpatialBinningStrategy { /** * Create a binning strategy using a small number of bins. Usage of this method is not * recommended, if you are to use this, it should be through serialization. */ public SpatialSimpleFeatureBinningStrategy() { this(SpatialBinningType.S2, 3, true); } /** * @param type S2, H3, or GeoHash * @param precision the resolution/length of the hash * @param useCentroidOnly desired behavior for complex geometry such as lines and polygons whether * to just aggregate one hash value based on the centroid or to apply the aggregation to * all overlapping centroids */ public SpatialSimpleFeatureBinningStrategy( final SpatialBinningType type, final int precision, final boolean useCentroidOnly) { super(type, precision, useCentroidOnly, null); } @Override public Geometry getGeometry( final DataTypeAdapter adapter, final SimpleFeature entry) { return (Geometry) entry.getDefaultGeometry(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/TimeRangeAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse 
Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import java.nio.ByteBuffer; import java.time.Instant; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.threeten.extra.Interval; public abstract class TimeRangeAggregation

implements Aggregation { protected long min = Long.MAX_VALUE; protected long max = Long.MIN_VALUE; @Override public P getParameters() { return null; } @Override public void setParameters(final P parameters) {} public boolean isSet() { if ((min == Long.MAX_VALUE) || (max == Long.MIN_VALUE)) { return false; } return true; } @Override public Interval getResult() { if (!isSet()) { return null; } return Interval.of(Instant.ofEpochMilli(min), Instant.ofEpochMilli(max)); } @Override public Interval merge(final Interval result1, final Interval result2) { if (result1 == null) { return result2; } else if (result2 == null) { return result1; } final long min = Math.min(result1.getStart().toEpochMilli(), result1.getEnd().toEpochMilli()); final long max = Math.max(result2.getStart().toEpochMilli(), result2.getEnd().toEpochMilli()); return Interval.of(Instant.ofEpochMilli(min), Instant.ofEpochMilli(max)); } @Override public byte[] resultToBinary(final Interval result) { long start = Long.MAX_VALUE; long end = Long.MIN_VALUE; if (result != null) { start = result.getStart().toEpochMilli(); end = result.getEnd().toEpochMilli(); } final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.timeByteLength(start) + VarintUtils.timeByteLength(end)); VarintUtils.writeTime(start, buffer); VarintUtils.writeTime(end, buffer); return buffer.array(); } @Override public Interval resultFromBinary(final byte[] binary) { final ByteBuffer buffer = ByteBuffer.wrap(binary); final long minTime = VarintUtils.readTime(buffer); final long maxTime = VarintUtils.readTime(buffer); if ((min == Long.MAX_VALUE) || (max == Long.MIN_VALUE)) { return null; } return Interval.of(Instant.ofEpochMilli(minTime), Instant.ofEpochMilli(maxTime)); } @Override public void clearResult() { min = Long.MAX_VALUE; max = Long.MIN_VALUE; } @Override public void aggregate(final DataTypeAdapter adapter, final T entry) { final Interval env = getInterval(entry); aggregate(env); } protected void aggregate(final Interval interval) { if 
(interval != null) { min = Math.min(min, interval.getStart().toEpochMilli()); max = Math.max(max, interval.getEnd().toEpochMilli()); } } protected abstract Interval getInterval(final T entry); } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorAggregationQueryBuilderImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.query.aggregate.AggregationQueryBuilderImpl; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions; import org.opengis.feature.simple.SimpleFeature; public class VectorAggregationQueryBuilderImpl

extends AggregationQueryBuilderImpl> implements VectorAggregationQueryBuilder { @Override public VectorAggregationQueryBuilder bboxOfResults(final String... typeNames) { options = new AggregateTypeQueryOptions(new OptimalVectorBoundingBoxAggregation(), typeNames); return this; } @Override public VectorAggregationQueryBuilder bboxOfResultsForGeometryField( final String typeName, final String geomFieldName) { options = new AggregateTypeQueryOptions( new OptimalVectorBoundingBoxAggregation<>(new FieldNameParam(geomFieldName)), typeName); return this; } @Override public VectorAggregationQueryBuilder timeRangeOfResults(final String... typeNames) { options = new AggregateTypeQueryOptions(new VectorTimeRangeAggregation(), typeNames); return this; } @Override public VectorAggregationQueryBuilder timeRangeOfResultsForTimeField( final String typeName, final String timeFieldName) { options = new AggregateTypeQueryOptions( new VectorTimeRangeAggregation(new FieldNameParam(timeFieldName)), typeName); return this; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorBoundingBoxAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; public class VectorBoundingBoxAggregation extends BoundingBoxAggregation { private FieldNameParam fieldNameParam; private String spatialField = null; public VectorBoundingBoxAggregation() { this(null); } public VectorBoundingBoxAggregation(final FieldNameParam fieldNameParam) { super(); this.fieldNameParam = fieldNameParam; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override protected Envelope getEnvelope(final DataTypeAdapter adapter, final T entry) { if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) { final Object o = adapter.getFieldValue(entry, fieldNameParam.getFieldName()); if (o instanceof Geometry) { final Geometry geometry = (Geometry) o; return geometry.getEnvelopeInternal(); } } else { if (spatialField == null) { for (final FieldDescriptor descriptor : adapter.getFieldDescriptors()) { if (Geometry.class.isAssignableFrom(descriptor.bindingClass())) { spatialField = descriptor.fieldName(); break; } } } if (spatialField != null) { return ((Geometry) adapter.getFieldValue(entry, spatialField)).getEnvelopeInternal(); } } return null; } } ================================================ FILE: 
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorTimeRangeAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import java.time.Instant; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.threeten.extra.Interval; public class VectorTimeRangeAggregation extends TimeRangeAggregation { private static final Logger LOGGER = LoggerFactory.getLogger(VectorTimeRangeAggregation.class); private FieldNameParam fieldNameParam; private final Map descMap = new HashMap<>(); public VectorTimeRangeAggregation() { this(null); } public VectorTimeRangeAggregation(final FieldNameParam fieldNameParam) { super(); this.fieldNameParam = fieldNameParam; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override protected Interval getInterval(final SimpleFeature entry) { if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) { return TimeUtils.getInterval(entry, fieldNameParam.getFieldName()); } final String type = entry.getType().getName().getLocalPart(); TimeDescriptors desc = descMap.get(type); if (desc == null) { desc = TimeUtils.inferTimeAttributeDescriptor(entry.getFeatureType()); descMap.put(type, desc); } if ((desc.getStartRange() != null) && (desc.getEndRange() != null)) { final Object start = 
entry.getAttribute(desc.getStartRange().getName()); final Object end = entry.getAttribute(desc.getStartRange().getName()); if ((start == null) || (end == null)) { LOGGER.warn("start or end value is null, ignoring feature"); return null; } // TODO we may want to sanity check that start is less than end? return Interval.of( Instant.ofEpochMilli(TimeUtils.getTimeMillis(start)), Instant.ofEpochMilli(TimeUtils.getTimeMillis(end))); } else if (desc.getTime() != null) { final Object time = entry.getAttribute(desc.getTime().getName()); if ((time == null)) { LOGGER.warn("time attribute value is null, ignoring feature"); return null; } final Instant instant = Instant.ofEpochMilli(TimeUtils.getTimeMillis(time)); return Interval.of(instant, instant); } LOGGER.error( "time field not found for type '" + entry.getFeatureType().getTypeName() + "'. Consider explicitly setting field name."); return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/SpatialTemporalConstraintsBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.api;

import java.util.Date;
import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.locationtech.jts.geom.Geometry;
import org.threeten.extra.Interval;

/** This is a builder for creating purely spatiotemporal query constraints */
public interface SpatialTemporalConstraintsBuilder {
  /**
   * clear any spatial constraints
   *
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder noSpatialConstraints();

  /**
   * Set a bounding box as a spatial constraint
   *
   * @param minX the minimum x value
   * @param maxX the maximum x value
   * @param minY the minimum y value
   * @param maxY the maximum y value
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder bboxConstraints(
      double minX,
      double maxX,
      double minY,
      double maxY);

  /**
   * set a geometry as a spatial constraint
   *
   * @param geometry the geometry
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder spatialConstraints(Geometry geometry);

  /**
   * set a Coordinate Reference System code to use associated with this builder's geometry. If no
   * geometry is set, this is inconsequential.
   *
   * @param crsCode the CRS code
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder spatialConstraintsCrs(String crsCode);

  /**
   * set a relational operation when comparing geometries to be uses with this builder's geometry.
   * If no geometry is set, this is inconsequential.
   *
   * @param spatialCompareOp the compare operation
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder spatialConstraintsCompareOperation(
      CompareOperation spatialCompareOp);

  /**
   * clear any temporal constraints
   *
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder noTemporalConstraints();

  /**
   * add a time range
   *
   * @param startTime the start of the range (inclusive)
   * @param endTime the end of the range (exclusive)
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder addTimeRange(Date startTime, Date endTime);

  /**
   * add a time range as an interval
   *
   * @param timeRange the time range
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder addTimeRange(Interval timeRange);

  /**
   * set the time ranges to this array of intervals
   *
   * @param timeRanges the time ranges
   * @return this builder
   */
  SpatialTemporalConstraintsBuilder setTimeRanges(Interval[] timeRanges);

  /**
   * build a query constraints that represents the spatiotemporal constraints of this builder
   *
   * @return the constraints
   */
  QueryConstraints build();
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/VectorAggregationQueryBuilder.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.api; import org.locationtech.geowave.core.geotime.store.query.BaseVectorQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.VectorQueryConstraintsFactoryImpl; import org.locationtech.geowave.core.geotime.store.query.aggregate.VectorAggregationQueryBuilderImpl; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.AggregationQuery; import org.locationtech.geowave.core.store.api.AggregationQueryBuilder; import org.opengis.feature.simple.SimpleFeature; /** * An aggregation query builder particular for vector data. This should be preferentially used to * build AggregationQuery's for SimpleFeature data. * * @param

the type for input parameters * @param the result type */ public interface VectorAggregationQueryBuilder

extends AggregationQueryBuilder>, BaseVectorQueryBuilder, VectorAggregationQueryBuilder> { @Override default VectorQueryConstraintsFactory constraintsFactory() { return VectorQueryConstraintsFactoryImpl.SINGLETON_INSTANCE; } /** * get a default implementation of this builder * * @return the builder */ static

VectorAggregationQueryBuilder newBuilder() { return new VectorAggregationQueryBuilderImpl<>(); } /** * Convenience method for getting a bounding box of the results of a query. It uses the default * geometry for a feature type which is also the indexed geometry. * * @param typeNames the type names to constrain by * @return this builder */ VectorAggregationQueryBuilder bboxOfResults(String... typeNames); /** * Convenience method for getting a bounding box of the results of a query. This can be * particularly useful if you want to calculate the bbox on a different field than the * default/indexed Geometry. * * @param typeName the type name * @param geomAttributeName the geometry attribute name * @return this builder */ VectorAggregationQueryBuilder bboxOfResultsForGeometryField( String typeName, String geomAttributeName); /** * Convenience method for getting a time range of the results of a query. This has to use inferred * or hinted temporal attribute names. * * @param typeNames the type names to constrain by * @return this builder */ VectorAggregationQueryBuilder timeRangeOfResults(String... typeNames); /** * Convenience method for getting a time range of the results of a query. This can be particularly * useful if you want to calculate the time range on a specific time field. * * @param typeName the type names to constrain by * @param timeAttributeName the time attribute name * @return this builder */ VectorAggregationQueryBuilder timeRangeOfResultsForTimeField( String typeName, String timeAttributeName); } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/VectorQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.api; import org.locationtech.geowave.core.geotime.store.query.BaseVectorQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.VectorQueryBuilderImpl; import org.locationtech.geowave.core.geotime.store.query.VectorQueryConstraintsFactoryImpl; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.opengis.feature.simple.SimpleFeature; /** * A QueryBuilder for vector (SimpleFeature) data. This should be preferred as the mechanism for * constructing a query in all cases when working with SimpleFeature data. */ public interface VectorQueryBuilder extends QueryBuilder, BaseVectorQueryBuilder, VectorQueryBuilder> { static VectorQueryBuilder newBuilder() { return new VectorQueryBuilderImpl(); } @Override default VectorQueryConstraintsFactory constraintsFactory() { return VectorQueryConstraintsFactoryImpl.SINGLETON_INSTANCE; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/api/VectorQueryConstraintsFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.api;

import org.locationtech.geowave.core.store.api.QueryConstraintsFactory;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.opengis.filter.Filter;

/** A constraints factory for building constraints for SimpleFeature data. */
public interface VectorQueryConstraintsFactory extends QueryConstraintsFactory {
  /**
   * get a builder for spatiotemporal constraints
   *
   * @return the builder
   */
  SpatialTemporalConstraintsBuilder spatialTemporalConstraints();

  /**
   * create query constraints representing an OGC filter on vector data
   *
   * @param filter the OGC filter
   * @return the query constraints
   */
  QueryConstraints filterConstraints(final Filter filter);

  /**
   * create query constraints representing this CQL expression (see Geoserver's syntax guide:
   * https://docs.geoserver.org/latest/en/user/filter/ecql_reference.html)
   *
   * @param cqlExpression the CQL expression
   * @return the query constraints
   */
  QueryConstraints cqlConstraints(final String cqlExpression);
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/CQLQueryFilter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter; import java.nio.ByteBuffer; import org.geotools.filter.text.ecql.ECQL; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.FilterToCQLTool; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.opengis.feature.simple.SimpleFeature; import org.opengis.filter.Filter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class CQLQueryFilter implements QueryFilter { private static final Logger LOGGER = LoggerFactory.getLogger(CQLQueryFilter.class); private InternalGeotoolsFeatureDataAdapter adapter; private AdapterToIndexMapping indexMapping; private Filter filter; 
public CQLQueryFilter() { super(); } public CQLQueryFilter( final Filter filter, final InternalGeotoolsFeatureDataAdapter adapter, final AdapterToIndexMapping indexMapping) { this.filter = FilterToCQLTool.fixDWithin(filter); this.adapter = adapter; this.indexMapping = indexMapping; } public String getTypeName() { return adapter.getTypeName(); } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { if ((filter != null) && (indexModel != null) && (adapter != null)) { final PersistentDataset adapterExtendedValues = new MultiFieldPersistentDataset<>(); if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) { ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues( adapter, indexModel); final PersistentDataset existingExtValues = ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData(); if (persistenceEncoding.isAsync()) { return false; } if (existingExtValues != null) { adapterExtendedValues.addValues(existingExtValues.getValues()); } } final IndexedAdapterPersistenceEncoding encoding = new IndexedAdapterPersistenceEncoding( persistenceEncoding.getInternalAdapterId(), persistenceEncoding.getDataId(), persistenceEncoding.getInsertionPartitionKey(), persistenceEncoding.getInsertionSortKey(), persistenceEncoding.getDuplicateCount(), persistenceEncoding.getCommonData(), new MultiFieldPersistentDataset(), adapterExtendedValues); final SimpleFeature feature = (SimpleFeature) adapter.decode( encoding, indexMapping, new IndexImpl( null, // because we // know the // feature data // adapter doesn't use the numeric // index // strategy and only the common // index // model to decode the simple // feature, // we pass along a null strategy to // eliminate the necessity to send a // serialization of the strategy in // the // options of this iterator indexModel)); if (feature == null) { return false; } return filter.evaluate(feature); } return true; 
} @Override public byte[] toBinary() { byte[] filterBytes; if (filter == null) { LOGGER.warn("CQL filter is null"); filterBytes = new byte[] {}; } else { filterBytes = StringUtils.stringToBinary(ECQL.toCQL(filter)); } byte[] adapterBytes; if (adapter != null) { adapterBytes = PersistenceUtils.toBinary(adapter); } else { LOGGER.warn("Feature Data Adapter is null"); adapterBytes = new byte[] {}; } byte[] mappingBytes; if (indexMapping != null) { mappingBytes = PersistenceUtils.toBinary(indexMapping); } else { LOGGER.warn("Adapter to index mapping is null"); mappingBytes = new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate( filterBytes.length + adapterBytes.length + mappingBytes.length + VarintUtils.unsignedIntByteLength(filterBytes.length) + VarintUtils.unsignedIntByteLength(adapterBytes.length)); VarintUtils.writeUnsignedInt(filterBytes.length, buf); buf.put(filterBytes); VarintUtils.writeUnsignedInt(adapterBytes.length, buf); buf.put(adapterBytes); buf.put(mappingBytes); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { GeometryUtils.initClassLoader(); final ByteBuffer buf = ByteBuffer.wrap(bytes); final int filterBytesLength = VarintUtils.readUnsignedInt(buf); if (filterBytesLength > 0) { final byte[] filterBytes = ByteArrayUtils.safeRead(buf, filterBytesLength); final String cql = StringUtils.stringFromBinary(filterBytes); try { filter = ECQL.toFilter(cql); } catch (final Exception e) { throw new IllegalArgumentException(cql, e); } } else { LOGGER.warn("CQL filter is empty bytes"); filter = null; } final int adapterBytesLength = VarintUtils.readUnsignedInt(buf); if (adapterBytesLength > 0) { final byte[] adapterBytes = ByteArrayUtils.safeRead(buf, adapterBytesLength); try { adapter = (InternalGeotoolsFeatureDataAdapter) PersistenceUtils.fromBinary(adapterBytes); } catch (final Exception e) { throw new IllegalArgumentException("Unable to read adapter from CQL filter binary", e); } } else { LOGGER.warn("Feature Data Adapter is 
empty bytes"); adapter = null; } final int mappingBytesLength = buf.remaining(); if (adapterBytesLength > 0) { final byte[] mappingBytes = ByteArrayUtils.safeRead(buf, mappingBytesLength); try { indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes); } catch (final Exception e) { throw new IllegalArgumentException( "Unable to read adapter to index mapping from CQL filter binary", e); } } else { LOGGER.warn("Adapter to index mapping is empty bytes"); indexMapping = null; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/SpatialQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.prep.PreparedGeometry; import com.google.common.collect.Interner; import com.google.common.collect.Interners; /** * This filter can perform fine-grained acceptance testing (intersection test with a query geometry) * with JTS geometry */ public class SpatialQueryFilter extends BasicQueryFilter { private static final Interner geometryImageInterner = Interners.newWeakInterner(); private GeometryImage preparedGeometryImage; protected interface SpatialQueryCompareOp { public boolean compare(final Geometry dataGeometry, final PreparedGeometry constraintGeometry); public BasicQueryCompareOperation 
getBaseCompareOp(); } public enum CompareOperation implements SpatialQueryCompareOp { CONTAINS { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.contains(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.CONTAINS; } }, OVERLAPS { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.overlaps(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.OVERLAPS; } }, INTERSECTS { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.intersects(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.INTERSECTS; } }, TOUCHES { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.touches(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.TOUCHES; } }, WITHIN { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.within(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.WITHIN; } }, DISJOINT { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.disjoint(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.DISJOINT; } }, CROSSES { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { return constraintGeometry.crosses(dataGeometry); } @Override public 
BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.CROSSES; } }, EQUALS { @Override public boolean compare( final Geometry dataGeometry, final PreparedGeometry constraintGeometry) { // This method is same as Geometry.equalsTopo which is // computationally expensive. // See equalsExact for quick structural equality return constraintGeometry.getGeometry().equals(dataGeometry); } @Override public BasicQueryCompareOperation getBaseCompareOp() { return BasicQueryCompareOperation.EQUALS; } } }; private CompareOperation compareOperation = CompareOperation.INTERSECTS; private Set geometryFieldNames; public SpatialQueryFilter() { super(); } public SpatialQueryFilter( final MultiDimensionalNumericData query, final NumericDimensionField[] orderedConstrainedDimensionDefinitions, final NumericDimensionField[] unconstrainedDimensionDefinitions, final Geometry queryGeometry, final CompareOperation compareOp, final BasicQueryCompareOperation nonSpatialCompareOp) { this( stripGeometry( query, orderedConstrainedDimensionDefinitions, unconstrainedDimensionDefinitions), queryGeometry, compareOp, nonSpatialCompareOp); } private SpatialQueryFilter( final StrippedGeometry strippedGeometry, final Geometry queryGeometry, final CompareOperation compareOp, final BasicQueryCompareOperation nonSpatialCompareOp) { super( strippedGeometry.strippedQuery, strippedGeometry.strippedDimensionDefinitions, nonSpatialCompareOp); preparedGeometryImage = new GeometryImage(GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(queryGeometry)); geometryFieldNames = strippedGeometry.geometryFieldNames; if (compareOp != null) { compareOperation = compareOp; } } private static class StrippedGeometry { private final MultiDimensionalNumericData strippedQuery; private final NumericDimensionField[] strippedDimensionDefinitions; private final Set geometryFieldNames; public StrippedGeometry( final MultiDimensionalNumericData strippedQuery, final NumericDimensionField[] 
strippedDimensionDefinitions, final Set geometryFieldNames) { this.strippedQuery = strippedQuery; this.strippedDimensionDefinitions = strippedDimensionDefinitions; this.geometryFieldNames = geometryFieldNames; } } private static StrippedGeometry stripGeometry( final MultiDimensionalNumericData query, final NumericDimensionField[] orderedConstrainedDimensionDefinitions, final NumericDimensionField[] unconstrainedDimensionDefinitions) { final Set geometryFieldNames = new HashSet<>(); final List numericDataPerDimension = new ArrayList<>(); final List> fields = new ArrayList<>(); final NumericData[] data = query.getDataPerDimension(); for (int d = 0; d < orderedConstrainedDimensionDefinitions.length; d++) { // if the type on the generic is assignable to geometry then save // the field ID for later filtering if (isSpatial(orderedConstrainedDimensionDefinitions[d])) { geometryFieldNames.add(orderedConstrainedDimensionDefinitions[d].getFieldName()); } else { numericDataPerDimension.add(data[d]); fields.add(orderedConstrainedDimensionDefinitions[d]); } } // we need to also add all geometry field IDs even if it is // unconstrained to be able to apply a geometry intersection (understand // that the bbox for a geometry can imply a full range based on its // envelope but the polygon may still need to be intersected with // results) for (int d = 0; d < unconstrainedDimensionDefinitions.length; d++) { if (isSpatial(unconstrainedDimensionDefinitions[d])) { geometryFieldNames.add(unconstrainedDimensionDefinitions[d].getFieldName()); } } return new StrippedGeometry( new BasicNumericDataset( numericDataPerDimension.toArray(new NumericData[numericDataPerDimension.size()])), fields.toArray(new NumericDimensionField[fields.size()]), geometryFieldNames); } public static boolean isSpatial(final NumericDimensionField d) { return Geometry.class.isAssignableFrom(d.getFieldClass()); } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding 
persistenceEncoding) { if (preparedGeometryImage == null) { return true; } // we can actually get the geometry for the data and test the // intersection of the query geometry with that boolean geometryPasses = false; for (final String fieldName : geometryFieldNames) { final Object geomObj = persistenceEncoding.getCommonData().getValue(fieldName); if (persistenceEncoding.isAsync()) { return false; } if ((geomObj != null) && (geomObj instanceof Geometry)) { final Geometry geom = (Geometry) geomObj; if (geometryPasses(geom)) { geometryPasses = true; break; } } } if (!geometryPasses) { return false; } if (isSpatialOnly()) { // if this is only a spatial index, return // true return true; } // otherwise, if the geometry passes, and there are other dimensions, // check the other dimensions return super.accept(indexModel, persistenceEncoding); } private boolean geometryPasses(final Geometry dataGeometry) { if (dataGeometry == null) { return false; } if (preparedGeometryImage != null) { return compareOperation.compare(dataGeometry, preparedGeometryImage.preparedGeometry); } return false; } protected boolean isSpatialOnly() { return (dimensionFields == null) || (dimensionFields.length == 0); } @Override public byte[] toBinary() { final byte[] geometryBinary = preparedGeometryImage.geometryBinary; final byte[] geometryFieldNamesBytes = StringUtils.stringsToBinary(geometryFieldNames.toArray(new String[0])); final byte[] theRest = super.toBinary(); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(compareOperation.ordinal()) + VarintUtils.unsignedIntByteLength(geometryBinary.length) + VarintUtils.unsignedIntByteLength(geometryFieldNamesBytes.length) + geometryBinary.length + geometryFieldNamesBytes.length + theRest.length); VarintUtils.writeUnsignedInt(compareOperation.ordinal(), buf); VarintUtils.writeUnsignedInt(geometryBinary.length, buf); VarintUtils.writeUnsignedInt(geometryFieldNamesBytes.length, buf); buf.put(geometryBinary); 
buf.put(geometryFieldNamesBytes); buf.put(theRest); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); compareOperation = CompareOperation.values()[VarintUtils.readUnsignedInt(buf)]; final int geometryBinaryLength = VarintUtils.readUnsignedInt(buf); final int geometryFieldNamesByteLength = VarintUtils.readUnsignedInt(buf); final byte[] geometryBinary = ByteArrayUtils.safeRead(buf, geometryBinaryLength); final byte[] geometryFieldNamesBytes = ByteArrayUtils.safeRead(buf, geometryFieldNamesByteLength); geometryFieldNames = new HashSet<>(Arrays.asList(StringUtils.stringsFromBinary(geometryFieldNamesBytes))); final byte[] theRest = new byte[buf.remaining()]; buf.get(theRest); preparedGeometryImage = geometryImageInterner.intern(new GeometryImage(geometryBinary)); // build the the PreparedGeometry and underling Geometry if not // reconstituted yet; most likely occurs if this thread constructed the // image. preparedGeometryImage.init(); super.fromBinary(theRest); } /** * This class is used for interning a PreparedGeometry. Prepared geometries cannot be interned * since they do not extend Object.hashCode(). * *

Interning a geometry assumes a geometry is already constructed on the heap at the time * interning begins. The byte image of the geometry provides a more efficient component to hash and * associate with a single image of the geometry. * *

The approach of interning the Geometry prior to construction of a PreparedGeometry led to * excessive memory use. Thus, this class is constructed to hold the prepared geometry and prevent * reconstruction of the underlying geometry from a byte array if the Geometry has been interned. * *

Using this approach increased performance of a large query unit test by 40% and reduced * heap memory consumption by roughly 50%. */
  public static class GeometryImage {

    // Serialized image of the geometry; this is the sole key for hashCode/equals so that
    // logically identical geometries intern to the same instance.
    public byte[] geometryBinary;
    // Lazily reconstituted prepared geometry; remains null until init() is called.
    public PreparedGeometry preparedGeometry = null;

    public GeometryImage(final PreparedGeometry preparedGeometry) {
      super();
      this.preparedGeometry = preparedGeometry;
      geometryBinary = GeometryUtils.geometryToBinary(preparedGeometry.getGeometry(), null);
    }

    public GeometryImage(final byte[] geometryBinary) {
      super();
      this.geometryBinary = geometryBinary;
    }

    /**
     * Builds the prepared geometry from the binary image if it has not been reconstituted yet.
     * Synchronized because interned instances may be shared by multiple threads.
     */
    public synchronized void init() {
      if (preparedGeometry == null) {
        preparedGeometry =
            GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(
                GeometryUtils.geometryFromBinary(geometryBinary, null));
      }
    }

    public PreparedGeometry getGeometry() {
      return preparedGeometry;
    }

    // hashCode/equals intentionally consider only the byte image, never the (lazily built)
    // prepared geometry.
    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + Arrays.hashCode(geometryBinary);
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final GeometryImage other = (GeometryImage) obj;
      if (!Arrays.equals(geometryBinary, other.geometryBinary)) {
        return false;
      }
      return true;
    }
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/CQLToGeoWaveConversionException.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression; /** * Thrown when the filter visitor is unable to directly translate the CQL filter to the GeoWave * filter format. */ public class CQLToGeoWaveConversionException extends RuntimeException { private static final long serialVersionUID = -9093452243825634064L; } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/CQLToGeoWaveFilterVisitor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression; import java.util.Calendar; import java.util.Date; import org.apache.commons.lang.StringUtils; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor; import org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptor; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialExpression; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral; import org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.GenericFieldValue; import 
org.locationtech.geowave.core.store.query.filter.expression.GenericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.Literal; import org.locationtech.geowave.core.store.query.filter.expression.Not; import org.locationtech.geowave.core.store.query.filter.expression.Or; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.filter.FilterVisitor; import org.opengis.filter.expression.ExpressionVisitor; /** * This filter attempts to convert a CQL filter into a GeoWave filter. Since GeoWave filters are a * subset of the functionality found in CQL, an exception will be thrown if the filter cannot be * mapped exactly. 
*/ public class CQLToGeoWaveFilterVisitor implements FilterVisitor, ExpressionVisitor { private enum ExpressionType { ANY, NUMERIC, TEXT, SPATIAL, TEMPORAL, BOOLEAN, } private final DataTypeAdapter adapter; public CQLToGeoWaveFilterVisitor(final DataTypeAdapter adapter) { this.adapter = adapter; } @Override public Object visit( final org.opengis.filter.expression.NilExpression expression, final Object extraData) { throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.expression.Add expression, final Object extraData) { final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC); final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC); if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) { return ((NumericExpression) expr1).add(expr2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.expression.Subtract expression, final Object extraData) { final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC); final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC); if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) { return ((NumericExpression) expr1).subtract(expr2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.expression.Multiply expression, final Object extraData) { final Object expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC); final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC); if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) { return ((NumericExpression) expr1).multiplyBy(expr2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.expression.Divide expression, final Object extraData) { final Object 
expr1 = expression.getExpression1().accept(this, ExpressionType.NUMERIC); final Object expr2 = expression.getExpression2().accept(this, ExpressionType.NUMERIC); if ((expr1 instanceof NumericExpression) && (expr2 instanceof NumericExpression)) { return ((NumericExpression) expr1).divideBy(expr2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.expression.Function expression, final Object extraData) { // TODO: Add support for commonly used functions (abs, strConcat, strEndsWith, // strEqualsIgnoreCase, strStartsWith) throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.expression.Literal expression, final Object extraData) { final Object value = expression.getValue(); if ((extraData != null) && (extraData instanceof ExpressionType)) { switch ((ExpressionType) extraData) { case NUMERIC: return NumericLiteral.of((Number) value); case SPATIAL: return SpatialLiteral.of(value); case TEMPORAL: return TemporalLiteral.of(value); case TEXT: return TextLiteral.of((String) value); case BOOLEAN: return BooleanLiteral.of(value); default: break; } } return inferLiteral(value); } private Literal inferLiteral(final Object object) { if ((object instanceof Geometry) || (object instanceof Envelope)) { return SpatialLiteral.of(object); } if (object instanceof Boolean) { return BooleanLiteral.of(object); } if (object instanceof Number) { return NumericLiteral.of((Number) object); } if (object instanceof String) { return TextLiteral.of((String) object); } if ((object instanceof Date) || (object instanceof Calendar)) { return TemporalLiteral.of(object); } return GenericLiteral.of(object); } @Override public Object visit( final org.opengis.filter.expression.PropertyName expression, final Object extraData) { String value = expression.getPropertyName(); FieldDescriptor descriptor = adapter.getFieldDescriptor(value); if (descriptor == null && (value == null || value.length() == 0)) { if 
(extraData != null && extraData.equals(ExpressionType.SPATIAL)) { // Attempt to infer the default geometry field final FieldDescriptor[] descriptors = adapter.getFieldDescriptors(); for (final FieldDescriptor field : descriptors) { if (Geometry.class.isAssignableFrom(field.bindingClass())) { value = field.fieldName(); descriptor = field; break; } } } } if (descriptor == null) { throw new CQLToGeoWaveConversionException(); } if ((extraData != null) && (extraData instanceof ExpressionType)) { switch ((ExpressionType) extraData) { case NUMERIC: return NumericFieldValue.of(value); case SPATIAL: return SpatialFieldValue.of(value); case TEMPORAL: return TemporalFieldValue.of(value); case TEXT: return TextFieldValue.of(value); case BOOLEAN: return BooleanFieldValue.of(value); default: break; } } if ((descriptor instanceof SpatialFieldDescriptor) || Geometry.class.isAssignableFrom(descriptor.bindingClass()) || Envelope.class.isAssignableFrom(descriptor.bindingClass())) { return SpatialFieldValue.of(value); } if ((descriptor instanceof TemporalFieldDescriptor) || Date.class.isAssignableFrom(descriptor.bindingClass()) || Calendar.class.isAssignableFrom(descriptor.bindingClass())) { return TemporalFieldValue.of(value); } if (Boolean.class.isAssignableFrom(descriptor.bindingClass())) { return BooleanFieldValue.of(value); } if (Number.class.isAssignableFrom(descriptor.bindingClass())) { return NumericFieldValue.of(value); } if (String.class.isAssignableFrom(descriptor.bindingClass())) { return TextFieldValue.of(value); } return GenericFieldValue.of(value); } @Override public Object visitNullFilter(final Object extraData) { throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.ExcludeFilter filter, final Object extraData) { return Filter.exclude(); } @Override public Object visit(final org.opengis.filter.IncludeFilter filter, final Object extraData) { return Filter.include(); } @Override public Object visit(final 
org.opengis.filter.And filter, final Object extraData) {
    // Convert each child; children that do not convert to a GeoWave Filter are dropped by the
    // filter() step below.
    final Filter[] children =
        filter.getChildren().stream().map(f -> f.accept(this, extraData)).filter(
            f -> f instanceof Filter).toArray(Filter[]::new);
    // Only succeed if every single child converted; otherwise the conjunction would be lossy.
    if (children.length == filter.getChildren().size()) {
      return new And(children);
    }
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.Or filter, final Object extraData) {
    final Filter[] children =
        filter.getChildren().stream().map(f -> f.accept(this, extraData)).filter(
            f -> f instanceof Filter).toArray(Filter[]::new);
    // As with And: every child must convert for the disjunction to be faithful.
    if (children.length == filter.getChildren().size()) {
      return new Or(children);
    }
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.Not filter, final Object extraData) {
    final Object transformed = filter.getFilter().accept(this, extraData);
    if (transformed instanceof Filter) {
      return new Not((Filter) transformed);
    }
    throw new CQLToGeoWaveConversionException();
  }

  // Feature-ID filters have no GeoWave filter-expression equivalent.
  @Override
  public Object visit(final org.opengis.filter.Id filter, final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.PropertyIsBetween filter, final Object extraData) {
    final Object expression = filter.getExpression().accept(this, ExpressionType.ANY);
    final Object lowerBound = filter.getLowerBoundary().accept(this, ExpressionType.ANY);
    final Object upperBound = filter.getUpperBoundary().accept(this, ExpressionType.ANY);
    // All three operands must be comparable for a range predicate.
    if ((expression instanceof ComparableExpression)
        && (lowerBound instanceof ComparableExpression)
        && (upperBound instanceof ComparableExpression)) {
      return ((ComparableExpression) expression).isBetween(lowerBound, upperBound);
    }
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.PropertyIsEqualTo filter, final Object extraData) {
    final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY);
    final Object expression2 =
filter.getExpression2().accept(this, ExpressionType.ANY); if ((expression1 instanceof Expression) && (expression2 instanceof Expression)) { return ((Expression) expression1).isEqualTo(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.PropertyIsNotEqualTo filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY); if ((expression1 instanceof Expression) && (expression2 instanceof Expression)) { return ((Expression) expression1).isNotEqualTo(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.PropertyIsGreaterThan filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY); if ((expression1 instanceof ComparableExpression) && (expression2 instanceof ComparableExpression)) { return ((ComparableExpression) expression1).isGreaterThan(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.PropertyIsGreaterThanOrEqualTo filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY); if ((expression1 instanceof ComparableExpression) && (expression2 instanceof ComparableExpression)) { return ((ComparableExpression) expression1).isGreaterThanOrEqualTo(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.PropertyIsLessThan filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY); final Object expression2 = 
filter.getExpression2().accept(this, ExpressionType.ANY); if ((expression1 instanceof ComparableExpression) && (expression2 instanceof ComparableExpression)) { return ((ComparableExpression) expression1).isLessThan(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.PropertyIsLessThanOrEqualTo filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.ANY); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.ANY); if ((expression1 instanceof ComparableExpression) && (expression2 instanceof ComparableExpression)) { return ((ComparableExpression) expression1).isLessThanOrEqualTo(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.PropertyIsLike filter, final Object extraData) { final Object expression = filter.getExpression().accept(this, ExpressionType.TEXT); if (!(expression instanceof TextExpression)) { throw new CQLToGeoWaveConversionException(); } final String likeStr = filter.getLiteral(); if (likeStr.matches( ".*(^\\b|[^\\" + filter.getEscape() + "])" + filter.getSingleChar() + ".*")) { // We can't handle character wildcards throw new CQLToGeoWaveConversionException(); } final int count = StringUtils.countMatches(likeStr, filter.getWildCard()); if (count == 0) { return ((TextExpression) expression).isEqualTo( StringUtils.replace(likeStr, filter.getEscape(), ""), !filter.isMatchingCase()); } else if (count == 1) { if (likeStr.startsWith(filter.getWildCard())) { return ((TextExpression) expression).endsWith( likeStr.substring(filter.getWildCard().length()), !filter.isMatchingCase()); } if (likeStr.endsWith(filter.getWildCard())) { return ((TextExpression) expression).startsWith( likeStr.substring(0, likeStr.length() - filter.getWildCard().length()), !filter.isMatchingCase()); } } else if (count == 2) { if (likeStr.startsWith(filter.getWildCard()) && 
likeStr.endsWith(filter.getWildCard())) { return ((TextExpression) expression).contains( likeStr.substring( filter.getWildCard().length(), likeStr.length() - filter.getWildCard().length()), !filter.isMatchingCase()); } } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.PropertyIsNull filter, final Object extraData) { final Object expression = filter.getExpression().accept(this, ExpressionType.ANY); if (expression instanceof Expression) { return ((Expression) expression).isNull(); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.PropertyIsNil filter, final Object extraData) { final Object expression = filter.getExpression().accept(this, ExpressionType.ANY); if (expression instanceof Expression) { return ((Expression) expression).isNull(); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.BBOX filter, final Object extraData) { final Object expression = filter.getExpression1().accept(this, ExpressionType.SPATIAL); if (expression instanceof SpatialExpression) { return ((SpatialExpression) expression).bbox( filter.getBounds().getMinX(), filter.getBounds().getMinY(), filter.getBounds().getMaxX(), filter.getBounds().getMaxY(), filter.getBounds().getCoordinateReferenceSystem()); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Beyond filter, final Object extraData) { throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Contains filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) 
expression1).contains(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Crosses filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).crosses(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Disjoint filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).disjoint(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.DWithin filter, final Object extraData) { throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Equals filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).isEqualTo(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Intersects filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); 
if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).intersects(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Overlaps filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).overlaps(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Touches filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).touches(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.spatial.Within filter, final Object extraData) { final Object expression1 = filter.getExpression1().accept(this, ExpressionType.SPATIAL); final Object expression2 = filter.getExpression2().accept(this, ExpressionType.SPATIAL); if ((expression1 instanceof SpatialExpression) && (expression2 instanceof SpatialExpression)) { return ((SpatialExpression) expression1).within(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.temporal.After after, final Object extraData) { final Object expression1 = after.getExpression1().accept(this, ExpressionType.TEMPORAL); final Object expression2 = after.getExpression2().accept(this, ExpressionType.TEMPORAL); if ((expression1 instanceof 
TemporalExpression) && (expression2 instanceof TemporalExpression)) { return ((TemporalExpression) expression1).isAfter(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit( final org.opengis.filter.temporal.AnyInteracts anyInteracts, final Object extraData) { final Object expression1 = anyInteracts.getExpression1().accept(this, ExpressionType.TEMPORAL); final Object expression2 = anyInteracts.getExpression2().accept(this, ExpressionType.TEMPORAL); if ((expression1 instanceof TemporalExpression) && (expression2 instanceof TemporalExpression)) { return ((TemporalExpression) expression1).overlaps(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.temporal.Before before, final Object extraData) { final Object expression1 = before.getExpression1().accept(this, ExpressionType.TEMPORAL); final Object expression2 = before.getExpression2().accept(this, ExpressionType.TEMPORAL); if ((expression1 instanceof TemporalExpression) && (expression2 instanceof TemporalExpression)) { return ((TemporalExpression) expression1).isBefore(expression2); } throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.temporal.Begins begins, final Object extraData) { throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.temporal.BegunBy begunBy, final Object extraData) { throw new CQLToGeoWaveConversionException(); } @Override public Object visit(final org.opengis.filter.temporal.During during, final Object extraData) { final Object expression1 = during.getExpression1().accept(this, ExpressionType.TEMPORAL); final Object expression2 = during.getExpression2().accept(this, ExpressionType.TEMPORAL); if ((expression1 instanceof TemporalExpression) && (expression2 instanceof TemporalExpression)) { return ((TemporalExpression) expression1).isDuring(expression2); } throw new 
CQLToGeoWaveConversionException(); }

  // The remaining Allen-style temporal relations have no GeoWave equivalent and cannot be
  // converted.
  @Override
  public Object visit(final org.opengis.filter.temporal.EndedBy endedBy, final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.temporal.Ends ends, final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.temporal.Meets meets, final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(final org.opengis.filter.temporal.MetBy metBy, final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(
      final org.opengis.filter.temporal.OverlappedBy overlappedBy,
      final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }

  /** Converts a temporal {@code TContains} filter by delegating to the inverse relation. */
  @Override
  public Object visit(
      final org.opengis.filter.temporal.TContains contains,
      final Object extraData) {
    final Object expression1 = contains.getExpression1().accept(this, ExpressionType.TEMPORAL);
    final Object expression2 = contains.getExpression2().accept(this, ExpressionType.TEMPORAL);
    if ((expression1 instanceof TemporalExpression)
        && (expression2 instanceof TemporalExpression)) {
      // This is really just the inverse of `During`
      return ((TemporalExpression) expression2).isDuring(expression1);
    }
    throw new CQLToGeoWaveConversionException();
  }

  /**
   * Converts a temporal {@code TEquals} filter to a GeoWave temporal equality predicate.
   *
   * <p> Bug fix: the guard previously checked only {@code instanceof Expression} while casting to
   * {@link TemporalExpression}, so a non-temporal operand raised a {@code ClassCastException}
   * instead of the expected {@code CQLToGeoWaveConversionException}. The guard now matches the
   * cast, consistent with every sibling temporal visit method.
   */
  @Override
  public Object visit(final org.opengis.filter.temporal.TEquals equals, final Object extraData) {
    final Object expression1 = equals.getExpression1().accept(this, ExpressionType.TEMPORAL);
    final Object expression2 = equals.getExpression2().accept(this, ExpressionType.TEMPORAL);
    if ((expression1 instanceof TemporalExpression)
        && (expression2 instanceof TemporalExpression)) {
      return ((TemporalExpression) expression1).isEqualTo(expression2);
    }
    throw new CQLToGeoWaveConversionException();
  }

  @Override
  public Object visit(
      final org.opengis.filter.temporal.TOverlaps contains,
      final Object extraData) {
    throw new CQLToGeoWaveConversionException();
  }
} ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/BBox.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import org.geotools.geometry.jts.ReferencedEnvelope; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.jts.geom.Envelope; import org.opengis.referencing.crs.CoordinateReferenceSystem; /** * Predicate that passes when the first operand is within the bounding box of the second operand. */ public class BBox extends Intersects { public BBox() {} public BBox( final SpatialExpression expression, final double minX, final double minY, final double maxX, final double maxY, final boolean loose) { this(expression, minX, minY, maxX, maxY, null, loose); } public BBox( final SpatialExpression expression, final double minX, final double minY, final double maxX, final double maxY, final CoordinateReferenceSystem crs, final boolean loose) { super( expression, SpatialLiteral.of( new ReferencedEnvelope( minX, maxX, minY, maxY, crs == null ? GeometryUtils.getDefaultCRS() : crs)), loose); } @Override public String toString() { final StringBuilder sb = new StringBuilder(loose ? 
"BBOXLOOSE(" : "BBOX("); final Envelope envelope = expression2.evaluateValue(null).getGeometry().getEnvelopeInternal(); sb.append(expression1.toString()); sb.append(","); sb.append(envelope.getMinX()); sb.append(","); sb.append(envelope.getMinY()); sb.append(","); sb.append(envelope.getMaxX()); sb.append(","); sb.append(envelope.getMaxY()); sb.append(")"); return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/BinarySpatialPredicate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import java.util.Map; import java.util.Set; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.query.filter.expression.BinaryPredicate; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints; import org.locationtech.geowave.core.store.query.filter.expression.FilterRange; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints.DimensionConstraints; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * Abstract class for comparing two spatial expressions. 
It handles any necessary CRS * transformations and delegates the actual comparison operation to the child classes. */ public abstract class BinarySpatialPredicate extends BinaryPredicate { public BinarySpatialPredicate() {} public BinarySpatialPredicate( final SpatialExpression expression1, final SpatialExpression expression2) { super(expression1, expression2); } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { CoordinateReferenceSystem expression1Crs = expression1.getCRS(adapter); CoordinateReferenceSystem expression2Crs = expression2.getCRS(adapter); if (expression1.isLiteral() && !(expression1 instanceof SpatialLiteral)) { expression1 = SpatialLiteral.of(expression1.evaluateValue(null), expression1Crs); } if (expression2.isLiteral() && !(expression2 instanceof SpatialLiteral)) { expression2 = SpatialLiteral.of(expression2.evaluateValue(null), expression2Crs); } if ((expression1 instanceof FieldValue) && isFieldMappedToIndex(((FieldValue) expression1).getFieldName(), indexMapping)) { expression1Crs = GeometryUtils.getIndexCrs(index); } if ((expression2 instanceof FieldValue) && isFieldMappedToIndex(((FieldValue) expression2).getFieldName(), indexMapping)) { expression2Crs = GeometryUtils.getIndexCrs(index); } if (expression1.isLiteral()) { ((SpatialLiteral) expression1).prepare(expression2Crs); } else if (expression2.isLiteral()) { ((SpatialLiteral) expression2).prepare(expression1Crs); } } private boolean isFieldMappedToIndex( final String fieldName, final AdapterToIndexMapping indexMapping) { for (final IndexFieldMapper mapper : indexMapping.getIndexFieldMappers()) { for (final String adapterField : mapper.getAdapterFields()) { if (adapterField.equals(fieldName)) { return true; } } } return false; } @Override public boolean evaluate(final Map fieldValues) { final Object value1 = expression1.evaluateValue(fieldValues); final Object value2 = expression2.evaluateValue(fieldValues); if 
((value1 == null) || (value2 == null)) { return false; } return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { final Object value1 = expression1.evaluateValue(adapter, entry); final Object value2 = expression2.evaluateValue(adapter, entry); if ((value1 == null) || (value2 == null)) { return false; } return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2); } protected abstract boolean evaluateInternal( final FilterGeometry value1, final FilterGeometry value2); @Override public Set getConstrainableFields() { if ((expression1 instanceof FieldValue) && expression2.isLiteral()) { return Sets.newHashSet(((FieldValue) expression1).getFieldName()); } else if ((expression2 instanceof FieldValue) && expression1.isLiteral()) { return Sets.newHashSet(((FieldValue) expression2).getFieldName()); } return Sets.newHashSet(); } @SuppressWarnings("unchecked") @Override public > FilterConstraints getConstraints( final Class constraintClass, final DataStatisticsStore statsStore, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final Set indexedFields) { if (!constraintClass.isAssignableFrom(Double.class)) { return FilterConstraints.empty(); } final Map> dimensionRanges = Maps.newHashMap(); FilterGeometry literal = null; String fieldName = null; CoordinateReferenceSystem literalCRS = GeometryUtils.getDefaultCRS(); if ((expression1 instanceof FieldValue) && indexedFields.contains(((FieldValue) expression1).getFieldName()) && expression2.isLiteral()) { literal = expression2.evaluateValue(null, null); if (expression2 instanceof SpatialExpression) { literalCRS = expression2.getCRS(adapter); } fieldName = ((FieldValue) expression1).getFieldName(); } else if ((expression2 instanceof FieldValue) && indexedFields.contains(((FieldValue) expression2).getFieldName()) && expression1.isLiteral()) { literal = 
expression1.evaluateValue(null, null); if (expression1 instanceof SpatialExpression) { literalCRS = expression1.getCRS(adapter); } fieldName = ((FieldValue) expression2).getFieldName(); } if ((literal != null) && (fieldName != null)) { final CoordinateReferenceSystem indexCRS = GeometryUtils.getIndexCrs(index); Geometry literalGeometry = literal.getGeometry(); if ((indexCRS != null) && !indexCRS.equals(literalCRS)) { try { literalGeometry = GeometryUtils.crsTransform( literalGeometry, CRS.findMathTransform(literalCRS, indexCRS)); } catch (final FactoryException e) { throw new RuntimeException("Unable to transform spatial literal to the index CRS."); } } final Envelope envelope = literalGeometry.getEnvelopeInternal(); if (!envelope.isNull()) { dimensionRanges.put( 0, DimensionConstraints.of( Lists.newArrayList( FilterRange.of( envelope.getMinX(), envelope.getMaxX(), true, true, isExact())))); dimensionRanges.put( 1, DimensionConstraints.of( Lists.newArrayList( FilterRange.of( envelope.getMinY(), envelope.getMaxY(), true, true, isExact())))); } } if (dimensionRanges.isEmpty()) { return FilterConstraints.empty(); } return FilterConstraints.of( adapter, indexMapping, index, fieldName, (IndexFieldConstraints) NumericFieldConstraints.of(dimensionRanges)); } protected abstract boolean isExact(); } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Crosses.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; /** * Predicate that passes when the first operand crosses the second operand. */ public class Crosses extends BinarySpatialPredicate { public Crosses() {} public Crosses(final SpatialExpression expression1, final SpatialExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { return value1.crosses(value2); } @Override protected boolean isExact() { return false; } @Override public String toString() { final StringBuilder sb = new StringBuilder("CROSSES("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Disjoint.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import java.nio.ByteBuffer; import java.util.Set; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; /** * Predicate that passes when the first operand is disjoint from the second operand. */ public class Disjoint extends BinarySpatialPredicate { private boolean loose; public Disjoint() {} public Disjoint( final SpatialExpression expression1, final SpatialExpression expression2, final boolean loose) { super(expression1, expression2); this.loose = loose; } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { if (loose) { return value1.getGeometry().getEnvelopeInternal().disjoint( value2.getGeometry().getEnvelopeInternal()); } return value1.disjoint(value2); } @Override public > FilterConstraints getConstraints( final Class constraintClass, final DataStatisticsStore statsStore, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final Set indexedFields) { // This is a full scan because there isn't currently a way to do a set of constraints with a // hole in it. 
return FilterConstraints.empty(); } @Override protected boolean isExact() { return isLoose(); } public boolean isLoose() { return loose; } @Override public String toString() { final StringBuilder sb = new StringBuilder(loose ? "LOOSE_DISJOINT(" : "DISJOINT("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length); buffer.put(loose ? (byte) 1 : (byte) 0); buffer.put(superBinary); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); loose = buffer.get() != 0; final byte[] superBinary = new byte[buffer.remaining()]; buffer.get(superBinary); super.fromBinary(superBinary); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/FilterGeometry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;

import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.jts.geom.Geometry;

/**
 * Interface for geometries within filter expressions. This is primarily to avoid having to check
 * for prepared vs non-prepared geometries throughout the expression implementations.
 */
public interface FilterGeometry extends Persistable {

  /**
   * @return the raw geometry
   */
  public Geometry getGeometry();

  /**
   * Check to see if this geometry intersects the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if the geometries intersect
   */
  boolean intersects(FilterGeometry other);

  /**
   * Check to see if this geometry is disjoint from the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if the geometries are disjoint
   */
  boolean disjoint(FilterGeometry other);

  /**
   * Check to see if this geometry crosses the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if this geometry crosses the provided geometry
   */
  boolean crosses(FilterGeometry other);

  /**
   * Check to see if this geometry overlaps the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if the geometries overlap
   */
  boolean overlaps(FilterGeometry other);

  /**
   * Check to see if this geometry touches the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if the geometries touch
   */
  boolean touches(FilterGeometry other);

  /**
   * Check to see if this geometry is within the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if this geometry is within the provided geometry
   */
  boolean within(FilterGeometry other);

  /**
   * Check to see if this geometry contains the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if this geometry contains the provided geometry
   */
  boolean contains(FilterGeometry other);

  /**
   * Check to see if this geometry is topologically equal to the provided geometry.
   *
   * @param other the geometry to test against
   * @return {@code true} if this geometry is topologically equal to the provided geometry
   */
  boolean isEqualTo(FilterGeometry other);
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Intersects.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import java.nio.ByteBuffer; /** * Predicate that passes when the first operand intersects the second operand. */ public class Intersects extends BinarySpatialPredicate { protected boolean loose; public Intersects() {} public Intersects( final SpatialExpression expression1, final SpatialExpression expression2, final boolean loose) { super(expression1, expression2); this.loose = loose; } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { if (loose) { return value1.getGeometry().getEnvelopeInternal().intersects( value2.getGeometry().getEnvelopeInternal()); } return value1.intersects(value2); } @Override protected boolean isExact() { return isLoose(); } public boolean isLoose() { return loose; } @Override public String toString() { final StringBuilder sb = new StringBuilder(loose ? "LOOSE_INTERSECTS(" : "INTERSECTS("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length); buffer.put(loose ? 
(byte) 1 : (byte) 0); buffer.put(superBinary); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); loose = buffer.get() != 0; final byte[] superBinary = new byte[buffer.remaining()]; buffer.get(superBinary); super.fromBinary(superBinary); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Overlaps.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; /** * Predicate that passes when the first operand overlaps the second operand. */ public class Overlaps extends BinarySpatialPredicate { public Overlaps() {} public Overlaps(final SpatialExpression expression1, final SpatialExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { return value1.overlaps(value2); } @Override protected boolean isExact() { return false; } @Override public String toString() { final StringBuilder sb = new StringBuilder("OVERLAPS("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/PreparedFilterGeometry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.prep.PreparedGeometry; /** * A {@link FilterGeometry} implementation for prepared geometries. */ public class PreparedFilterGeometry implements FilterGeometry { private PreparedGeometry geometry; public PreparedFilterGeometry() {} public PreparedFilterGeometry(final PreparedGeometry geometry) { this.geometry = geometry; } @Override public Geometry getGeometry() { return geometry.getGeometry(); } @Override public boolean intersects(final FilterGeometry other) { return geometry.intersects(other.getGeometry()); } @Override public boolean disjoint(final FilterGeometry other) { return geometry.disjoint(other.getGeometry()); } @Override public boolean crosses(final FilterGeometry other) { return geometry.crosses(other.getGeometry()); } @Override public boolean overlaps(final FilterGeometry other) { return geometry.overlaps(other.getGeometry()); } @Override public boolean touches(final FilterGeometry other) { return geometry.touches(other.getGeometry()); } @Override public boolean within(final FilterGeometry other) { return geometry.within(other.getGeometry()); } @Override public boolean contains(final FilterGeometry other) { return geometry.contains(other.getGeometry()); } @Override public boolean isEqualTo(final FilterGeometry other) { return geometry.getGeometry().equalsTopo(other.getGeometry()); } @Override public byte[] toBinary() { return GeometryUtils.geometryToBinary(getGeometry(), null); } 
@Override public void fromBinary(final byte[] bytes) { final Geometry unprepared = GeometryUtils.geometryFromBinary(bytes, null); geometry = GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(unprepared); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialContains.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; /** * Predicate that passes when the first operand contains the second operand. */ public class SpatialContains extends BinarySpatialPredicate { public SpatialContains() {} public SpatialContains(final SpatialExpression expression1, final SpatialExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { return value1.contains(value2); } @Override protected boolean isExact() { return false; } @Override public String toString() { final StringBuilder sb = new StringBuilder("CONTAINS("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialEqualTo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import java.util.Map; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * Predicate that passes when the first operand is topologically equal to the second operand. */ public class SpatialEqualTo extends BinarySpatialPredicate { public SpatialEqualTo() {} public SpatialEqualTo(final SpatialExpression expr1, final SpatialExpression expr2) { super(expr1, expr2); } @Override public boolean evaluate(final Map fieldValues) { final Object value1 = getExpression1().evaluateValue(fieldValues); final Object value2 = getExpression2().evaluateValue(fieldValues); if (value1 == null) { return value2 == null; } if (value2 == null) { return false; } return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { final Object value1 = getExpression1().evaluateValue(adapter, entry); final Object value2 = getExpression2().evaluateValue(adapter, entry); if (value1 == null) { return value2 == null; } if (value2 == null) { return false; } return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2); } @Override protected boolean isExact() { return false; } @Override protected boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { return value1.isEqualTo(value2); } @Override public String toString() { final StringBuilder sb = new StringBuilder("EQUALS("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } } 
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialExpression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;

import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.Predicate;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Interface for expressions that resolve to spatial geometry objects. Provides factory methods
 * for building the spatial predicates of this package (bbox, intersects, disjoint, contains,
 * within, touches, crosses, overlaps, and topological equality).
 */
public interface SpatialExpression extends Expression {

  /**
   * Get the coordinate reference system for this expression. In cases where a field value geometry
   * is not indexed, the CRS will be derived from the field descriptor of the adapter.
   *
   * @param adapter the adapter being filtered
   * @return the coordinate reference system of this expression
   */
  CoordinateReferenceSystem getCRS(final DataTypeAdapter adapter);

  /**
   * Create a predicate that tests this expression against the provided bounding box.
   *
   * @param minX the minimum X value
   * @param minY the minimum Y value
   * @param maxX the maximum X value
   * @param maxY the maximum Y value
   * @return the bounding box predicate
   */
  default Predicate bbox(
      final double minX,
      final double minY,
      final double maxX,
      final double maxY) {
    return new BBox(this, minX, minY, maxX, maxY, false);
  }

  /**
   * Create a predicate that tests this expression against the provided bounding box in the given
   * coordinate reference system.
   *
   * @param minX the minimum X value
   * @param minY the minimum Y value
   * @param maxX the maximum X value
   * @param maxY the maximum Y value
   * @param crs the coordinate reference system of the bounding box
   * @return the bounding box predicate
   */
  default Predicate bbox(
      final double minX,
      final double minY,
      final double maxX,
      final double maxY,
      final CoordinateReferenceSystem crs) {
    return new BBox(this, minX, minY, maxX, maxY, crs, false);
  }

  /**
   * Create a predicate that loosely tests this expression against the provided bounding box. This
   * is meant to be a faster implementation for situations where exact accuracy is not needed.
   *
   * @param minX the minimum X value
   * @param minY the minimum Y value
   * @param maxX the maximum X value
   * @param maxY the maximum Y value
   * @return the bounding box predicate
   */
  default Predicate bboxLoose(
      final double minX,
      final double minY,
      final double maxX,
      final double maxY) {
    return new BBox(this, minX, minY, maxX, maxY, true);
  }

  /**
   * Create a predicate that loosely tests this expression against the provided bounding box in the
   * given coordinate reference system. This is meant to be a faster implementation for situations
   * where exact accuracy is not needed.
   *
   * @param minX the minimum X value
   * @param minY the minimum Y value
   * @param maxX the maximum X value
   * @param maxY the maximum Y value
   * @param crs the coordinate reference system of the bounding box
   * @return the bounding box predicate
   */
  default Predicate bboxLoose(
      final double minX,
      final double minY,
      final double maxX,
      final double maxY,
      final CoordinateReferenceSystem crs) {
    return new BBox(this, minX, minY, maxX, maxY, crs, true);
  }

  /**
   * Create a predicate that tests to see if this expression intersects the provided spatial object.
   * The operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the intersection predicate
   */
  default Predicate intersects(final Object other) {
    return new Intersects(this, toSpatialExpression(other), false);
  }

  /**
   * Create a predicate that tests to see if this expression intersects the provided spatial object.
   * This is meant to be a faster implementation for situations where accuracy is not needed. The
   * operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the intersection predicate
   */
  default Predicate intersectsLoose(final Object other) {
    return new Intersects(this, toSpatialExpression(other), true);
  }

  /**
   * Create a predicate that tests to see if this expression is disjoint to the provided spatial
   * object. The operand can be either another spatial expression, or any object that can be
   * converted to a spatial literal.
   *
   * @param other the spatial object to test against
   * @return the disjoint predicate
   */
  default Predicate disjoint(final Object other) {
    return new Disjoint(this, toSpatialExpression(other), false);
  }

  /**
   * Create a predicate that tests to see if this expression is disjoint to the provided spatial
   * object. This is meant to be a faster implementation for situations where accuracy is not
   * needed. The operand can be either another spatial expression, or any object that can be
   * converted to a spatial literal.
   *
   * @param other the spatial object to test against
   * @return the disjoint predicate
   */
  default Predicate disjointLoose(final Object other) {
    return new Disjoint(this, toSpatialExpression(other), true);
  }

  /**
   * Create a predicate that tests to see if this expression contains the provided spatial object.
   * The operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the contains predicate
   */
  default Predicate contains(final Object other) {
    return new SpatialContains(this, toSpatialExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression is within the provided spatial object.
   * The operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the within predicate
   */
  default Predicate within(final Object other) {
    return new Within(this, toSpatialExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression touches the provided spatial object.
   * The operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the touches predicate
   */
  default Predicate touches(final Object other) {
    return new Touches(this, toSpatialExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression crosses the provided spatial object.
   * The operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the crosses predicate
   */
  default Predicate crosses(final Object other) {
    return new Crosses(this, toSpatialExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression overlaps the provided spatial object.
   * The operand can be either another spatial expression, or any object that can be converted to a
   * spatial literal.
   *
   * @param other the spatial object to test against
   * @return the overlaps predicate
   */
  default Predicate overlaps(final Object other) {
    return new Overlaps(this, toSpatialExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression is topologically equal to the provided
   * spatial object. The operand can be either another spatial expression, or any object that can be
   * converted to a spatial literal.
   *
   * @param other the spatial object to test against
   * @return the equals predicate
   */
  @Override
  default Predicate isEqualTo(final Object other) {
    return new SpatialEqualTo(this, toSpatialExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression is not topologically equal to the
   * provided spatial object. The operand can be either another spatial expression, or any object
   * that can be converted to a spatial literal.
   *
   * @param other the spatial object to test against
   * @return the not equals predicate
   */
  @Override
  default Predicate isNotEqualTo(final Object other) {
    return new SpatialNotEqualTo(this, toSpatialExpression(other));
  }

  /**
   * Convert the given object into a spatial expression, if it is not already one.
   *
   * @param obj the object to convert
   * @return the spatial expression
   */
  public static SpatialExpression toSpatialExpression(final Object obj) {
    if (obj instanceof SpatialExpression) {
      return (SpatialExpression) obj;
    }
    return SpatialLiteral.of(obj);
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialFieldValue.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.crs.CoordinateReferenceSystem; /** * A field value implementation for spatial adapter fields. */ public class SpatialFieldValue extends FieldValue implements SpatialExpression { public SpatialFieldValue() {} public SpatialFieldValue(final String fieldName) { super(fieldName); } @Override public CoordinateReferenceSystem getCRS(final DataTypeAdapter adapter) { final FieldDescriptor fieldDescriptor = adapter.getFieldDescriptor(fieldName); if ((fieldDescriptor != null) && (fieldDescriptor instanceof SpatialFieldDescriptor)) { return ((SpatialFieldDescriptor) fieldDescriptor).crs(); } return GeometryUtils.getDefaultCRS(); } public static SpatialFieldValue of(final String fieldName) { return new SpatialFieldValue(fieldName); } @Override protected FilterGeometry evaluateValueInternal(final Object value) { return new UnpreparedFilterGeometry((Geometry) value); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialLiteral.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;

import java.nio.ByteBuffer;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;
import org.locationtech.geowave.core.store.query.filter.expression.Literal;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * A spatial implementation of literal, representing spatial (geometric) literal objects.
 */
// NOTE(review): the extracted source read "extends Literal implements SpatialExpression"; the
// FilterGeometry type argument is restored here because the body assigns FilterGeometry
// instances to `literal` and calls literal.getGeometry() — the extraction appears to have
// stripped generic angle brackets. Confirm against the repository source.
public class SpatialLiteral extends Literal<FilterGeometry> implements SpatialExpression {

  // CRS of the literal geometry; defaults to the GeoWave default CRS unless supplied
  private CoordinateReferenceSystem crs;

  public SpatialLiteral() {}

  public SpatialLiteral(final FilterGeometry literal) {
    super(literal);
    crs = GeometryUtils.getDefaultCRS();
  }

  public SpatialLiteral(final FilterGeometry literal, final CoordinateReferenceSystem crs) {
    super(literal);
    this.crs = crs;
  }

  @Override
  public CoordinateReferenceSystem getCRS(final DataTypeAdapter adapter) {
    return crs;
  }

  /**
   * Prepare this literal by converting it to the provided coordinate reference system and
   * preparing the geometry.
   *
   * @param targetCRS the target coordinate reference system of the geometry
   */
  public void prepare(final CoordinateReferenceSystem targetCRS) {
    // instanceof implies non-null; only unprepared geometries still need transformation
    if (literal instanceof UnpreparedFilterGeometry) {
      try {
        final Geometry transformed =
            GeometryUtils.crsTransform(
                literal.getGeometry(),
                CRS.findMathTransform(crs, targetCRS));
        literal =
            new PreparedFilterGeometry(GeometryUtils.PREPARED_GEOMETRY_FACTORY.create(transformed));
        crs = targetCRS;
      } catch (final FactoryException e) {
        throw new RuntimeException("Unable to transform spatial literal", e);
      }
    }
  }

  /**
   * Convert a raw literal (JTS geometry, envelope, or WKT string) to a filter geometry.
   *
   * @param literal the raw literal, possibly null
   * @return the wrapped geometry, or null if the input was null
   * @throws InvalidFilterException if the literal is an unsupported type or unparseable WKT
   */
  private static FilterGeometry toGeometry(final Object literal) {
    final Geometry geometry;
    if (literal == null) {
      return null;
    }
    if (literal instanceof Geometry) {
      geometry = (Geometry) literal;
    } else if (literal instanceof Envelope) {
      geometry = GeometryUtils.GEOMETRY_FACTORY.toGeometry((Envelope) literal);
    } else if (literal instanceof String) {
      try {
        geometry = new WKTReader().read((String) literal);
      } catch (final ParseException e) {
        throw new InvalidFilterException("Unable to parse well-known text geometry", e);
      }
    } else {
      throw new InvalidFilterException("Invalid spatial literal: " + literal.getClass().getName());
    }
    return new UnpreparedFilterGeometry(geometry);
  }

  /**
   * Create a spatial literal from the given object, taking the CRS from the object when it is a
   * {@link ReferencedEnvelope}, otherwise using the default CRS.
   */
  public static SpatialLiteral of(final Object literal) {
    final CoordinateReferenceSystem crs;
    if (literal instanceof ReferencedEnvelope) {
      crs = ((ReferencedEnvelope) literal).getCoordinateReferenceSystem();
    } else {
      crs = GeometryUtils.getDefaultCRS();
    }
    return of(literal, crs);
  }

  /**
   * Create a spatial literal from the given object in the given CRS. Literal expressions are
   * first collapsed to their evaluated value.
   */
  public static SpatialLiteral of(Object literal, final CoordinateReferenceSystem crs) {
    if (literal == null) {
      return new SpatialLiteral(null);
    }
    if (literal instanceof SpatialLiteral) {
      return (SpatialLiteral) literal;
    }
    if (literal instanceof Expression && ((Expression) literal).isLiteral()) {
      literal = ((Expression) literal).evaluateValue(null);
    }
    return new SpatialLiteral(toGeometry(literal), crs);
  }

  @Override
  public String toString() {
    // Guard against the null literal that the serialization logic below explicitly supports;
    // previously this threw a NullPointerException.
    return literal == null ? "null" : literal.getGeometry().toText();
  }

  @Override
  public byte[] toBinary() {
    // A single 0 byte marks a null literal (no CRS or geometry is serialized)
    if (literal == null) {
      return new byte[] {(byte) 0};
    }
    final byte[] crsBytes = StringUtils.stringToBinary(crs.toWKT());
    final byte[] geometryBytes = PersistenceUtils.toBinary(literal);
    final ByteBuffer buffer =
        ByteBuffer.allocate(
            1
                + VarintUtils.unsignedIntByteLength(crsBytes.length)
                + VarintUtils.unsignedIntByteLength(geometryBytes.length)
                + crsBytes.length
                + geometryBytes.length);
    buffer.put((byte) 1);
    VarintUtils.writeUnsignedInt(crsBytes.length, buffer);
    buffer.put(crsBytes);
    VarintUtils.writeUnsignedInt(geometryBytes.length, buffer);
    buffer.put(geometryBytes);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final byte nullByte = buffer.get();
    if (nullByte == 0) {
      literal = null;
      return;
    }
    final byte[] crsBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(crsBytes);
    final byte[] geometryBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(geometryBytes);
    try {
      crs = CRS.parseWKT(StringUtils.stringFromBinary(crsBytes));
    } catch (final FactoryException e) {
      // Preserve the underlying parse failure as the cause (it was previously dropped)
      throw new RuntimeException("Unable to parse CRS from spatial literal.", e);
    }
    literal = (FilterGeometry) PersistenceUtils.fromBinary(geometryBytes);
  }
}


================================================
FILE:
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/SpatialNotEqualTo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * Predicate that passes when the first operand is not topologically equal to the second operand.
 */
// NOTE(review): generic type parameters in this class (on Map, DataTypeAdapter, Set, and the
// getConstraints declaration, which reads "public > FilterConstraints") appear to have been
// stripped during extraction; the tokens below are preserved as found — restore against the
// repository source before compiling.
public class SpatialNotEqualTo extends BinarySpatialPredicate {

  public SpatialNotEqualTo() {}

  public SpatialNotEqualTo(final SpatialExpression expr1, final SpatialExpression expr2) {
    super(expr1, expr2);
  }

  // Null handling differs from a plain negation: two nulls compare as equal (returns false),
  // while exactly one null compares as not equal (returns true).
  @Override
  public boolean evaluate(final Map fieldValues) {
    final Object value1 = getExpression1().evaluateValue(fieldValues);
    final Object value2 = getExpression2().evaluateValue(fieldValues);
    if (value1 == null) {
      return value2 != null;
    }
    if (value2 == null) {
      return true;
    }
    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);
  }

  // Same null semantics as above, but the operands are evaluated against an adapter entry.
  @Override
  public boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    final Object value1 = getExpression1().evaluateValue(adapter, entry);
    final Object value2 = getExpression2().evaluateValue(adapter, entry);
    if (value1 == null) {
      return value2 != null;
    }
    if (value2 == null) {
      return true;
    }
    return evaluateInternal((FilterGeometry) value1, (FilterGeometry) value2);
  }

  // Always returns empty constraints — a "not equal" test is not narrowed to an index range here.
  public > FilterConstraints getConstraints(
      final Class constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set indexedFields) {
    return FilterConstraints.empty();
  }

  @Override
  protected boolean isExact() {
    return false;
  }

  // Topological inequality: negation of the FilterGeometry equality test.
  @Override
  protected boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) {
    return !value1.isEqualTo(value2);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("NOT_EQUALS(");
    sb.append(expression1.toString());
    sb.append(",");
    sb.append(expression2.toString());
    sb.append(")");
    return sb.toString();
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/TextToSpatialExpression.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Adapts a {@link TextExpression} to a {@link SpatialExpression} by parsing each evaluated text
 * value as well-known text (WKT).
 */
public class TextToSpatialExpression implements SpatialExpression {

  // the underlying text expression whose values are parsed as WKT
  private TextExpression baseExpression;

  // NOTE(review): WKTReader is not thread-safe; this assumes each expression instance is
  // evaluated from a single thread — confirm against the query execution model.
  private final WKTReader wktReader = new WKTReader();

  public TextToSpatialExpression() {}

  public TextToSpatialExpression(final TextExpression baseExpression) {
    this.baseExpression = baseExpression;
  }

  @Override
  public FilterGeometry evaluateValue(final Map fieldValues) {
    return evaluateInternal(baseExpression.evaluateValue(fieldValues));
  }

  // NOTE(review): the <T> type parameter is restored here; the extracted source referenced an
  // undeclared T, suggesting generics were stripped during extraction.
  @Override
  public <T> FilterGeometry evaluateValue(final DataTypeAdapter adapter, final T entry) {
    return evaluateInternal(baseExpression.evaluateValue(adapter, entry));
  }

  /**
   * Parse the given text as WKT.
   *
   * @param value the WKT string, possibly null
   * @return the parsed geometry wrapped as an unprepared filter geometry, or null when the input
   *         was null
   * @throws RuntimeException if the text is not valid WKT
   */
  private FilterGeometry evaluateInternal(final String value) {
    if (value == null) {
      return null;
    }
    try {
      return new UnpreparedFilterGeometry(wktReader.read(value));
    } catch (final ParseException e) {
      // Preserve the parse failure as the cause so the underlying WKT error is not lost
      throw new RuntimeException("Unable to cast text expression to geometry: " + value, e);
    }
  }

  @Override
  public boolean isLiteral() {
    return baseExpression.isLiteral();
  }

  @Override
  public void addReferencedFields(final Set fields) {
    baseExpression.addReferencedFields(fields);
  }

  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(baseExpression);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    baseExpression = (TextExpression) PersistenceUtils.fromBinary(bytes);
  }

  /** Text carries no CRS of its own, so the default CRS is assumed for parsed geometries. */
  @Override
  public CoordinateReferenceSystem getCRS(final DataTypeAdapter adapter) {
    return GeometryUtils.getDefaultCRS();
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Touches.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; /** * Predicate that passes when the first operand touches the second operand. */ public class Touches extends BinarySpatialPredicate { public Touches() {} public Touches(final SpatialExpression expression1, final SpatialExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { return value1.touches(value2); } @Override protected boolean isExact() { return false; } @Override public String toString() { final StringBuilder sb = new StringBuilder("TOUCHES("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/UnpreparedFilterGeometry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.jts.geom.Geometry; /** * A {@link FilterGeometry} implementation for unprepared geometries. It attempts to optimize the * spatial operation by utilizing the other operand if it is a prepared geometry. */ public class UnpreparedFilterGeometry implements FilterGeometry { private Geometry geometry; public UnpreparedFilterGeometry() {} public UnpreparedFilterGeometry(final Geometry geometry) { this.geometry = geometry; } @Override public Geometry getGeometry() { return geometry; } @Override public boolean intersects(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { return other.intersects(this); } return geometry.intersects(other.getGeometry()); } @Override public boolean disjoint(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { return other.disjoint(this); } return geometry.disjoint(other.getGeometry()); } @Override public boolean crosses(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { return other.crosses(this); } return geometry.crosses(other.getGeometry()); } @Override public boolean overlaps(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { return other.overlaps(this); } return geometry.overlaps(other.getGeometry()); } @Override public boolean touches(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { return other.touches(this); } return geometry.touches(other.getGeometry()); } @Override public boolean 
within(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { // contains is the inverse of within return other.contains(this); } return geometry.within(other.getGeometry()); } @Override public boolean contains(final FilterGeometry other) { if (other instanceof PreparedFilterGeometry) { // within is the inverse of contains return other.within(this); } return geometry.contains(other.getGeometry()); } @Override public boolean isEqualTo(final FilterGeometry other) { return geometry.equalsTopo(other.getGeometry()); } @Override public byte[] toBinary() { return GeometryUtils.geometryToBinary(geometry, null); } @Override public void fromBinary(final byte[] bytes) { geometry = GeometryUtils.geometryFromBinary(bytes, null); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/spatial/Within.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial; /** * Predicate that passes when the first operand is within the second operand. */ public class Within extends BinarySpatialPredicate { public Within() {} public Within(final SpatialExpression expression1, final SpatialExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final FilterGeometry value1, final FilterGeometry value2) { return value1.within(value2); } @Override protected boolean isExact() { return false; } @Override public String toString() { final StringBuilder sb = new StringBuilder("WITHIN("); sb.append(expression1.toString()); sb.append(","); sb.append(expression2.toString()); sb.append(")"); return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/After.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints; import org.threeten.extra.Interval; /** * Predicate that passes when the first operand takes place after the second operand. */ public class After extends BinaryTemporalPredicate { public After() {} public After(final TemporalExpression expression1, final TemporalExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final Interval value1, final Interval value2) { if ((value1 == null) || (value2 == null)) { return false; } return value1.getStart().compareTo(TimeUtils.getIntervalEnd(value2)) >= 0; } @Override public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" AFTER "); sb.append(expression2.toString()); return sb.toString(); } @Override public NumericFieldConstraints getConstraints( final Interval literal, final Double minValue, final Double maxValue, final boolean reversed, final boolean exact) { if (reversed) { return NumericFieldConstraints.of( minValue, (double) literal.getStart().toEpochMilli(), true, false, exact); } return NumericFieldConstraints.of( (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(), maxValue, false, true, exact); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/Before.java ================================================ /** * 
Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.threeten.extra.Interval;

/**
 * Predicate that passes when the first operand takes place before the second operand.
 */
public class Before extends BinaryTemporalPredicate {

  public Before() {}

  public Before(final TemporalExpression expression1, final TemporalExpression expression2) {
    super(expression1, expression2);
  }

  /** True when the first interval ends at or before the start of the second interval. */
  @Override
  public boolean evaluateInternal(final Interval value1, final Interval value2) {
    if ((value1 == null) || (value2 == null)) {
      return false;
    }
    // end1 <= start2, written here as start2 >= end1
    return value2.getStart().compareTo(TimeUtils.getIntervalEnd(value1)) >= 0;
  }

  @Override
  public String toString() {
    return expression1.toString() + " BEFORE " + expression2.toString();
  }

  /**
   * Derive an epoch-millisecond range constraint for the indexed field. When {@code reversed},
   * the literal is the first operand, so the field is bounded below by the literal's end;
   * otherwise the field is bounded above by the literal's start.
   */
  @Override
  public NumericFieldConstraints getConstraints(
      final Interval literal,
      final Double minValue,
      final Double maxValue,
      final boolean reversed,
      final boolean exact) {
    if (reversed) {
      final double literalEnd = TimeUtils.getIntervalEnd(literal).toEpochMilli();
      return NumericFieldConstraints.of(literalEnd, maxValue, false, true, exact);
    }
    final double literalStart = literal.getStart().toEpochMilli();
    return NumericFieldConstraints.of(minValue, literalStart, true, false, exact);
  }
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/BeforeOrDuring.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.threeten.extra.Interval;

/**
 * Predicate that passes when the first operand takes place before or during the second operand.
 */
public class BeforeOrDuring extends BinaryTemporalPredicate {

  public BeforeOrDuring() {}

  public BeforeOrDuring(
      final TemporalExpression expression1,
      final TemporalExpression expression2) {
    super(expression1, expression2);
  }

  /** True when the first interval ends no later than the second interval ends. */
  @Override
  public boolean evaluateInternal(final Interval value1, final Interval value2) {
    if ((value1 == null) || (value2 == null)) {
      return false;
    }
    // end1 <= end2, written here as end2 >= end1
    return TimeUtils.getIntervalEnd(value2).compareTo(TimeUtils.getIntervalEnd(value1)) >= 0;
  }

  @Override
  public String toString() {
    return expression1.toString() + " BEFORE OR DURING " + expression2.toString();
  }

  /**
   * Derive an epoch-millisecond range constraint for the indexed field. When {@code reversed},
   * the literal is the first operand, so the field is bounded below by the literal's start;
   * otherwise the field is bounded above by the literal's end.
   */
  @Override
  public NumericFieldConstraints getConstraints(
      final Interval literal,
      final Double minValue,
      final Double maxValue,
      final boolean reversed,
      final boolean exact) {
    if (reversed) {
      final double literalStart = literal.getStart().toEpochMilli();
      return NumericFieldConstraints.of(literalStart, maxValue, true, true, exact);
    }
    final double literalEnd = TimeUtils.getIntervalEnd(literal).toEpochMilli();
    return NumericFieldConstraints.of(minValue, literalEnd, true, false, exact);
  }
}


================================================
FILE:
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/BinaryTemporalPredicate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.index.simple.SimpleNumericIndexStrategy; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.query.filter.expression.BinaryPredicate; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper; import org.threeten.extra.Interval; import com.google.common.collect.Sets; /** * Abstract class for comparing two temporal expressions. 
*/
// NOTE(review): generic type parameters throughout this class (on BinaryPredicate, Map,
// DataTypeAdapter, Set, IndexFieldMapper, FieldValue, and the getConstraints declaration, which
// reads "public > FilterConstraints") appear to have been stripped during extraction; the tokens
// below are preserved as found — restore against the repository source before compiling.
public abstract class BinaryTemporalPredicate extends BinaryPredicate {

  public BinaryTemporalPredicate() {}

  public BinaryTemporalPredicate(
      final TemporalExpression expression1,
      final TemporalExpression expression2) {
    super(expression1, expression2);
  }

  // Normalize literal operands: any literal expression that is not already a TemporalLiteral is
  // collapsed to one by evaluating it once up front.
  @Override
  public void prepare(
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    if (expression1.isLiteral() && !(expression1 instanceof TemporalLiteral)) {
      expression1 = TemporalLiteral.of(expression1.evaluateValue(null));
    }
    if (expression2.isLiteral() && !(expression2 instanceof TemporalLiteral)) {
      expression2 = TemporalLiteral.of(expression2.evaluateValue(null));
    }
  }

  // Evaluate both operands as time intervals and delegate the comparison to the subclass.
  @Override
  public boolean evaluate(final Map fieldValues) {
    final Interval value1 = TimeUtils.getInterval(expression1.evaluateValue(fieldValues));
    final Interval value2 = TimeUtils.getInterval(expression2.evaluateValue(fieldValues));
    return evaluateInternal(value1, value2);
  }

  // Same as above, but the operands are evaluated directly against an adapter entry.
  @Override
  public boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    final Interval value1 = TimeUtils.getInterval(expression1.evaluateValue(adapter, entry));
    final Interval value2 = TimeUtils.getInterval(expression2.evaluateValue(adapter, entry));
    return evaluateInternal(value1, value2);
  }

  // Subclasses implement the actual temporal comparison of the two intervals.
  protected abstract boolean evaluateInternal(final Interval value1, final Interval value2);

  // A field is constrainable only when one operand is a field reference and the other is a
  // literal; otherwise no index constraint can be derived.
  @Override
  public Set getConstrainableFields() {
    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {
      return Sets.newHashSet(((FieldValue) expression1).getFieldName());
    } else if ((expression2 instanceof FieldValue) && expression1.isLiteral()) {
      return Sets.newHashSet(((FieldValue) expression2).getFieldName());
    }
    return Sets.newHashSet();
  }

  // Returns true when the named adapter field maps to an index field together with at least one
  // other adapter field (adapterFields.length > 1) — presumably a start/end pair forming a time
  // range; TODO confirm against IndexFieldMapper usage.
  private boolean isPartOfRange(final String fieldName, final AdapterToIndexMapping indexMapping) {
    for (final IndexFieldMapper mapper : indexMapping.getIndexFieldMappers()) {
      final String[] adapterFields = mapper.getAdapterFields();
      for (int i = 0; i < adapterFields.length; i++) {
        if (adapterFields[i].equals(fieldName)) {
          return adapterFields.length > 1;
        }
      }
    }
    return false;
  }

  // Builds numeric index constraints when one operand is an indexed field and the other is a
  // literal. The two branches mirror each other: the first handles field-vs-literal, the second
  // literal-vs-field (passing reversed=true to the subclass constraint builder). Statistics are
  // consulted for the field's time range unless the index strategy is a simple numeric one.
  @SuppressWarnings("unchecked")
  @Override
  public > FilterConstraints getConstraints(
      final Class constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set indexedFields) {
    if ((expression1 instanceof FieldValue)
        && indexedFields.contains(((FieldValue) expression1).getFieldName())
        && expression2.isLiteral()
        && constraintClass.isAssignableFrom(Double.class)) {
      final Double minValue;
      final Double maxValue;
      if (index.getIndexStrategy() instanceof SimpleNumericIndexStrategy) {
        // a simple numeric index needs no statistic-derived bounds
        minValue = null;
        maxValue = null;
      } else {
        final TimeRangeValue timeRange =
            InternalStatisticsHelper.getFieldStatistic(
                statsStore,
                TimeRangeStatistic.STATS_TYPE,
                adapter.getTypeName(),
                ((FieldValue) expression1).getFieldName());
        if (timeRange != null) {
          minValue = (double) timeRange.getMin();
          maxValue = (double) timeRange.getMax();
        } else {
          // We cannot determine the query range for the binned index without a time range
          // statistic, so no constraints can be derived.
          return FilterConstraints.empty();
        }
      }
      String fieldName = ((FieldValue) expression1).getFieldName();
      final boolean partOfRange = isPartOfRange(fieldName, indexMapping);
      final Interval literal = expression2.evaluateValue(null, null);
      if (literal != null) {
        return FilterConstraints.of(
            adapter,
            indexMapping,
            index,
            fieldName,
            (IndexFieldConstraints) getConstraints(
                literal,
                minValue,
                maxValue,
                false,
                !partOfRange && index.getIndexStrategy() instanceof SimpleNumericIndexStrategy));
      }
    } else if ((expression2 instanceof FieldValue)
        && indexedFields.contains(((FieldValue) expression2).getFieldName())
        && expression1.isLiteral()
        && constraintClass.isAssignableFrom(Double.class)) {
      final Double minValue;
      final Double maxValue;
      if (index.getIndexStrategy() instanceof SimpleNumericIndexStrategy) {
        minValue = null;
        maxValue = null;
      } else {
        final TimeRangeValue timeRange =
            InternalStatisticsHelper.getFieldStatistic(
                statsStore,
                TimeRangeStatistic.STATS_TYPE,
                adapter.getTypeName(),
                ((FieldValue) expression2).getFieldName());
        if (timeRange != null) {
          minValue = (double) timeRange.getMin();
          maxValue = (double) timeRange.getMax();
        } else {
          // We cannot determine the query range for the binned index without a time range
          // statistic, so no constraints can be derived.
          return FilterConstraints.empty();
        }
      }
      String fieldName = ((FieldValue) expression2).getFieldName();
      final boolean partOfRange = isPartOfRange(fieldName, indexMapping);
      final Interval literal = expression1.evaluateValue(null, null);
      if (literal != null) {
        return FilterConstraints.of(
            adapter,
            indexMapping,
            index,
            fieldName,
            (IndexFieldConstraints) getConstraints(
                literal,
                minValue,
                maxValue,
                true,
                !partOfRange && index.getIndexStrategy() instanceof SimpleNumericIndexStrategy));
      }
    }
    return FilterConstraints.empty();
  }

  // Subclasses translate the literal interval into a numeric (epoch millisecond) constraint;
  // reversed indicates that the literal is the first operand rather than the second.
  protected abstract NumericFieldConstraints getConstraints(
      final Interval literal,
      final Double minRange,
      final Double maxRange,
      final boolean reversed,
      final boolean exact);
}


================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/During.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints; import org.threeten.extra.Interval; /** * Predicate that passes when the first operand takes place during the second operand. */ public class During extends BinaryTemporalPredicate { public During() {} public During(final TemporalExpression expression1, final TemporalExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final Interval value1, final Interval value2) { if ((value1 == null) || (value2 == null)) { return false; } return value1.getStart().compareTo(value2.getStart()) >= 0 && TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) <= 0; } @Override public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" DURING "); sb.append(expression2.toString()); return sb.toString(); } @Override public NumericFieldConstraints getConstraints( final Interval literal, final Double minValue, final Double maxValue, final boolean reversed, final boolean exact) { return NumericFieldConstraints.of( (double) literal.getStart().toEpochMilli(), (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(), true, false, exact); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/DuringOrAfter.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.threeten.extra.Interval;

/**
 * Predicate that passes when the first operand takes place during or after the second operand.
 */
public class DuringOrAfter extends BinaryTemporalPredicate {

  public DuringOrAfter() {}

  public DuringOrAfter(final TemporalExpression expression1, final TemporalExpression expression2) {
    super(expression1, expression2);
  }

  @Override
  public boolean evaluateInternal(final Interval value1, final Interval value2) {
    // Passes when value1 does not start before value2; nulls never match.
    return (value1 != null)
        && (value2 != null)
        && (value1.getStart().compareTo(value2.getStart()) >= 0);
  }

  @Override
  public String toString() {
    return expression1.toString() + " DURING OR AFTER " + expression2.toString();
  }

  @Override
  public NumericFieldConstraints getConstraints(
      final Interval literal,
      final Double minValue,
      final Double maxValue,
      final boolean reversed,
      final boolean exact) {
    if (reversed) {
      // literal DURING OR AFTER field: field values up to the literal's end qualify.
      final double upper = TimeUtils.getIntervalEnd(literal).toEpochMilli();
      return NumericFieldConstraints.of(minValue, upper, true, false, exact);
    }
    // field DURING OR AFTER literal: field values from the literal's start onward qualify.
    final double lower = literal.getStart().toEpochMilli();
    return NumericFieldConstraints.of(lower, maxValue, true, true, exact);
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalBetween.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import java.util.List;
import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.simple.SimpleNumericIndexStrategy;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.expression.FieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.Filter;
import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;
import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;
import org.locationtech.geowave.core.store.query.filter.expression.Predicate;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.threeten.extra.Interval;
import com.google.common.collect.Sets;

/**
 * Predicate that passes when the first operand is between the provided lower and upper bound
 * operands. If the lower bound is a time range, the start value of the bound is used. If the upper
 * bound is a time range, the end value of the bound is used.
 */
public class TemporalBetween implements Predicate {

  // NOTE(review): generic type parameters appear to have been stripped from this extract
  // (e.g. the raw Set/Map parameters and the getConstraints signature below) — confirm
  // exact signatures against the upstream source before relying on them.

  // The expression being tested.
  private TemporalExpression valueExpr;
  // Lower bound; its start instant is used for the comparison.
  private TemporalExpression lowerBoundExpr;
  // Upper bound; its end instant is used for the comparison.
  private TemporalExpression upperBoundExpr;

  public TemporalBetween() {}

  public TemporalBetween(
      final TemporalExpression value,
      final TemporalExpression lowerBound,
      final TemporalExpression upperBound) {
    valueExpr = value;
    lowerBoundExpr = lowerBound;
    upperBoundExpr = upperBound;
  }

  public TemporalExpression getValue() {
    return valueExpr;
  }

  public TemporalExpression getLowerBound() {
    return lowerBoundExpr;
  }

  public TemporalExpression getUpperBound() {
    return upperBoundExpr;
  }

  /**
   * Pre-resolves any literal operands to concrete {@link TemporalLiteral} instances so they are
   * not re-evaluated for every row.
   */
  @Override
  public void prepare(
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    if (valueExpr.isLiteral() && !(valueExpr instanceof TemporalLiteral)) {
      valueExpr = TemporalLiteral.of(valueExpr.evaluateValue(null));
    }
    if (lowerBoundExpr.isLiteral() && !(lowerBoundExpr instanceof TemporalLiteral)) {
      lowerBoundExpr = TemporalLiteral.of(lowerBoundExpr.evaluateValue(null));
    }
    if (upperBoundExpr.isLiteral() && !(upperBoundExpr instanceof TemporalLiteral)) {
      upperBoundExpr = TemporalLiteral.of(upperBoundExpr.evaluateValue(null));
    }
  }

  // Collects the names of any adapter fields referenced by the three operands.
  @Override
  public void addReferencedFields(final Set fields) {
    if (valueExpr instanceof FieldValue) {
      fields.add(((FieldValue) valueExpr).getFieldName());
    }
    if (lowerBoundExpr instanceof FieldValue) {
      fields.add(((FieldValue) lowerBoundExpr).getFieldName());
    }
    if (upperBoundExpr instanceof FieldValue) {
      fields.add(((FieldValue) upperBoundExpr).getFieldName());
    }
  }

  @Override
  public boolean evaluate(final Map fieldValues) {
    final Interval value = TimeUtils.getInterval(valueExpr.evaluateValue(fieldValues));
    final Interval lowerBound = TimeUtils.getInterval(lowerBoundExpr.evaluateValue(fieldValues));
    final Interval upperBound = TimeUtils.getInterval(upperBoundExpr.evaluateValue(fieldValues));
    return evaluate(value, lowerBound, upperBound);
  }

  @Override
  public boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    final Interval value = TimeUtils.getInterval(valueExpr.evaluateValue(adapter, entry));
    final Interval lowerBound = TimeUtils.getInterval(lowerBoundExpr.evaluateValue(adapter, entry));
    final Interval upperBound = TimeUtils.getInterval(upperBoundExpr.evaluateValue(adapter, entry));
    return evaluate(value, lowerBound, upperBound);
  }

  // Core test: value starts at/after the lower bound's start AND ends at/before the upper
  // bound's end; any null operand fails the predicate.
  private boolean evaluate(
      final Interval value,
      final Interval lowerBound,
      final Interval upperBound) {
    if ((value == null) || (lowerBound == null) || (upperBound == null)) {
      return false;
    }
    return value.getStart().compareTo(lowerBound.getStart()) >= 0
        && TimeUtils.getIntervalEnd(value).compareTo(TimeUtils.getIntervalEnd(upperBound)) <= 0;
  }

  // Drops this predicate entirely when all of its referenced fields are being removed.
  @Override
  public Filter removePredicatesForFields(Set fields) {
    final Set referencedFields = Sets.newHashSet();
    valueExpr.addReferencedFields(referencedFields);
    lowerBoundExpr.addReferencedFields(referencedFields);
    upperBoundExpr.addReferencedFields(referencedFields);
    if (fields.containsAll(referencedFields)) {
      return null;
    }
    return this;
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder(valueExpr.toString());
    sb.append(" BETWEEN ");
    sb.append(lowerBoundExpr.toString());
    sb.append(" AND ");
    sb.append(upperBoundExpr.toString());
    return sb.toString();
  }

  // Serializes the three operand expressions in order: value, lower bound, upper bound.
  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(new Persistable[] {valueExpr, lowerBoundExpr, upperBoundExpr});
  }

  // Deserialization mirrors toBinary(): the list order is value, lower bound, upper bound.
  @Override
  public void fromBinary(final byte[] bytes) {
    final List expressions = PersistenceUtils.fromBinaryAsList(bytes);
    valueExpr = (TemporalExpression) expressions.get(0);
    lowerBoundExpr = (TemporalExpression) expressions.get(1);
    upperBoundExpr = (TemporalExpression) expressions.get(2);
  }

  // Only the value operand can be constrained, and only when both bounds are literals.
  @Override
  public Set getConstrainableFields() {
    if ((valueExpr instanceof FieldValue)
        && lowerBoundExpr.isLiteral()
        && upperBoundExpr.isLiteral()) {
      return Sets.newHashSet(((FieldValue) valueExpr).getFieldName());
    }
    return Sets.newHashSet();
  }

  /**
   * Builds index constraints spanning [lowerBound.start, upperBound.end) when the value operand
   * is an indexed field and both bounds are literals. The constraint is only marked exact for
   * simple numeric index strategies; otherwise rows must still be re-filtered.
   */
  @SuppressWarnings("unchecked")
  @Override
  public > FilterConstraints getConstraints(
      final Class constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set indexedFields) {
    if ((valueExpr instanceof FieldValue)
        && indexedFields.contains(((FieldValue) valueExpr).getFieldName())
        && lowerBoundExpr.isLiteral()
        && upperBoundExpr.isLiteral()
        && constraintClass.isAssignableFrom(Double.class)) {
      final Interval lowerBound = lowerBoundExpr.evaluateValue(null, null);
      final Interval upperBound = upperBoundExpr.evaluateValue(null, null);
      if ((lowerBound != null) && (upperBound != null)) {
        return FilterConstraints.of(
            adapter,
            indexMapping,
            index,
            ((FieldValue) valueExpr).getFieldName(),
            (IndexFieldConstraints) NumericFieldConstraints.of(
                (double) lowerBound.getStart().toEpochMilli(),
                (double) TimeUtils.getIntervalEnd(upperBound).toEpochMilli(),
                true,
                false,
                index.getIndexStrategy() instanceof SimpleNumericIndexStrategy));
      }
    }
    return FilterConstraints.empty();
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalEqualTo.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.threeten.extra.Interval;

/**
 * Predicate that passes when the first operand is equal to the second operand.
 */
public class TemporalEqualTo extends BinaryTemporalPredicate {

  public TemporalEqualTo() {}

  public TemporalEqualTo(
      final TemporalExpression expression1,
      final TemporalExpression expression2) {
    super(expression1, expression2);
  }

  @Override
  public boolean evaluateInternal(final Interval value1, final Interval value2) {
    if ((value1 == null) || (value2 == null)) {
      // Two nulls are considered equal; a single null never matches.
      return value1 == value2;
    }
    final boolean sameStart = value1.getStart().compareTo(value2.getStart()) == 0;
    final boolean sameEnd =
        TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) == 0;
    return sameStart && sameEnd;
  }

  @Override
  public String toString() {
    return expression1.toString() + " = " + expression2.toString();
  }

  @Override
  public NumericFieldConstraints getConstraints(
      final Interval literal,
      final Double minValue,
      final Double maxValue,
      final boolean reversed,
      final boolean exact) {
    final double start = literal.getStart().toEpochMilli();
    if (exact && literal.isEmpty()) {
      // An instant can be matched exactly with a degenerate inclusive range.
      return NumericFieldConstraints.of(start, start, true, true, exact);
    }
    // A non-instant interval cannot be matched exactly on a single numeric dimension,
    // so fall back to an inexact range over the literal's span.
    final double end = TimeUtils.getIntervalEnd(literal).toEpochMilli();
    return NumericFieldConstraints.of(start, end, true, false, false);
  }
}
================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalExpression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.Predicate; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.threeten.extra.Interval; /** * Interface for expressions that resolve to temporal objects. */ public interface TemporalExpression extends ComparableExpression { // SimpleDateFormat is not thread safe public static final ThreadLocal SUPPORTED_DATE_FORMATS = new ThreadLocal() { @Override protected SimpleDateFormat[] initialValue() { return new SimpleDateFormat[] { new SimpleDateFormat("yyyy-MM-dd HH:mm:ssZ"), new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"), new SimpleDateFormat("yyyy-MM-dd")}; } }; /** * Create a predicate that tests to see if this expression is equal to the provided object. The * operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. 
* * @param other the temporal object to test against * @return the equals predicate */ @Override default Predicate isEqualTo(final Object other) { return new TemporalEqualTo(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is not equal to the provided object. * The operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the not equals predicate */ @Override default Predicate isNotEqualTo(final Object other) { return new TemporalNotEqualTo(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is less than (before) the provided * object. The operand can be either another temporal expression, or any object that can be * converted to a temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the less than predicate */ @Override default Predicate isLessThan(final Object other) { return isBefore(toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is less than or equal to (before or * during) the provided object. The operand can be either another temporal expression, or any * object that can be converted to a temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the less than or equal to predicate */ @Override default Predicate isLessThanOrEqualTo(final Object other) { return isBeforeOrDuring(toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is greater than (after) the provided * object. The operand can be either another temporal expression, or any object that can be * converted to a temporal literal such as Date, Calendar, or Long. 
* * @param other the temporal object to test against * @return the greater than predicate */ @Override default Predicate isGreaterThan(final Object other) { return isAfter(toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is greater than or equal to (during or * after) the provided object. The operand can be either another temporal expression, or any * object that can be converted to a temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the greater than or equal to predicate */ @Override default Predicate isGreaterThanOrEqualTo(final Object other) { return isDuringOrAfter(toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is between the provided lower and upper * bounds. The operands can be either temporal expressions, or any object that can be converted to * a temporal literal such as Date, Calendar, or Long. * * @param lowerBound the lower bound to test against * @param upperBound the upper bound to test against * @return the between predicate */ @Override default Predicate isBetween(final Object lowerBound, final Object upperBound) { return new TemporalBetween( this, toTemporalExpression(lowerBound), toTemporalExpression(upperBound)); } /** * Create a predicate that tests to see if this expression is after the provided object. The * operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the after predicate */ default Predicate isAfter(final Object other) { return new After(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is during or after to the provided * object. The operand can be either another temporal expression, or any object that can be * converted to a temporal literal such as Date, Calendar, or Long. 
* * @param other the temporal object to test against * @return the during or after predicate */ default Predicate isDuringOrAfter(final Object other) { return new DuringOrAfter(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is before to the provided object. The * operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the before predicate */ default Predicate isBefore(final Object other) { return new Before(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is before or during to the provided * object. The operand can be either another temporal expression, or any object that can be * converted to a temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the before or during predicate */ default Predicate isBeforeOrDuring(final Object other) { return new BeforeOrDuring(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression is during to the provided object. The * operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the equals predicate */ default Predicate isDuring(final Object other) { return new During(this, toTemporalExpression(other)); } /** * Create a predicate that tests to see if this expression contains the provided object. The * operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. 
* * @param other the temporal object to test against * @return the contains predicate */ default Predicate contains(final Object other) { // this contains other if other is during this return new During(toTemporalExpression(other), this); } /** * Create a predicate that tests to see if this expression overlaps the provided object. The * operand can be either another temporal expression, or any object that can be converted to a * temporal literal such as Date, Calendar, or Long. * * @param other the temporal object to test against * @return the overlaps predicate */ default Predicate overlaps(final Object other) { return new TimeOverlaps(this, toTemporalExpression(other)); } /** * Convert the given object into a temporal expression, if it is not already one. * * @param obj the object to convert * @return the temporal expression */ public static TemporalExpression toTemporalExpression(final Object obj) { if (obj instanceof TemporalExpression) { return (TemporalExpression) obj; } if (obj instanceof NumericFieldValue || obj instanceof TextFieldValue) { // Numeric and text field values could be interpreted as time if needed // e.g. 
dateField AFTER timestamp return TemporalFieldValue.of(((FieldValue) obj).getFieldName()); } return TemporalLiteral.of(obj); } public static Date stringToDate(final String dateStr) { for (final SimpleDateFormat format : SUPPORTED_DATE_FORMATS.get()) { try { return format.parse(dateStr); } catch (ParseException e) { // Did not match date format } } return null; } public static Interval stringToInterval(final String intervalStr) { // 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z if (intervalStr.contains("/")) { final String[] split = intervalStr.split("/"); if (split.length == 2) { final Date date1 = stringToDate(split[0]); if (date1 != null) { final Date date2 = stringToDate(split[1]); if (date2 != null) { return TimeUtils.getInterval(date1, date2); } } } return null; } return TimeUtils.getInterval(stringToDate(intervalStr)); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalFieldValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.threeten.extra.Interval; /** * A field value implementation for temporal adapter fields. */ public class TemporalFieldValue extends FieldValue implements TemporalExpression { public TemporalFieldValue() {} public TemporalFieldValue(final String fieldName) { super(fieldName); } @Override public Interval evaluateValue(final DataTypeAdapter adapter, final T entry) { final Object value = super.evaluateValue(adapter, entry); if (value == null) { return null; } return TimeUtils.getInterval(value); } public static TemporalFieldValue of(final String fieldName) { return new TemporalFieldValue(fieldName); } @Override protected Interval evaluateValueInternal(final Object value) { if (value instanceof String) { final Interval interval = TemporalExpression.stringToInterval((String) value); if (interval == null) { throw new RuntimeException("'" + (String) value + "' is not in a supported date format."); } return interval; } return TimeUtils.getInterval(value); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalLiteral.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalReader;
import org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider.IntervalWriter;
import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;
import org.locationtech.geowave.core.store.query.filter.expression.Literal;
import org.threeten.extra.Interval;

/**
 * A temporal implementation of literal, representing temporal literal objects.
 */
public class TemporalLiteral extends Literal<Interval> implements TemporalExpression {

  public TemporalLiteral() {}

  public TemporalLiteral(final Interval literal) {
    super(literal);
  }

  /**
   * Resolve an arbitrary object to a temporal literal. Literal expressions are collapsed to
   * their underlying value, strings are parsed as dates or intervals, and any other object is
   * converted via {@link TimeUtils#getInterval}.
   *
   * @param literal the object to convert (may be {@code null})
   * @return the temporal literal
   * @throws InvalidFilterException if the object cannot be interpreted as a time interval
   */
  public static TemporalLiteral of(Object literal) {
    if (literal == null) {
      return new TemporalLiteral(null);
    }
    if (literal instanceof TemporalLiteral) {
      return (TemporalLiteral) literal;
    }
    if (literal instanceof Expression && ((Expression) literal).isLiteral()) {
      // Collapse other literal expressions to their value before converting.
      literal = ((Expression) literal).evaluateValue(null);
    }
    if (literal instanceof String) {
      final Interval interval = TemporalExpression.stringToInterval((String) literal);
      if (interval != null) {
        literal = interval;
      }
    }
    final Interval time = TimeUtils.getInterval(literal);
    if (time != null) {
      return new TemporalLiteral(time);
    }
    throw new InvalidFilterException("Unable to resolve temporal literal.");
  }

  @Override
  public String toString() {
    // BUGFIX: a null literal is valid (see of(null) and fromBinary, and toBinary already
    // guards for it), but this method previously dereferenced it and threw an NPE.
    if (literal == null) {
      return "null";
    }
    if (literal.getStart().equals(literal.getEnd())) {
      // A point in time renders as a single instant rather than a degenerate range.
      return literal.getStart().toString();
    }
    return literal.getStart().toString() + "/" + literal.getEnd().toString();
  }

  @Override
  public byte[] toBinary() {
    if (literal == null) {
      // A single 0 byte marks a null literal.
      return new byte[] {(byte) 0};
    }
    final byte[] intervalBytes = new IntervalWriter().writeField(literal);
    final ByteBuffer buffer = ByteBuffer.allocate(1 + intervalBytes.length);
    buffer.put((byte) 1);
    buffer.put(intervalBytes);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final byte nullByte = buffer.get();
    if (nullByte == 0) {
      literal = null;
      return;
    }
    final byte[] intervalBytes = new byte[buffer.remaining()];
    buffer.get(intervalBytes);
    literal = new IntervalReader().readField(intervalBytes);
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TemporalNotEqualTo.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal;

import org.locationtech.geowave.core.geotime.util.TimeUtils;
import org.locationtech.geowave.core.store.query.filter.expression.FilterRange;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.threeten.extra.Interval;
import com.google.common.collect.Lists;

/**
 * Predicate that passes when the first operand is not equal to the second operand.
 */
public class TemporalNotEqualTo extends BinaryTemporalPredicate {

  public TemporalNotEqualTo() {}

  public TemporalNotEqualTo(
      final TemporalExpression expression1,
      final TemporalExpression expression2) {
    super(expression1, expression2);
  }

  @Override
  public boolean evaluateInternal(final Interval value1, final Interval value2) {
    if ((value1 == null) || (value2 == null)) {
      // Unequal unless both operands are null.
      return value1 != value2;
    }
    return (value1.getStart().compareTo(value2.getStart()) != 0)
        || (TimeUtils.getIntervalEnd(value1).compareTo(TimeUtils.getIntervalEnd(value2)) != 0);
  }

  @Override
  public String toString() {
    return expression1.toString() + " <> " + expression2.toString();
  }

  @Override
  public NumericFieldConstraints getConstraints(
      final Interval literal,
      final Double minValue,
      final Double maxValue,
      final boolean reversed,
      final boolean exact) {
    if (!exact) {
      // An inexact constraint cannot exclude anything; scan the full range.
      return NumericFieldConstraints.of(minValue, maxValue, true, true, false);
    }
    if (literal.isEmpty()) {
      // Exclude the single instant by splitting the domain around it.
      final double instant = (double) literal.getStart().toEpochMilli();
      return NumericFieldConstraints.of(
          Lists.newArrayList(
              FilterRange.of(minValue, instant, true, false, exact),
              FilterRange.of(instant, maxValue, false, true, exact)));
    }
    // A non-instant interval cannot be excluded exactly on one numeric dimension.
    return NumericFieldConstraints.of(Lists.newArrayList());
  }
}

================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/temporal/TimeOverlaps.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints; import org.threeten.extra.Interval; /** * Predicate that passes when the first operand overlaps the second operand at any point in time. */ public class TimeOverlaps extends BinaryTemporalPredicate { public TimeOverlaps() {} public TimeOverlaps(final TemporalExpression expression1, final TemporalExpression expression2) { super(expression1, expression2); } @Override public boolean evaluateInternal(final Interval value1, final Interval value2) { if ((value1 == null) || (value2 == null)) { return false; } return TimeUtils.getIntervalEnd(value1).compareTo(value2.getStart()) > 0 && value1.getStart().compareTo(TimeUtils.getIntervalEnd(value2)) < 0; } @Override public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" OVERLAPS "); sb.append(expression2.toString()); return sb.toString(); } @Override public NumericFieldConstraints getConstraints( final Interval literal, final Double minValue, final Double maxValue, final boolean reversed, final boolean exact) { return NumericFieldConstraints.of( (double) literal.getStart().toEpochMilli(), (double) TimeUtils.getIntervalEnd(literal).toEpochMilli(), true, false, exact); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/BboxFunction.java ================================================ /** * Copyright 
(c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.gwql; import org.locationtech.geowave.core.geotime.store.query.aggregate.VectorBoundingBoxAggregation; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; /** * Bounding box aggregation function that accepts a single argument. If `*` is passed to the * function, the default geometry of the feature will be used for the calculation, otherwise, the * supplied geometry column name will be used. */ public class BboxFunction implements AggregationFunction { @Override public String getName() { return "BBOX"; } @Override public Class getReturnType() { return Envelope.class; } @Override public Aggregation getAggregation( final DataTypeAdapter adapter, final String[] functionArgs) { if (functionArgs == null || functionArgs.length != 1) { throw new RuntimeException("BBOX takes exactly 1 parameter"); } final FieldNameParam columnName = functionArgs[0].equals("*") ? 
null : new FieldNameParam(functionArgs[0]); if (columnName != null) { FieldDescriptor descriptor = adapter.getFieldDescriptor(columnName.getFieldName()); if (descriptor == null) { throw new RuntimeException( "No attribute called '" + columnName.getFieldName() + "' was found in the given type."); } if (!Geometry.class.isAssignableFrom(descriptor.bindingClass())) { throw new RuntimeException( "BBOX aggregation only works on geometry fields, given field was of type " + descriptor.bindingClass().getName() + "."); } } return new VectorBoundingBoxAggregation<>(columnName); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/DateCastableType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.gwql;

import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;
import org.locationtech.geowave.core.store.query.gwql.CastableType;
import org.locationtech.geowave.core.store.query.gwql.GWQLParseException;
import org.threeten.extra.Interval;

/**
 * GWQL castable type that converts arbitrary objects and expressions into temporal expressions
 * (i.e. the {@code date} cast).
 */
public class DateCastableType implements CastableType {
  @Override
  public String getName() {
    return "date";
  }

  @Override
  public TemporalExpression cast(final Object objectOrExpression) {
    return toTemporalExpression(objectOrExpression);
  }

  /**
   * Converts the given object or expression to a {@link TemporalExpression}.
   *
   * @param objectOrExpression the object or expression to convert
   * @return the equivalent temporal expression
   * @throws GWQLParseException if the value cannot be interpreted as a date
   */
  public static TemporalExpression toTemporalExpression(Object objectOrExpression) {
    if (objectOrExpression instanceof TemporalExpression) {
      return (TemporalExpression) objectOrExpression;
    }
    // Collapse literal expressions to their underlying value before converting.
    if ((objectOrExpression instanceof Expression)
        && ((Expression) objectOrExpression).isLiteral()) {
      objectOrExpression = ((Expression) objectOrExpression).evaluateValue(null);
    }
    if (!(objectOrExpression instanceof Expression)) {
      // Plain object (Date, Calendar, number, string, ...): delegate to the literal factory.
      try {
        return TemporalLiteral.of(objectOrExpression);
      } catch (InvalidFilterException e) {
        throw new GWQLParseException("Unable to cast literal to date", e);
      }
    }
    // Non-literal expressions: only numeric and text field references can be reinterpreted
    // as temporal field values.
    if (objectOrExpression instanceof NumericFieldValue) {
      return new TemporalFieldValue(((NumericFieldValue) objectOrExpression).getFieldName());
    }
    if (objectOrExpression instanceof TextFieldValue) {
      return new TemporalFieldValue(((TextFieldValue) objectOrExpression).getFieldName());
    }
    throw new GWQLParseException("Unable to cast expression to date");
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/GWQLSpatialTemporalExtensions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.gwql; import java.util.Calendar; import java.util.Date; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue; import org.locationtech.geowave.core.store.query.gwql.CastableType; import org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi; import org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction; import org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction; import org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction; import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction; import org.locationtech.jts.geom.Geometry; import com.google.common.collect.Lists; /** * The built-in set of functions used by the GeoWave query language. 
*/ public class GWQLSpatialTemporalExtensions implements GWQLExtensionRegistrySpi { @Override public AggregationFunction[] getAggregationFunctions() { return new AggregationFunction[] {new BboxFunction()}; } @Override public PredicateFunction[] getPredicateFunctions() { return new PredicateFunction[] { new SpatialPredicates.BboxFunction(), new SpatialPredicates.BboxLooseFunction(), new SpatialPredicates.IntersectsFunction(), new SpatialPredicates.IntersectsLooseFunction(), new SpatialPredicates.DisjointFunction(), new SpatialPredicates.DisjointLooseFunction(), new SpatialPredicates.CrossesFunction(), new SpatialPredicates.OverlapsFunction(), new SpatialPredicates.ContainsFunction(), new SpatialPredicates.TouchesFunction(), new SpatialPredicates.WithinFunction(), new TemporalPredicates.OverlapsFunction(), new TemporalPredicates.ContainsFunction()}; } @Override public ExpressionFunction[] getExpressionFunctions() { return null; } @Override public OperatorFunction[] getOperatorFunctions() { return new OperatorFunction[] { new TemporalOperators.BeforeOperator(), new TemporalOperators.BeforeOrDuringOperator(), new TemporalOperators.DuringOperator(), new TemporalOperators.DuringOrAfterOperator(), new TemporalOperators.AfterOperator()}; } @Override public CastableType[] getCastableTypes() { return new CastableType[] {new GeometryCastableType(), new DateCastableType()}; } @Override public FieldValueBuilder[] getFieldValueBuilders() { return new FieldValueBuilder[] { new FieldValueBuilder(Lists.newArrayList(Geometry.class), (fieldName) -> { return SpatialFieldValue.of(fieldName); }), new FieldValueBuilder(Lists.newArrayList(Date.class, Calendar.class), (fieldName) -> { return TemporalFieldValue.of(fieldName); })}; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/GeometryCastableType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to 
the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.gwql;

import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.FilterGeometry;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialExpression;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.TextToSpatialExpression;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression;
import org.locationtech.geowave.core.store.query.gwql.CastableType;
import org.locationtech.geowave.core.store.query.gwql.GWQLParseException;

/**
 * GWQL castable type that converts arbitrary objects and expressions into spatial expressions
 * (i.e. the {@code geometry} cast).
 */
public class GeometryCastableType implements CastableType {
  @Override
  public String getName() {
    return "geometry";
  }

  @Override
  public SpatialExpression cast(final Object objectOrExpression) {
    return toSpatialExpression(objectOrExpression);
  }

  /**
   * Converts the given object or expression to a {@link SpatialExpression}.
   *
   * @param objectOrExpression the object or expression to convert
   * @return the equivalent spatial expression
   * @throws GWQLParseException if the value cannot be interpreted as a geometry
   */
  public static SpatialExpression toSpatialExpression(Object objectOrExpression) {
    if (objectOrExpression instanceof SpatialExpression) {
      return (SpatialExpression) objectOrExpression;
    }
    // Collapse literal expressions to their underlying value before converting.
    if ((objectOrExpression instanceof Expression)
        && ((Expression) objectOrExpression).isLiteral()) {
      objectOrExpression = ((Expression) objectOrExpression).evaluateValue(null);
    }
    if (!(objectOrExpression instanceof Expression)) {
      // Plain object (Geometry, envelope, WKT string, ...): delegate to the literal factory.
      try {
        return SpatialLiteral.of(objectOrExpression);
      } catch (InvalidFilterException e) {
        throw new GWQLParseException("Unable to cast literal to geometry", e);
      }
    }
    // Non-literal expressions: only text expressions can be reinterpreted (e.g. WKT columns).
    if (objectOrExpression instanceof TextExpression) {
      return new TextToSpatialExpression((TextExpression) objectOrExpression);
    }
    throw new GWQLParseException("Unable to cast expression to geometry");
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/SpatialPredicates.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.gwql; import java.util.List; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialExpression; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.Predicate; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression; import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction; import org.opengis.referencing.crs.CoordinateReferenceSystem; public class SpatialPredicates { private static abstract class AbstractBboxFunction implements PredicateFunction { @Override public Predicate create(List> arguments) { if (arguments.size() < 5 && arguments.size() > 6) { throw new GWQLParseException("Function expects 5 or 6 arguments, got " + arguments.size()); } final SpatialExpression expression = GeometryCastableType.toSpatialExpression(arguments.get(0)); final double minX = getNumber(arguments.get(1)); final double minY = getNumber(arguments.get(2)); final double maxX = getNumber(arguments.get(3)); final double maxY = getNumber(arguments.get(4)); if (arguments.size() == 6) { if (arguments.get(5).isLiteral() && arguments.get(5) instanceof TextExpression) { final String crsStr = ((TextExpression) arguments.get(5)).evaluateValue(null); return bbox(expression, minX, 
minY, maxX, maxY, GeometryUtils.decodeCRS(crsStr)); } throw new GWQLParseException( "Expected a text literal for the coordinate reference system."); } else { return bbox(expression, minX, minY, maxX, maxY, null); } } protected abstract Predicate bbox( final SpatialExpression expression, final double minX, final double minY, final double maxX, final double maxY, final CoordinateReferenceSystem crs); private double getNumber(final Expression expression) { if (expression.isLiteral() && expression instanceof NumericExpression) { return ((NumericExpression) expression).evaluateValue(null); } throw new GWQLParseException("Expected a numeric literal for bounding box constraints."); } } public static class BboxFunction extends AbstractBboxFunction { @Override public String getName() { return "BBOX"; } @Override protected Predicate bbox( SpatialExpression expression, double minX, double minY, double maxX, double maxY, CoordinateReferenceSystem crs) { if (crs == null) { return expression.bbox(minX, minY, maxX, maxY); } return expression.bbox(minX, minY, maxX, maxY, crs); } } public static class BboxLooseFunction extends AbstractBboxFunction { @Override public String getName() { return "BBOXLOOSE"; } @Override protected Predicate bbox( SpatialExpression expression, double minX, double minY, double maxX, double maxY, CoordinateReferenceSystem crs) { if (crs == null) { return expression.bboxLoose(minX, minY, maxX, maxY); } return expression.bboxLoose(minX, minY, maxX, maxY, crs); } } private static abstract class SpatialPredicateFunction implements PredicateFunction { @Override public Predicate create(List> arguments) { if (arguments.size() == 2) { final SpatialExpression expression1 = GeometryCastableType.toSpatialExpression(arguments.get(0)); final SpatialExpression expression2 = GeometryCastableType.toSpatialExpression(arguments.get(1)); return createInternal(expression1, expression2); } throw new GWQLParseException("Function expects 2 arguments, got " + arguments.size()); } 
protected abstract Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2); } public static class IntersectsFunction extends SpatialPredicateFunction { @Override public String getName() { return "INTERSECTS"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.intersects(expression2); } } public static class IntersectsLooseFunction extends SpatialPredicateFunction { @Override public String getName() { return "INTERSECTSLOOSE"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.intersectsLoose(expression2); } } public static class DisjointFunction extends SpatialPredicateFunction { @Override public String getName() { return "DISJOINT"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.disjoint(expression2); } } public static class DisjointLooseFunction extends SpatialPredicateFunction { @Override public String getName() { return "DISJOINTLOOSE"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.disjointLoose(expression2); } } public static class CrossesFunction extends SpatialPredicateFunction { @Override public String getName() { return "CROSSES"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.crosses(expression2); } } public static class OverlapsFunction extends SpatialPredicateFunction { @Override public String getName() { return "OVERLAPS"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.overlaps(expression2); } } public static class TouchesFunction extends 
SpatialPredicateFunction { @Override public String getName() { return "TOUCHES"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.touches(expression2); } } public static class WithinFunction extends SpatialPredicateFunction { @Override public String getName() { return "WITHIN"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.within(expression2); } } public static class ContainsFunction extends SpatialPredicateFunction { @Override public String getName() { return "CONTAINS"; } @Override protected Predicate createInternal( final SpatialExpression expression1, final SpatialExpression expression2) { return expression1.contains(expression2); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/TemporalOperators.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.gwql;

import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.Predicate;
import org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;

/** GWQL temporal operators (BEFORE, DURING, AFTER, and their combinations). */
public class TemporalOperators {

  // Converts an operand to a temporal expression, failing with a parse error if impossible.
  private static TemporalExpression asTemporal(final Expression<?> expression) {
    return DateCastableType.toTemporalExpression(expression);
  }

  /** {@code a BEFORE b}: passes when the left operand is strictly before the right. */
  public static class BeforeOperator implements OperatorFunction {
    @Override
    public String getName() {
      return "BEFORE";
    }

    @Override
    public Predicate create(final Expression<?> expression1, final Expression<?> expression2) {
      return asTemporal(expression1).isBefore(asTemporal(expression2));
    }
  }

  /** {@code a BEFORE_OR_DURING b}. */
  public static class BeforeOrDuringOperator implements OperatorFunction {
    @Override
    public String getName() {
      return "BEFORE_OR_DURING";
    }

    @Override
    public Predicate create(final Expression<?> expression1, final Expression<?> expression2) {
      return asTemporal(expression1).isBeforeOrDuring(asTemporal(expression2));
    }
  }

  /** {@code a DURING b}: passes when the left operand lies within the right. */
  public static class DuringOperator implements OperatorFunction {
    @Override
    public String getName() {
      return "DURING";
    }

    @Override
    public Predicate create(final Expression<?> expression1, final Expression<?> expression2) {
      return asTemporal(expression1).isDuring(asTemporal(expression2));
    }
  }

  /** {@code a DURING_OR_AFTER b}. */
  public static class DuringOrAfterOperator implements OperatorFunction {
    @Override
    public String getName() {
      return "DURING_OR_AFTER";
    }

    @Override
    public Predicate create(final Expression<?> expression1, final Expression<?> expression2) {
      return asTemporal(expression1).isDuringOrAfter(asTemporal(expression2));
    }
  }

  /** {@code a AFTER b}: passes when the left operand is strictly after the right. */
  public static class AfterOperator implements OperatorFunction {
    @Override
    public String getName() {
      return "AFTER";
    }

    @Override
    public Predicate create(final Expression<?> expression1, final Expression<?> expression2) {
      return asTemporal(expression1).isAfter(asTemporal(expression2));
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/query/gwql/TemporalPredicates.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.query.gwql;

import java.util.List;
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.Predicate;
import org.locationtech.geowave.core.store.query.gwql.GWQLParseException;
import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;

/** GWQL temporal predicate functions (TCONTAINS, TOVERLAPS). */
public class TemporalPredicates {

  /**
   * Shared scaffolding for binary temporal predicates: validates the argument count and casts
   * both operands to temporal expressions.
   */
  private static abstract class TemporalPredicateFunction implements PredicateFunction {
    @Override
    public Predicate create(final List<Expression<?>> arguments) {
      if (arguments.size() != 2) {
        throw new GWQLParseException("Function expects 2 arguments, got " + arguments.size());
      }
      return createInternal(
          DateCastableType.toTemporalExpression(arguments.get(0)),
          DateCastableType.toTemporalExpression(arguments.get(1)));
    }

    protected abstract Predicate createInternal(
        final TemporalExpression expression1,
        final TemporalExpression expression2);
  }

  /** {@code TCONTAINS(a, b)}: passes when the first operand temporally contains the second. */
  public static class ContainsFunction extends TemporalPredicateFunction {
    @Override
    public String getName() {
      return "TCONTAINS";
    }

    @Override
    protected Predicate createInternal(
        final TemporalExpression expression1,
        final TemporalExpression expression2) {
      return expression1.contains(expression2);
    }
  }

  /** {@code TOVERLAPS(a, b)}: passes when the operands overlap at any point in time. */
  public static class OverlapsFunction extends TemporalPredicateFunction {
    @Override
    public String getName() {
      return "TOVERLAPS";
    }

    @Override
    protected Predicate createInternal(
        final TemporalExpression expression1,
        final TemporalExpression expression2) {
      return expression1.overlaps(expression2);
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/AbstractBoundingBoxValue.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import org.locationtech.jts.geom.Envelope; public abstract class AbstractBoundingBoxValue extends StatisticValue implements StatisticsIngestCallback { protected double minX = Double.MAX_VALUE; protected double minY = Double.MAX_VALUE; protected double maxX = -Double.MAX_VALUE; protected double maxY = -Double.MAX_VALUE; protected AbstractBoundingBoxValue(final Statistic statistic) { super(statistic); } public boolean isSet() { if ((minX == Double.MAX_VALUE) || (minY == Double.MAX_VALUE) || (maxX == -Double.MAX_VALUE) || (maxY == -Double.MAX_VALUE)) { return false; } return true; } public double getMinX() { return minX; } public double getMinY() { return minY; } public double getMaxX() { return maxX; } public double getMaxY() { return maxY; } public double getWidth() { return maxX - minX; } public double getHeight() { return maxY - minY; } @Override public void merge(Mergeable merge) { if ((merge != null) && (merge instanceof AbstractBoundingBoxValue)) { final AbstractBoundingBoxValue bboxStats = (AbstractBoundingBoxValue) merge; if (bboxStats.isSet()) { minX = Math.min(minX, bboxStats.minX); minY = Math.min(minY, bboxStats.minY); maxX = Math.max(maxX, 
bboxStats.maxX); maxY = Math.max(maxY, bboxStats.maxY); } } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { final Envelope env = getEnvelope(adapter, entry); if (env != null) { minX = Math.min(minX, env.getMinX()); minY = Math.min(minY, env.getMinY()); maxX = Math.max(maxX, env.getMaxX()); maxY = Math.max(maxY, env.getMaxY()); } } public abstract Envelope getEnvelope(DataTypeAdapter adapter, T entry, GeoWaveRow... rows); @Override public Envelope getValue() { if (isSet()) { return new Envelope(minX, maxX, minY, maxY); } else { return new Envelope(); } } @Override public byte[] toBinary() { final ByteBuffer buffer = ByteBuffer.allocate(32); buffer.putDouble(minX); buffer.putDouble(minY); buffer.putDouble(maxX); buffer.putDouble(maxY); return buffer.array(); } @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); minX = buffer.getDouble(); minY = buffer.getDouble(); maxX = buffer.getDouble(); maxY = buffer.getDouble(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/AbstractTimeRangeValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.statistics;

import java.nio.ByteBuffer;
import java.time.Instant;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import org.locationtech.geowave.core.geotime.store.query.TemporalRange;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;
import org.threeten.extra.Interval;

/**
 * Base statistic value that tracks the min/max time range (in epoch milliseconds) of all ingested
 * entries. Subclasses supply the per-entry interval via {@link #getInterval}.
 */
public abstract class AbstractTimeRangeValue extends StatisticValue<Interval> implements
    StatisticsIngestCallback {
  // Sentinel-initialized bounds: while unset, min sits at Long.MAX_VALUE and max at
  // Long.MIN_VALUE so the first ingested interval always replaces them.
  private long min = Long.MAX_VALUE;
  private long max = Long.MIN_VALUE;

  protected AbstractTimeRangeValue(final Statistic<?> statistic) {
    super(statistic);
  }

  /** @return {@code true} once at least one interval has been folded into the range */
  public boolean isSet() {
    // De Morgan of "both bounds still at their sentinel values".
    return (min != Long.MAX_VALUE) || (max != Long.MIN_VALUE);
  }

  public TemporalRange asTemporalRange() {
    return new TemporalRange(new Date(getMin()), new Date(getMax()));
  }

  public long getMin() {
    return min;
  }

  public long getMax() {
    return max;
  }

  public long getRange() {
    return max - min;
  }

  public Date getMaxTime() {
    final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
    calendar.setTimeInMillis(getMax());
    return calendar.getTime();
  }

  public Date getMinTime() {
    final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
    calendar.setTimeInMillis(getMin());
    return calendar.getTime();
  }

  @Override
  public byte[] toBinary() {
    // Both bounds are varint-encoded timestamps; size the buffer exactly.
    final ByteBuffer buffer =
        ByteBuffer.allocate(VarintUtils.timeByteLength(min) + VarintUtils.timeByteLength(max));
    VarintUtils.writeTime(min, buffer);
    VarintUtils.writeTime(max, buffer);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    min = VarintUtils.readTime(buffer);
    max = VarintUtils.readTime(buffer);
  }

  @Override
  public <T> void entryIngested(
      final DataTypeAdapter<T> adapter,
      final T entry,
      final GeoWaveRow... rows) {
    final Interval interval = getInterval(adapter, entry, rows);
    if (interval != null) {
      min = Math.min(min, interval.getStart().toEpochMilli());
      max = Math.max(max, interval.getEnd().toEpochMilli());
    }
  }

  /** Extracts the time interval of a single entry, or {@code null} if it has no temporal extent. */
  protected abstract <T> Interval getInterval(
      final DataTypeAdapter<T> adapter,
      final T entry,
      final GeoWaveRow... rows);

  @Override
  public void merge(final Mergeable merge) {
    // instanceof is false for null, so no separate null check is needed.
    if (merge instanceof AbstractTimeRangeValue) {
      final AbstractTimeRangeValue other = (AbstractTimeRangeValue) merge;
      if (other.isSet()) {
        min = Math.min(min, other.getMin());
        max = Math.max(max, other.getMax());
      }
    }
  }

  @Override
  public Interval getValue() {
    if (!isSet()) {
      return null;
    }
    return Interval.of(Instant.ofEpochMilli(min), Instant.ofEpochMilli(max));
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/BoundingBoxStatistic.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics; import java.nio.ByteBuffer; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.MathTransform; import com.beust.jcommander.IStringConverter; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; public class BoundingBoxStatistic extends FieldStatistic { public static final FieldStatisticType STATS_TYPE = new FieldStatisticType<>("BOUNDING_BOX"); @Parameter( names = {"--sourceCrs"}, description = "CRS of source geometry.", converter = CRSConverter.class) private CoordinateReferenceSystem sourceCrs = null; @Parameter( names = {"--crs"}, description = "CRS of the bounding box statistic.", converter = CRSConverter.class) private CoordinateReferenceSystem destinationCrs = null; private MathTransform crsTransform = null; public BoundingBoxStatistic() { this(null, null); } public BoundingBoxStatistic(final String 
typeName, final String fieldName) { this(typeName, fieldName, null, null); } public BoundingBoxStatistic( final String typeName, final String fieldName, final CoordinateReferenceSystem sourceCrs, final CoordinateReferenceSystem destinationCrs) { super(STATS_TYPE, typeName, fieldName); this.sourceCrs = sourceCrs; this.destinationCrs = destinationCrs; } public MathTransform getTransform() { if (sourceCrs != null && destinationCrs != null && crsTransform == null) { try { crsTransform = CRS.findMathTransform(sourceCrs, destinationCrs, true); } catch (FactoryException e) { throw new ParameterException( "Unable to create CRS transform for bounding box statistic.", e); } } return crsTransform; } public void setSourceCrs(final CoordinateReferenceSystem sourceCrs) { this.sourceCrs = sourceCrs; } public CoordinateReferenceSystem getSourceCrs() { return sourceCrs; } public void setDestinationCrs(final CoordinateReferenceSystem destinationCrs) { this.destinationCrs = destinationCrs; } public CoordinateReferenceSystem getDestinationCrs() { return destinationCrs; } @Override public boolean isCompatibleWith(final Class fieldClass) { return Geometry.class.isAssignableFrom(fieldClass); } @Override public String getDescription() { return "Maintains the bounding box for a geometry field."; } @Override public BoundingBoxValue createEmpty() { return new BoundingBoxValue(this); } private byte[] sourceCrsBytes = null; private byte[] destinationCrsBytes = null; private void transformToBytes() { sourceCrsBytes = sourceCrs == null ? new byte[0] : StringUtils.stringToBinary(sourceCrs.toWKT()); destinationCrsBytes = destinationCrs == null ? 
new byte[0] : StringUtils.stringToBinary(destinationCrs.toWKT()); } @Override public int byteLength() { if (sourceCrsBytes == null) { transformToBytes(); } return super.byteLength() + sourceCrsBytes.length + VarintUtils.unsignedShortByteLength((short) sourceCrsBytes.length) + destinationCrsBytes.length + VarintUtils.unsignedShortByteLength((short) destinationCrsBytes.length); } @Override public void writeBytes(final ByteBuffer buffer) { super.writeBytes(buffer); if (sourceCrsBytes == null) { transformToBytes(); } VarintUtils.writeUnsignedShort((short) sourceCrsBytes.length, buffer); buffer.put(sourceCrsBytes); VarintUtils.writeUnsignedShort((short) destinationCrsBytes.length, buffer); buffer.put(destinationCrsBytes); sourceCrsBytes = null; destinationCrsBytes = null; } @Override public void readBytes(final ByteBuffer buffer) { super.readBytes(buffer); try { short length = VarintUtils.readUnsignedShort(buffer); sourceCrsBytes = new byte[length]; buffer.get(sourceCrsBytes); if (length > 0) { sourceCrs = CRS.parseWKT(StringUtils.stringFromBinary(sourceCrsBytes)); } length = VarintUtils.readUnsignedShort(buffer); destinationCrsBytes = new byte[length]; buffer.get(destinationCrsBytes); if (length > 0) { destinationCrs = CRS.parseWKT(StringUtils.stringFromBinary(destinationCrsBytes)); } } catch (FactoryException e) { throw new RuntimeException("Unable to parse statistic CRS", e); } sourceCrsBytes = null; destinationCrsBytes = null; } public static class BoundingBoxValue extends AbstractBoundingBoxValue { public BoundingBoxValue() { this(null); } public BoundingBoxValue(final Statistic statistic) { super(statistic); } @Override public Envelope getEnvelope(DataTypeAdapter adapter, T entry, GeoWaveRow... 
rows) { BoundingBoxStatistic bboxStatistic = (BoundingBoxStatistic) statistic; Object fieldValue = adapter.getFieldValue(entry, bboxStatistic.getFieldName()); if ((fieldValue != null) && (fieldValue instanceof Geometry)) { Geometry geometry = (Geometry) fieldValue; if (bboxStatistic.getTransform() != null) { geometry = GeometryUtils.crsTransform(geometry, bboxStatistic.getTransform()); } if (geometry != null && !geometry.isEmpty()) { return geometry.getEnvelopeInternal(); } } return null; } } public static class CRSConverter implements IStringConverter { @Override public CoordinateReferenceSystem convert(final String value) { CoordinateReferenceSystem convertedValue; try { convertedValue = CRS.decode(value); } catch (Exception e) { throw new ParameterException("Unrecognized CRS: " + value); } return convertedValue; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/GeotimeRegisteredStatistics.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.geowave.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy; import org.locationtech.geowave.core.geotime.store.statistics.binning.TimeRangeFieldValueBinningStrategy; import org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI; public class GeotimeRegisteredStatistics implements StatisticsRegistrySPI { @Override public RegisteredStatistic[] getRegisteredStatistics() { return new RegisteredStatistic[] { // Field Statistics new RegisteredStatistic( BoundingBoxStatistic.STATS_TYPE, BoundingBoxStatistic::new, BoundingBoxValue::new, (short) 2100, (short) 2101), new RegisteredStatistic( TimeRangeStatistic.STATS_TYPE, TimeRangeStatistic::new, TimeRangeValue::new, (short) 2102, (short) 2103)}; } @Override public RegisteredBinningStrategy[] getRegisteredBinningStrategies() { return new RegisteredBinningStrategy[] { new RegisteredBinningStrategy( TimeRangeFieldValueBinningStrategy.NAME, TimeRangeFieldValueBinningStrategy::new, (short) 2150), new RegisteredBinningStrategy( SpatialFieldValueBinningStrategy.NAME, SpatialFieldValueBinningStrategy::new, (short) 2151)}; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/SpatialTemporalStatisticQueryBuilder.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.geowave.core.store.api.StatisticQueryBuilder; import org.locationtech.geowave.core.store.statistics.query.FieldStatisticQueryBuilder; import org.locationtech.jts.geom.Envelope; import org.threeten.extra.Interval; public interface SpatialTemporalStatisticQueryBuilder { /** * Create a new field statistic query builder for a bounding box statistic. * * @return the field statistic query builder */ static FieldStatisticQueryBuilder bbox() { return StatisticQueryBuilder.newBuilder(BoundingBoxStatistic.STATS_TYPE); } /** * Create a new field statistic query builder for a time range statistic. * * @return the field statistic query builder */ static FieldStatisticQueryBuilder timeRange() { return StatisticQueryBuilder.newBuilder(TimeRangeStatistic.STATS_TYPE); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/TimeRangeStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics; import java.util.Calendar; import java.util.Date; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import org.threeten.extra.Interval; public class TimeRangeStatistic extends FieldStatistic { public static final FieldStatisticType STATS_TYPE = new FieldStatisticType<>("TIME_RANGE"); public TimeRangeStatistic() { super(STATS_TYPE); } public TimeRangeStatistic(final String typeName, final String fieldName) { super(STATS_TYPE, typeName, fieldName); } @Override public String getDescription() { return "Maintains the time range of a temporal field."; } @Override public TimeRangeValue createEmpty() { return new TimeRangeValue(this); } @Override public boolean isCompatibleWith(final Class fieldClass) { return Date.class.isAssignableFrom(fieldClass) || Calendar.class.isAssignableFrom(fieldClass) || Number.class.isAssignableFrom(fieldClass); } public static class TimeRangeValue extends AbstractTimeRangeValue { public TimeRangeValue() { this(null); } public TimeRangeValue(final Statistic statistic) { super(statistic); } @Override protected Interval getInterval( final DataTypeAdapter adapter, final T entry, final GeoWaveRow... 
rows) { final Object fieldValue = adapter.getFieldValue(entry, ((TimeRangeStatistic) statistic).getFieldName()); return TimeUtils.getInterval(fieldValue); } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/binning/SpatialFieldValueBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics.binning; import java.nio.ByteBuffer; import java.util.Arrays; import org.apache.commons.lang3.ArrayUtils; import org.locationtech.geowave.core.geotime.binning.ComplexGeometryBinningOption; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.binning.BinningStrategyUtils; import org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import com.beust.jcommander.IStringConverter; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.google.common.primitives.Bytes; public class SpatialFieldValueBinningStrategy extends FieldValueBinningStrategy { public static final String NAME = "SPATIAL"; @Parameter( names = {"--precision", "--resolution", "--length", "--level"}, description = "The precision (also called resolution, length, or level) of the binning strategy") protected int precision = 8; @Parameter( names = {"--geometry"}, converter = ComplexGeometryBinningOptionConverter.class, description = "Approach for handling complex geometry. 
Available options are 'USE_CENTROID_ONLY', 'USE_FULL_GEOMETRY', and 'USE_FULL_GEOMETRY_SCALE_BY_OVERLAP'.") protected ComplexGeometryBinningOption complexGeometry = ComplexGeometryBinningOption.USE_CENTROID_ONLY; @Parameter( names = {"--type"}, converter = SpatialBinningTypeConverter.class, description = "The type of binning (either h3, s2, or geohash).") protected SpatialBinningType type = SpatialBinningType.S2; public SpatialFieldValueBinningStrategy() { super(); } public SpatialFieldValueBinningStrategy(final String... fields) { super(fields); } public SpatialFieldValueBinningStrategy( final SpatialBinningType type, final int precision, final ComplexGeometryBinningOption complexGeometry, final String... fields) { super(fields); this.type = type; this.precision = precision; this.complexGeometry = complexGeometry; } public int getPrecision() { return precision; } public void setPrecision(final int precision) { this.precision = precision; } public ComplexGeometryBinningOption getComplexGeometry() { return complexGeometry; } public void setComplexGeometry(final ComplexGeometryBinningOption complexGeometry) { this.complexGeometry = complexGeometry; } public SpatialBinningType getType() { return type; } public void setType(final SpatialBinningType type) { this.type = type; } @Override public String getDefaultTag() { // this intentionally doesn't include ComplexGeometryBinningOption, if for some reason a user // wants to have multiple on the same fields of the same type with the same precision just // different binning options, they'd need to define their own tags return super.getDefaultTag() + "-" + type + "(" + precision + ")"; } @Override public String getDescription() { return "Bin a statistic by a spatial aggregation (such as geohash, H3, or S2) on a specified geometry field."; } @Override public String getStrategyName() { return NAME; } protected ByteArray[] getSpatialBins(final Geometry geometry) { return type.getSpatialBins(geometry, precision); } 
@SuppressWarnings("unchecked") @Override public Class[] supportedConstraintClasses() { return ArrayUtils.addAll( super.supportedConstraintClasses(), Envelope.class, Envelope[].class, Geometry.class, Geometry[].class); } private ByteArray[] getSpatialBinsFromObj(final Object value) { if (value instanceof Geometry) { if (ComplexGeometryBinningOption.USE_CENTROID_ONLY.equals(complexGeometry)) { return getSpatialBins(((Geometry) value).getCentroid()); } return getSpatialBins((Geometry) value); } return new ByteArray[0]; } @Override public ByteArray[] getBins( final DataTypeAdapter adapter, final T entry, final GeoWaveRow... rows) { if (fields.isEmpty()) { return new ByteArray[0]; } else if (fields.size() == 1) { final Object value = adapter.getFieldValue(entry, fields.get(0)); return getSpatialBinsFromObj(value); } final ByteArray[][] fieldValues = fields.stream().map( field -> getSpatialBinsFromObj(adapter.getFieldValue(entry, field))).toArray( ByteArray[][]::new); return getAllCombinationsNoSeparator(fieldValues); } @Override public String binToString(final ByteArray bin) { final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes()); final StringBuffer sb = new StringBuffer(); while (buffer.remaining() > 0) { final byte[] binId = new byte[type.getBinByteLength(precision)]; buffer.get(binId); sb.append(type.binToString(binId)); } if (buffer.remaining() > 0) { sb.append('|'); } return sb.toString(); } @Override public byte[] toBinary() { final byte[] parentBinary = super.toBinary(); final ByteBuffer buf = ByteBuffer.allocate( parentBinary.length + VarintUtils.unsignedIntByteLength(precision) + VarintUtils.unsignedIntByteLength(complexGeometry.ordinal()) + VarintUtils.unsignedIntByteLength(type.ordinal())); VarintUtils.writeUnsignedInt(type.ordinal(), buf); VarintUtils.writeUnsignedInt(precision, buf); VarintUtils.writeUnsignedInt(complexGeometry.ordinal(), buf); buf.put(parentBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final 
ByteBuffer buf = ByteBuffer.wrap(bytes); type = SpatialBinningType.values()[VarintUtils.readUnsignedInt(buf)]; precision = VarintUtils.readUnsignedInt(buf); complexGeometry = ComplexGeometryBinningOption.values()[VarintUtils.readUnsignedInt(buf)]; final byte[] parentBinary = new byte[buf.remaining()]; buf.get(parentBinary); super.fromBinary(parentBinary); } @Override public double getWeight( final ByteArray bin, final DataTypeAdapter type, final T entry, final GeoWaveRow... rows) { if (ComplexGeometryBinningOption.USE_FULL_GEOMETRY_SCALE_BY_OVERLAP.equals(complexGeometry)) { // only compute if its intended to scale by percent overlap final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes()); double weight = 1; int i = 0; while (buffer.remaining() > 0) { final byte[] binId = new byte[this.type.getBinByteLength(precision)]; buffer.get(binId); final Geometry binGeom = this.type.getBinGeometry(new ByteArray(binId), precision); final Object value = type.getFieldValue(entry, fields.get(i++)); if (value instanceof Geometry) { // This approach could be fairly expensive, but is accurate and general-purpose // take the intersection of the field geometry with the bin geometry and take the area // weight is the ratio of the intersection area to the entire field geometry area final double area = ((Geometry) value).getArea(); if (area > 0) { if (binGeom.intersects((Geometry) value)) { final double intersectionArea = binGeom.intersection((Geometry) value).getArea(); final double fieldWeight = intersectionArea / ((Geometry) value).getArea(); weight *= fieldWeight; } } else { final double length = ((Geometry) value).getLength(); if (length > 0) { final double intersectionLength = binGeom.intersection((Geometry) value).getLength(); final double fieldWeight = intersectionLength / ((Geometry) value).getLength(); weight *= fieldWeight; } // if it has no area and no length it must be point data and not very applicable for // scaling } } } return weight; } return 1; } @Override protected 
ByteArrayConstraints singleFieldConstraints(final Object constraints) { // just convert each into a geometry (or multi-geometry) and let the underlying hashing // algorithm handle the rest if (constraints instanceof Envelope[]) { return type.getGeometryConstraints( GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection( Arrays.stream((Envelope[]) constraints).map( GeometryUtils.GEOMETRY_FACTORY::toGeometry).toArray(Geometry[]::new)), precision); } else if (constraints instanceof Envelope) { return type.getGeometryConstraints( GeometryUtils.GEOMETRY_FACTORY.toGeometry((Envelope) constraints), precision); } else if (constraints instanceof Geometry) { return type.getGeometryConstraints((Geometry) constraints, precision); } else if (constraints instanceof Geometry[]) { return type.getGeometryConstraints( GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection((Geometry[]) constraints), precision); } return super.singleFieldConstraints(constraints); } private static ByteArray[] getAllCombinationsNoSeparator(final ByteArray[][] perFieldBins) { return BinningStrategyUtils.getAllCombinations( perFieldBins, a -> new ByteArray( Bytes.concat(Arrays.stream(a).map(ByteArray::getBytes).toArray(byte[][]::new)))); } public static class ComplexGeometryBinningOptionConverter implements IStringConverter { @Override public ComplexGeometryBinningOption convert(final String value) { ComplexGeometryBinningOption convertedValue = null; try { convertedValue = ComplexGeometryBinningOption.valueOf(value.toUpperCase()); } catch (final Exception e) { // we'll throw the parameter exception instead of printing a stack trace } if (convertedValue == null) { throw new ParameterException( "Value " + value + "can not be converted to ComplexGeometryBinningOption. 
" + "Available values are: " + Arrays.toString(ComplexGeometryBinningOption.values())); } return convertedValue; } } public static class SpatialBinningTypeConverter implements IStringConverter { @Override public SpatialBinningType convert(final String value) { SpatialBinningType convertedValue = null; try { convertedValue = SpatialBinningType.valueOf(value.toUpperCase()); } catch (final Exception e) { // we'll throw the parameter exception instead of printing a stack trace } if (convertedValue == null) { throw new ParameterException( "Value " + value + "can not be converted to SpatialBinningType. " + "Available values are: " + Arrays.toString(SpatialBinningType.values())); } return convertedValue; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/store/statistics/binning/TimeRangeFieldValueBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.statistics.binning; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.stream.Stream; import org.apache.commons.lang3.ArrayUtils; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider.UnitConverter; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy; import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints; import org.threeten.extra.Interval; import com.beust.jcommander.Parameter; /** * Statistic binning strategy that bins statistic values by the temporal representation of the value * of a given field. It bins time values by a temporal periodicity (any time unit), default to * daily. A statistic using this binning strategy can be constrained using * org.threeten.extra.Interval class as a constraint). 
*/ public class TimeRangeFieldValueBinningStrategy extends FieldValueBinningStrategy { protected static Unit DEFAULT_PERIODICITY = Unit.DAY; public static final String NAME = "TIME_RANGE"; @Parameter( names = {"--binInteval"}, required = false, description = "The interval or periodicity at which to bin time values. Defaults to daily.", converter = UnitConverter.class) protected Unit periodicity = DEFAULT_PERIODICITY; @Parameter( names = {"-tz", "--timezone"}, required = false, description = "The timezone to convert all incoming time values into. Defaults to GMT.") protected String timezone = "GMT"; private TemporalBinningStrategy binningStrategy; @Override public String getStrategyName() { return NAME; } public TimeRangeFieldValueBinningStrategy() { super(); } public TimeRangeFieldValueBinningStrategy(final String... fields) { super(fields); } public TimeRangeFieldValueBinningStrategy(final Unit periodicity, final String... fields) { this("GMT", periodicity, fields); } public TimeRangeFieldValueBinningStrategy( final String timezone, final Unit periodicity, final String... 
fields) { super(fields); this.periodicity = periodicity; this.timezone = timezone; binningStrategy = new TemporalBinningStrategy(periodicity, timezone); } @Override public String getDescription() { return "Bin the statistic by the time value of a specified field."; } @SuppressWarnings("unchecked") @Override public Class[] supportedConstraintClasses() { return ArrayUtils.addAll( super.supportedConstraintClasses(), Date.class, Calendar.class, Number.class, Interval.class, Interval[].class); } @Override public ByteArrayConstraints singleFieldConstraints(final Object constraint) { if (constraint instanceof Interval) { return new ExplicitConstraints(getNumericBins((Interval) constraint)); } else if (constraint instanceof Interval[]) { final Stream stream = Arrays.stream((Interval[]) constraint).map(this::getNumericBins); return new ExplicitConstraints(stream.flatMap(Arrays::stream).toArray(ByteArray[]::new)); } final long timeMillis = TimeUtils.getTimeMillis(constraint); if (timeMillis != TimeUtils.RESERVED_MILLIS_FOR_NULL) { return new ExplicitConstraints(new ByteArray[] {getTimeBin(timeMillis)}); } return super.constraints(constraint); } @Override protected ByteArray getSingleBin(final Object value) { final long millis = TimeUtils.getTimeMillis(value); if (millis == TimeUtils.RESERVED_MILLIS_FOR_NULL) { return new ByteArray(); } return getTimeBin(millis); } private ByteArray getTimeBin(final long millis) { return new ByteArray(binningStrategy.getBinId(millis)); } private ByteArray[] getNumericBins(final Interval value) { final BinRange[] bins = binningStrategy.getNormalizedRanges(value); return Arrays.stream(bins).map(BinRange::getBinId).map(ByteArray::new).toArray( ByteArray[]::new); } @Override public byte[] toBinary() { final byte[] parentBinary = super.toBinary(); final byte[] timezoneBytes = StringUtils.stringToBinary(timezone); final ByteBuffer buf = ByteBuffer.allocate( parentBinary.length + VarintUtils.unsignedIntByteLength(periodicity.ordinal()) + 
VarintUtils.unsignedIntByteLength(timezoneBytes.length) + timezoneBytes.length); VarintUtils.writeUnsignedInt(periodicity.ordinal(), buf); VarintUtils.writeUnsignedInt(timezoneBytes.length, buf); buf.put(timezoneBytes); buf.put(parentBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); periodicity = Unit.values()[VarintUtils.readUnsignedInt(buf)]; final byte[] timezoneBinary = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(timezoneBinary); timezone = StringUtils.stringFromBinary(timezoneBinary); binningStrategy = new TemporalBinningStrategy(periodicity, timezone); final byte[] parentBinary = new byte[buf.remaining()]; buf.get(parentBinary); super.fromBinary(parentBinary); } public Interval getInterval(final ByteArray binId) { return getInterval(binId.getBytes()); } private Interval getInterval(final byte[] binId) { return binningStrategy.getInterval(binId); } @Override public String binToString(final ByteArray bin) { final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes()); final StringBuffer sb = new StringBuffer(); while (buffer.remaining() > 0) { if (buffer.get() == 0x0) { sb.append(""); } else { final byte[] binId = new byte[binningStrategy.getFixedBinIdSize()]; buffer.get(binId); sb.append(getInterval(binId).toString()); } if (buffer.remaining() > 0) { sb.append(buffer.getChar()); } } return sb.toString(); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/DWithinFilterVisitor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import org.apache.commons.lang3.tuple.Pair; import org.geotools.filter.LiteralExpressionImpl; import org.geotools.filter.spatial.IntersectsImpl; import org.geotools.filter.visitor.DuplicatingFilterVisitor; import org.locationtech.jts.geom.Geometry; import org.opengis.filter.expression.Literal; import org.opengis.filter.expression.PropertyName; import org.opengis.filter.spatial.DWithin; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DWithinFilterVisitor extends DuplicatingFilterVisitor { private static final Logger LOGGER = LoggerFactory.getLogger(DWithinFilterVisitor.class); /** * DWithin spatial operator will find out if a feature in a datalayer is within X meters of a * point, line, or polygon. 
*/ @Override public Object visit(final DWithin filter, final Object extraData) { IntersectsImpl newWithImpl = null; try { if ((filter.getExpression1() instanceof PropertyName) && (filter.getExpression2() instanceof Literal)) { Pair geometryAndDegrees; geometryAndDegrees = GeometryUtils.buffer( GeometryUtils.getDefaultCRS(), filter.getExpression2().evaluate(extraData, Geometry.class), filter.getDistanceUnits(), filter.getDistance()); newWithImpl = new IntersectsImpl( filter.getExpression1(), new LiteralExpressionImpl(geometryAndDegrees.getLeft())); } else if ((filter.getExpression2() instanceof PropertyName) && (filter.getExpression1() instanceof Literal)) { final Pair geometryAndDegrees = GeometryUtils.buffer( GeometryUtils.getDefaultCRS(), filter.getExpression1().evaluate(extraData, Geometry.class), filter.getDistanceUnits(), filter.getDistance()); newWithImpl = new IntersectsImpl( new LiteralExpressionImpl(geometryAndDegrees.getLeft()), filter.getExpression2()); } } catch (final TransformException e) { LOGGER.error("Cannot transform geoemetry to support provide distance", e); return super.visit(filter, extraData); } return newWithImpl; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractAttributesFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.geotools.filter.visitor.DefaultFilterVisitor; import org.opengis.filter.expression.PropertyName; /** This class can be used to get the list of attributes used in a query */ public class ExtractAttributesFilter extends DefaultFilterVisitor { public ExtractAttributesFilter() {} @Override public Object visit(final PropertyName expression, final Object data) { if ((data != null) && (data instanceof Collection)) { ((Collection) data).add(expression.getPropertyName()); return data; } final Set names = new HashSet<>(); names.add(expression.getPropertyName()); return names; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractGeometryFilterVisitor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import org.apache.commons.lang3.tuple.Pair; import org.geotools.filter.visitor.NullFilterVisitor; import org.geotools.geometry.jts.JTS; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.CRS; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.filter.And; import org.opengis.filter.ExcludeFilter; import org.opengis.filter.Filter; import org.opengis.filter.Id; import org.opengis.filter.IncludeFilter; import org.opengis.filter.Not; import org.opengis.filter.Or; import org.opengis.filter.PropertyIsBetween; import org.opengis.filter.PropertyIsEqualTo; import org.opengis.filter.PropertyIsGreaterThan; import org.opengis.filter.PropertyIsGreaterThanOrEqualTo; import org.opengis.filter.PropertyIsLessThan; import org.opengis.filter.PropertyIsLessThanOrEqualTo; import org.opengis.filter.PropertyIsLike; import org.opengis.filter.PropertyIsNotEqualTo; import org.opengis.filter.PropertyIsNull; import org.opengis.filter.expression.Add; import org.opengis.filter.expression.Divide; import org.opengis.filter.expression.Function; import org.opengis.filter.expression.Literal; import org.opengis.filter.expression.Multiply; import org.opengis.filter.expression.NilExpression; import org.opengis.filter.expression.PropertyName; import org.opengis.filter.expression.Subtract; import org.opengis.filter.spatial.BBOX; import org.opengis.filter.spatial.Beyond; 
import org.opengis.filter.spatial.Contains; import org.opengis.filter.spatial.Crosses; import org.opengis.filter.spatial.DWithin; import org.opengis.filter.spatial.Disjoint; import org.opengis.filter.spatial.Equals; import org.opengis.filter.spatial.Intersects; import org.opengis.filter.spatial.Overlaps; import org.opengis.filter.spatial.Touches; import org.opengis.filter.spatial.Within; import org.opengis.geometry.BoundingBox; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is used to exact single query geometry and its associated predicate from a CQL * expression. There are three possible outcomes based on the extracted results. 1) If CQL * expression is simple then we are able to extract query geometry and predicate successfully. 2) If * CQL expression combines multiple dissimilar geometric relationships (i.e. "BBOX(geom,...) AND * TOUCHES(geom,...)") then we wont be able combine that into a single query geometry and predicate. * In which case, we will only return query geometry for the purpose of creating linear constraints * and predicate value will be null. However, we are able to combine multiple geometric * relationships into one query/predicate if their predicates are same (i.e. "INTERSECTS(geom,...) * AND INTERSECTS(geom,...)") 3) In some case, we won't be able to extract query geometry and * predicate at all. In that case, we simply return null. This occurs if CQL expression doesn't * contain any geometric constraints or CQL expression has non-inclusive filter (i.e. NOT or * DISJOINT(...)). 
*/ public class ExtractGeometryFilterVisitor extends NullFilterVisitor { private static Logger LOGGER = LoggerFactory.getLogger(ExtractGeometryFilterVisitor.class); private final CoordinateReferenceSystem crs; private final String attributeOfInterest; /** * This FilterVisitor is stateless - use ExtractGeometryFilterVisitor.BOUNDS_VISITOR. You may also * subclass in order to reuse this functionality in your own FilterVisitor implementation. */ public ExtractGeometryFilterVisitor( final CoordinateReferenceSystem crs, final String attributeOfInterest) { this.crs = crs; this.attributeOfInterest = attributeOfInterest; } /** * @param filter * @param crs * @return null if empty constraint (infinite not supported) */ public static ExtractGeometryFilterVisitorResult getConstraints( final Filter filter, final CoordinateReferenceSystem crs, final String attributeOfInterest) { final ExtractGeometryFilterVisitorResult geoAndCompareOpData = (ExtractGeometryFilterVisitorResult) filter.accept( new ExtractGeometryFilterVisitor(crs, attributeOfInterest), null); if (geoAndCompareOpData == null) { return null; } final Geometry geo = geoAndCompareOpData.getGeometry(); // empty or infinite geometry simply return null as we can't create // linear constraints from if ((geo == null) || geo.isEmpty()) { return null; } final double area = geo.getArea(); if (Double.isInfinite(area) || Double.isNaN(area)) { return null; } return geoAndCompareOpData; } /** * Produce an ReferencedEnvelope from the provided data parameter. 
* * @param data * @return ReferencedEnvelope */ private Geometry bbox(final Object data) { try { if (data == null) { return null; } else if (data instanceof Geometry) { return (Geometry) data; } else if (data instanceof ReferencedEnvelope) { return new GeometryFactory().toGeometry(((ReferencedEnvelope) data).transform(crs, true)); } else if (data instanceof Envelope) { return new GeometryFactory().toGeometry((Envelope) data); } else if (data instanceof CoordinateReferenceSystem) { return new GeometryFactory().toGeometry( new ReferencedEnvelope((CoordinateReferenceSystem) data).transform(crs, true)); } } catch (TransformException | FactoryException e) { LOGGER.warn("Unable to transform geometry", e); return null; } throw new ClassCastException("Could not cast data to ReferencedEnvelope"); } @Override public Object visit(final ExcludeFilter filter, final Object data) { return new ExtractGeometryFilterVisitorResult(null, null); } @Override public Object visit(final IncludeFilter filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } private Geometry infinity() { return GeometryUtils.infinity(); } @Override public Object visit(final BBOX filter, final Object data) { if (attributeOfInterest.equals(filter.getExpression1().toString())) { final Geometry bbox = bbox(data); final BoundingBox referencedBBox = filter.getBounds(); Geometry bounds = new GeometryFactory().toGeometry( new Envelope( referencedBBox.getMinX(), referencedBBox.getMaxX(), referencedBBox.getMinY(), referencedBBox.getMaxY())); if ((crs != null) && (referencedBBox.getCoordinateReferenceSystem() != null) && !crs.equals(referencedBBox.getCoordinateReferenceSystem())) { try { bounds = JTS.transform( bounds, CRS.findMathTransform(referencedBBox.getCoordinateReferenceSystem(), crs, true)); } catch (MismatchedDimensionException | TransformException | FactoryException e) { LOGGER.error("Unable to transforma bbox", e); } } if (bbox != null) { return bbox.union(bounds); } 
else { return new ExtractGeometryFilterVisitorResult(bounds, CompareOperation.INTERSECTS); } } else { return new ExtractGeometryFilterVisitorResult(infinity(), null); } } /** * Please note we are only visiting literals involved in spatial operations. * * @param expression hopefully a Geometry or Envelope * @param data Incoming BoundingBox (or Envelope or CRS) * @return ReferencedEnvelope updated to reflect literal */ @Override public Object visit(final Literal expression, final Object data) { final Object value = expression.getValue(); if (value instanceof Geometry) { final Geometry geometry = (Geometry) value; return geometry; } else { LOGGER.info("LiteralExpression ignored!"); } return bbox(data); } @Override public Object visit(final And filter, final Object data) { ExtractGeometryFilterVisitorResult finalResult = null; for (final Filter f : filter.getChildren()) { final Object obj = f.accept(this, data); if ((obj != null) && (obj instanceof ExtractGeometryFilterVisitorResult)) { final ExtractGeometryFilterVisitorResult currentResult = (ExtractGeometryFilterVisitorResult) obj; final Geometry currentGeom = currentResult.getGeometry(); final double currentArea = currentGeom.getArea(); if (finalResult == null) { finalResult = currentResult; } else if (!Double.isInfinite(currentArea) && !Double.isNaN(currentArea)) { // if predicates match then we can combine the geometry as // well as predicate if (currentResult.matchPredicate(finalResult)) { finalResult = new ExtractGeometryFilterVisitorResult( finalResult.getGeometry().intersection(currentGeom), currentResult.getCompareOp()); } else { // if predicate doesn't match then still combine // geometry but set predicate to null finalResult = new ExtractGeometryFilterVisitorResult( finalResult.getGeometry().intersection(currentGeom), null); } } else { finalResult = new ExtractGeometryFilterVisitorResult(finalResult.getGeometry(), null); } } } return finalResult; } @Override public Object visit(final Not filter, final 
Object data) { // no matter what we have to return an infinite envelope // rationale // !(finite envelope) -> an unbounded area -> infinite // !(non spatial filter) -> infinite (no spatial concern) // !(infinite) -> ... infinite, as the first infinite could be the // result // of !(finite envelope) return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final Or filter, final Object data) { ExtractGeometryFilterVisitorResult finalResult = new ExtractGeometryFilterVisitorResult( new GeometryFactory().toGeometry(new Envelope()), null); for (final Filter f : filter.getChildren()) { final Object obj = f.accept(this, data); if ((obj != null) && (obj instanceof ExtractGeometryFilterVisitorResult)) { final ExtractGeometryFilterVisitorResult currentResult = (ExtractGeometryFilterVisitorResult) obj; final Geometry currentGeom = currentResult.getGeometry(); final double currentArea = currentGeom.getArea(); if (finalResult.getGeometry().isEmpty()) { finalResult = currentResult; } else if (!Double.isInfinite(currentArea) && !Double.isNaN(currentArea)) { if (currentResult.matchPredicate(finalResult)) { finalResult = new ExtractGeometryFilterVisitorResult( finalResult.getGeometry().union(currentGeom), currentResult.getCompareOp()); } else { finalResult = new ExtractGeometryFilterVisitorResult( finalResult.getGeometry().union(currentGeom), null); } } else { finalResult = new ExtractGeometryFilterVisitorResult(finalResult.getGeometry(), null); } } } if (finalResult.getGeometry().isEmpty()) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } return finalResult; } @Override public Object visit(final Beyond filter, final Object data) { // beyond a certain distance from a finite object, no way to limit it return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final Contains filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new 
ExtractGeometryFilterVisitorResult(infinity(), null); } data = filter.getExpression2().accept(this, data); // since predicate is defined relative to the query geometry we are // using WITHIN // which is converse of CONTAINS operator // CQL Expression "CONTAINS(geo, QueryGeometry)" is equivalent to // QueryGeometry.WITHIN(geo) return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.WITHIN); } @Override public Object visit(final Crosses filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } data = filter.getExpression2().accept(this, data); return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.CROSSES); } @Override public Object visit(final Disjoint filter, final Object data) { // disjoint does not define a rectangle, but a hole in the // Cartesian plane, no way to limit it return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final DWithin filter, final Object data) { final Geometry bbox = bbox(data); if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } // we have to take the reference geometry bbox and // expand it by the distance. 
// We ignore the unit of measure for the moment Literal geometry = null; if ((filter.getExpression1() instanceof PropertyName) && (filter.getExpression2() instanceof Literal)) { geometry = (Literal) filter.getExpression2(); } if ((filter.getExpression2() instanceof PropertyName) && (filter.getExpression1() instanceof Literal)) { geometry = (Literal) filter.getExpression1(); } // we cannot desume a bbox from this filter if (geometry == null) { return null; } final Geometry geom = geometry.evaluate(null, Geometry.class); if (geom == null) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } Pair geometryAndDegrees; try { geometryAndDegrees = GeometryUtils.buffer(crs, geom, filter.getDistanceUnits(), filter.getDistance()); } catch (final TransformException e) { LOGGER.error("Cannot transform geometry to CRS", e); geometryAndDegrees = Pair.of(geom, filter.getDistance()); } if (bbox != null) { return geometryAndDegrees.getLeft().union(bbox); } else { return new ExtractGeometryFilterVisitorResult( geometryAndDegrees.getLeft(), CompareOperation.INTERSECTS); } } @Override public Object visit(final Equals filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } data = filter.getExpression2().accept(this, data); return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.EQUALS); } @Override public Object visit(final Intersects filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } data = filter.getExpression2().accept(this, data); return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.INTERSECTS); } @Override public Object visit(final Overlaps filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } data = 
filter.getExpression2().accept(this, data); return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.OVERLAPS); } @Override public Object visit(final Touches filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } data = filter.getExpression2().accept(this, data); return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.TOUCHES); } @Override public Object visit(final Within filter, Object data) { if (!attributeOfInterest.equals(filter.getExpression1().toString())) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } data = filter.getExpression2().accept(this, data); // since predicate is defined relative to the query geometry we are // using CONTAIN // which is converse of WITHIN operator // CQL Expression "WITHIN(geo, QueryGeometry)" is equivalent to // QueryGeometry.CONTAINS(geo) return new ExtractGeometryFilterVisitorResult((Geometry) data, CompareOperation.CONTAINS); } @Override public Object visit(final Add expression, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final Divide expression, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final Function expression, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final Id filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final Multiply expression, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final NilExpression expression, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsBetween filter, final Object data) { 
return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsEqualTo filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsGreaterThan filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsGreaterThanOrEqualTo filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsLessThan filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsLessThanOrEqualTo filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsLike filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsNotEqualTo filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyIsNull filter, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visit(final PropertyName expression, final Object data) { return new ExtractGeometryFilterVisitorResult(null, null); } @Override public Object visit(final Subtract expression, final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } @Override public Object visitNullFilter(final Object data) { return new ExtractGeometryFilterVisitorResult(infinity(), null); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractGeometryFilterVisitorResult.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** */ package org.locationtech.geowave.core.geotime.util; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.jts.geom.Geometry; /** * @author Ashish Shah

This class is used to store results extracted from * ExtractGeometryFilterVisitor class. It simply stores query geometry and its associated * predicate. */ public final class ExtractGeometryFilterVisitorResult { private final Geometry geometry; private final CompareOperation compareOp; public ExtractGeometryFilterVisitorResult( final Geometry geometry, final CompareOperation compareOp) { this.geometry = geometry; this.compareOp = compareOp; } /** @return geometry */ public Geometry getGeometry() { return geometry; } /** @return predicate associated with geometry */ public CompareOperation getCompareOp() { return compareOp; } /** * @param otherResult is ExtractGeometryFilterVisitorResult object * @return True if predicates of both ExtractGeometryFilterVisitorResult objects are same */ public boolean matchPredicate(final ExtractGeometryFilterVisitorResult otherResult) { return (compareOp == otherResult.getCompareOp()); } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/ExtractTimeFilterVisitor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import java.sql.Timestamp; import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.geotools.data.Query; import org.geotools.filter.visitor.NullFilterVisitor; import org.geotools.util.Converters; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraints; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet; import org.locationtech.geowave.core.geotime.store.query.TemporalRange; import org.opengis.filter.And; import org.opengis.filter.ExcludeFilter; import org.opengis.filter.Filter; import org.opengis.filter.Id; import org.opengis.filter.IncludeFilter; import org.opengis.filter.Not; import org.opengis.filter.Or; import org.opengis.filter.PropertyIsBetween; import org.opengis.filter.PropertyIsEqualTo; import org.opengis.filter.PropertyIsGreaterThan; import org.opengis.filter.PropertyIsGreaterThanOrEqualTo; import org.opengis.filter.PropertyIsLessThan; import org.opengis.filter.PropertyIsLessThanOrEqualTo; import org.opengis.filter.PropertyIsLike; import org.opengis.filter.PropertyIsNil; import org.opengis.filter.PropertyIsNotEqualTo; import org.opengis.filter.PropertyIsNull; import org.opengis.filter.expression.Add; import org.opengis.filter.expression.Divide; import org.opengis.filter.expression.Expression; import org.opengis.filter.expression.Function; import org.opengis.filter.expression.Literal; import org.opengis.filter.expression.NilExpression; import org.opengis.filter.expression.PropertyName; import org.opengis.filter.expression.Subtract; import 
org.opengis.filter.spatial.BBOX; import org.opengis.filter.spatial.Beyond; import org.opengis.filter.spatial.Contains; import org.opengis.filter.spatial.Crosses; import org.opengis.filter.spatial.DWithin; import org.opengis.filter.spatial.Disjoint; import org.opengis.filter.spatial.Equals; import org.opengis.filter.spatial.Intersects; import org.opengis.filter.spatial.Overlaps; import org.opengis.filter.spatial.Touches; import org.opengis.filter.spatial.Within; import org.opengis.filter.temporal.After; import org.opengis.filter.temporal.AnyInteracts; import org.opengis.filter.temporal.Before; import org.opengis.filter.temporal.Begins; import org.opengis.filter.temporal.BegunBy; import org.opengis.filter.temporal.During; import org.opengis.filter.temporal.EndedBy; import org.opengis.filter.temporal.Ends; import org.opengis.filter.temporal.Meets; import org.opengis.filter.temporal.MetBy; import org.opengis.filter.temporal.OverlappedBy; import org.opengis.filter.temporal.TContains; import org.opengis.filter.temporal.TEquals; import org.opengis.filter.temporal.TOverlaps; import org.opengis.temporal.Instant; import org.opengis.temporal.Period; import org.opengis.temporal.Position; /** * This class can be used to get Time range from an OpenGIS filter object. GeoWave then uses this * time range to perform a spatial intersection query. * *

Only those time elements associated with an index are extracted. At the moment, the adapter * only supports temporal indexing on a single attribute or a pair of attributes representing a time * range. */ public class ExtractTimeFilterVisitor extends NullFilterVisitor { private final List validParamRanges = new LinkedList<>(); private boolean approximation = false; public ExtractTimeFilterVisitor() {} public ExtractTimeFilterVisitor(final TimeDescriptors timeDescriptors) { if (timeDescriptors.hasTime() && (timeDescriptors.getStartRange() != null) && (timeDescriptors.getEndRange() != null)) { addRangeVariables( timeDescriptors.getStartRange().getLocalName(), timeDescriptors.getEndRange().getLocalName()); } } public void addRangeVariables(final String start, final String end) { validParamRanges.add(new String[] {start, end}); } public TemporalConstraintsSet getConstraints(final Filter filter) { final TemporalConstraintsSet constrainsSet = getRawConstraints(filter); constrainsSet.setExact(!approximation); for (final String[] range : validParamRanges) { if (constrainsSet.hasConstraintsFor(range[0]) || constrainsSet.hasConstraintsFor(range[1])) { final TemporalConstraints start = (constrainsSet.hasConstraintsFor(range[0])) ? constrainsSet.getConstraintsFor(range[0]) : constrainsSet.getConstraintsFor(range[1]); // Note: getConstraints has a side effect that is returns a // constraint--full range, if necessary // so if start and end are both not specific, the prior line // would create the end // thus sconstraints and econstraints will be identical final TemporalConstraints end = (constrainsSet.hasConstraintsFor(range[1])) ? 
constrainsSet.getConstraintsFor(range[1]) : start; constrainsSet.removeConstraints(range[0], range[1]); final TemporalConstraints constraintsForRange = constrainsSet.getConstraintsForRange(range[0], range[1]); constraintsForRange.replaceWithIntersections( new TemporalConstraints( new TemporalRange( start.getStartRange().getStartTime(), end.getEndRange().getEndTime()), constraintsForRange.getName())); } } return constrainsSet; } public TemporalConstraintsSet getConstraints(final Query query) { return getConstraints(query.getFilter()); } private TemporalConstraintsSet getRawConstraints(final Filter filter) { final Object output = filter.accept(this, null); if (output instanceof TemporalConstraintsSet) { return (TemporalConstraintsSet) output; } else if (output instanceof ParameterTimeConstraint) { final ParameterTimeConstraint paramConstraint = (ParameterTimeConstraint) output; final TemporalConstraintsSet constraintSet = new TemporalConstraintsSet(); constraintSet.getConstraintsFor(paramConstraint.getName()).replaceWithMerged(paramConstraint); return constraintSet; } return new TemporalConstraintsSet(); } /** * Produce an ReferencedEnvelope from the provided data parameter. 
   * Interprets a literal value as temporal constraints. Accepts {@link Date}, {@link Timestamp},
   * {@link Number} (treated as epoch milliseconds), {@link TemporalRange},
   * {@link TemporalConstraints}, and the OpenGIS temporal types {@link Period} and {@link Instant};
   * any other value is run through {@code Converters.convert(data, Date.class)} as a last resort.
   *
   * @param data the value to interpret (may be null)
   * @return null for null input; otherwise the equivalent constraints, or empty constraints when
   *         the value cannot be interpreted as time. (The original tag said ReferencedEnvelope,
   *         which does not match the declared return type.)
   */
  private TemporalConstraints btime(final Object data) {
    if (data == null) {
      return null;
    }
    if (data instanceof Date) {
      // point-in-time literal: degenerate range [t, t]
      return toSet(new TemporalRange((Date) data, (Date) data));
    } else if (data instanceof Timestamp) {
      return toSet(new TemporalRange((Timestamp) data, (Timestamp) data));
    } else if (data instanceof Number) {
      // numeric literal interpreted as epoch milliseconds
      final long val = ((Number) data).longValue();
      return toSet(new TemporalRange(new Date(val), new Date(val)));
    } else if (data instanceof TemporalRange) {
      return toSet((TemporalRange) data);
    } else if (data instanceof TemporalConstraints) {
      return (TemporalConstraints) data;
    } else if (data instanceof Period) {
      // all periods are exclusive; shrink each endpoint by one millisecond
      final Position beginPosition = ((Period) data).getBeginning().getPosition();
      final Position endPosition = ((Period) data).getEnding().getPosition();
      // NOTE(review): 'e' is initialized to START_TIME (not END_TIME); if the end position
      // carries neither a Date nor a Time this collapses to an empty result via the s > e
      // check below — confirm that is the intended fallback.
      Date s = TemporalRange.START_TIME, e = TemporalRange.START_TIME;
      if (beginPosition.getDate() != null) {
        // make it exclusive on start
        s = new Date(beginPosition.getDate().getTime() + 1);
      } else if (beginPosition.getTime() != null) {
        // make it exclusive on start
        s = new Date(beginPosition.getTime().getTime() + 1);
      }
      if (endPosition.getDate() != null) {
        // make it exclusive on end
        e = new Date(endPosition.getDate().getTime() - 1);
      } else if (endPosition.getTime() != null) {
        // make it exclusive on end
        e = new Date(endPosition.getTime().getTime() - 1);
      }
      if (s.getTime() > e.getTime()) {
        // inverted (or unresolvable) period -> no constraint
        return new TemporalConstraints();
      }
      return toSet(new TemporalRange(s, e));
    } else if (data instanceof Instant) {
      // OpenGIS instant: degenerate range at the instant's position
      final Position beginPosition = ((Instant) data).getPosition();
      Date s = TemporalRange.START_TIME;
      if (beginPosition.getDate() != null) {
        s = beginPosition.getDate();
      } else if (beginPosition.getTime() != null) {
        s = beginPosition.getTime();
      }
      return toSet(new TemporalRange(s, s));
    }
    // fall back to GeoTools converters for anything else (e.g. String dates)
    final Date convertedDate = Converters.convert(data, Date.class);
    if (convertedDate != null) {
      return btime(convertedDate);
    }
    return new TemporalConstraints();
  }

  /** EXCLUDE matches nothing; contributes no temporal constraint. */
  @Override
  public Object visit(final ExcludeFilter filter, final Object data) {
    return new TemporalConstraints();
  }

  /** INCLUDE matches everything; contributes no temporal constraint. */
  @Override
  public Object visit(final IncludeFilter filter, final Object data) {
    return new TemporalConstraints();
  }

  /** Wraps a single range in a fresh {@link TemporalConstraints} container. */
  private TemporalConstraints toSet(final TemporalRange range) {
    final TemporalConstraints contraints = new TemporalConstraints();
    contraints.add(range);
    return contraints;
  }

  /**
   * Please note we are only visiting literals involved in time.
   *
   * @param expression a literal time
   * @param data unused
   * @return temporal constraints updated to reflect literal
   */
  @Override
  public Object visit(final Literal expression, final Object data) {
    final Object value = expression.getValue();
    return btime(value);
  }

  /**
   * AND: intersects the child constraints per parameter name, then collapses any configured
   * start/end parameter pairs (validParamRanges) into a single combined range constraint.
   */
  @Override
  public Object visit(final And filter, final Object data) {
    final TemporalConstraintsSet constraints = new TemporalConstraintsSet();
    for (final Filter f : filter.getChildren()) {
      final Object output = f.accept(this, data);
      if (output instanceof ParameterTimeConstraint) {
        // named single-parameter constraint: intersect into the set
        final ParameterTimeConstraint ranges = (ParameterTimeConstraint) output;
        constraints.getConstraintsFor(ranges.getName()).replaceWithIntersections(ranges);
      } else if (output instanceof TemporalConstraintsSet) {
        // nested composite: intersect each named entry
        final TemporalConstraintsSet rangeSet = (TemporalConstraintsSet) output;
        for (final Map.Entry entry : rangeSet.getSet()) {
          constraints.getConstraintsFor(entry.getKey()).replaceWithIntersections(entry.getValue());
        }
      }
    }
    // merge paired start/end parameters into a single range constraint
    for (final String[] range : validParamRanges) {
      if (constraints.hasConstraintsFor(range[0]) && constraints.hasConstraintsFor(range[1])) {
        final TemporalConstraints start = constraints.getConstraintsFor(range[0]);
        final TemporalConstraints end = constraints.getConstraintsFor(range[1]);
        constraints.removeConstraints(range[0], range[1]);
        // TODO: make this logic more robust
        if (start.getEndRange().getEndTime().after(end.getStartRange().getStartTime())) {
          // does this really make sense? seems like start should always be the start time and end
          // should always be the end time, but perhaps with multiple and's and or's it probably
          // gets complicated such that this is the only working logic
          constraints.getConstraintsForRange(range[0], range[1]).add(
              new TemporalRange(
                  end.getStartRange().getStartTime(),
                  start.getEndRange().getEndTime()));
        } else {
          // if there are multiple non-intersecting ranges, this is
          // an approximation
          approximation |= (start.getRanges().size() > 1) || (end.getRanges().size() > 1);
          constraints.getConstraintsForRange(range[0], range[1]).add(
              new TemporalRange(
                  start.getStartRange().getStartTime(),
                  end.getEndRange().getEndTime()));
        }
      }
    }
    return constraints;
  }

  /** True when any AND-merge had to approximate multiple non-intersecting ranges. */
  public boolean isApproximation() {
    return approximation;
  }

  /** NOT: complements each named constraint produced by the wrapped filter. */
  @Override
  public Object visit(final Not filter, final Object data) {
    final Object output = filter.getFilter().accept(this, data);
    if (output instanceof ParameterTimeConstraint) {
      return not((ParameterTimeConstraint) output);
    } else if (output instanceof TemporalConstraintsSet) {
      final TemporalConstraintsSet newRangeSet = new TemporalConstraintsSet();
      final TemporalConstraintsSet rangeSet = (TemporalConstraintsSet) output;
      for (final Map.Entry entry : rangeSet.getSet()) {
        newRangeSet.getConstraintsFor(entry.getKey()).replaceWithMerged(not(entry.getValue()));
      }
      return newRangeSet;
    }
    return output;
  }

  /**
   * Complements a set of ranges over [START_TIME, END_TIME]: emits the gaps between consecutive
   * ranges (exclusive by one millisecond at each boundary) under the same parameter name.
   */
  private TemporalConstraints not(final TemporalConstraints constraints) {
    final ParameterTimeConstraint notRanges = new ParameterTimeConstraint(constraints.getName());
    notRanges.empty();
    Date lastMax = TemporalRange.START_TIME;
    for (final TemporalRange range : constraints.getRanges()) {
      if (range.getStartTime().after(TemporalRange.START_TIME)) {
        // gap between the previous range's end and this range's start
        notRanges.add(new TemporalRange(lastMax, new Date(range.getStartTime().getTime() - 1)));
      }
      lastMax = range.getEndTime();
    }
    if (!constraints.isEmpty()
        && (TemporalRange.END_TIME.after(constraints.getEndRange().getEndTime()))) {
      // trailing gap up to END_TIME
      notRanges.add(new TemporalRange(lastMax,
          TemporalRange.END_TIME));
    }
    return notRanges;
  }

  /** OR: unions (merges) the child constraints per parameter name. */
  @Override
  public Object visit(final Or filter, final Object data) {
    final TemporalConstraintsSet constraints = new TemporalConstraintsSet();
    for (final Filter f : filter.getChildren()) {
      final Object output = f.accept(this, data);
      if (output instanceof ParameterTimeConstraint) {
        final ParameterTimeConstraint ranges = (ParameterTimeConstraint) output;
        constraints.getConstraintsFor(ranges.getName()).replaceWithMerged(ranges);
      } else if (output instanceof TemporalConstraintsSet) {
        final TemporalConstraintsSet rangeSet = (TemporalConstraintsSet) output;
        for (final Map.Entry entry : rangeSet.getSet()) {
          constraints.getConstraintsFor(entry.getKey()).replaceWithMerged(entry.getValue());
        }
      }
    }
    return constraints;
  }

  // t1 > t2
  // t1.start > t2
  // t1 > t2.end
  // t1.start > t2.end
  @Override
  public Object visit(final After after, final Object data) {
    final TemporalConstraints leftResult = btime(after.getExpression1().accept(this, data));
    final TemporalConstraints rightResult = btime(after.getExpression2().accept(this, data));
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property after value: property must lie in (right max, END_TIME]
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              rightResult.getMaxOr(TemporalRange.START_TIME, 1),
              TemporalRange.END_TIME),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value after property: property must lie in [START_TIME, left min)
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              leftResult.getMinOr(TemporalRange.END_TIME, -1)),
          rightResult.getName());
    }
    // property after property: no usable constraint
    return new TemporalConstraints();
  }

  /** AnyInteracts is not translatable into a range constraint; contributes nothing. */
  @Override
  public Object visit(final AnyInteracts anyInteracts, final Object data) {
    return new TemporalConstraints();
  }

  // t1 < t2
  // t1.end < t2
  // t1 < t2.start
  // t1.end < t2.start
  @Override
  public Object visit(final Before before, final Object data) {
    final TemporalConstraints leftResult =
        btime(before.getExpression1().accept(this, data));
    final TemporalConstraints rightResult = btime(before.getExpression2().accept(this, data));
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property before value: property must lie in [START_TIME, right min)
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              rightResult.getMinOr(TemporalRange.END_TIME, -1)),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value before property: property must lie in (left max, END_TIME]
      return new ParameterTimeConstraint(
          new TemporalRange(
              leftResult.getMaxOr(TemporalRange.START_TIME, 1),
              TemporalRange.END_TIME),
          rightResult.getName());
    }
    // property before property: no usable constraint
    return new TemporalConstraints();
  }

  // t1 = t2.start
  // t1.start = t2.start and t1.end < t2.end
  @Override
  public Object visit(final Begins begins, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) begins.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) begins.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property begins value: constrain the property to the value's ranges
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(rightResult.getRanges(), leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value begins property: property starts at/after the value's min
      return new ParameterTimeConstraint(
          new TemporalRange(
              leftResult.getMinOr(TemporalRange.START_TIME, 0),
              TemporalRange.END_TIME),
          rightResult.getName());
    }
    // property begins property
    return new TemporalConstraints();
  }

  // t1.start = t2
  // t1.start = t2.start and t1.end > t2.end
  @Override
  public Object visit(final BegunBy begunBy, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) begunBy.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) begunBy.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property begun by value: property starts at/after the value's min
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              rightResult.getMinOr(TemporalRange.START_TIME, 0),
              TemporalRange.END_TIME),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value begun by property: constrain the property to the value's ranges
      return new ParameterTimeConstraint(leftResult.getRanges(), rightResult.getName());
    }
    // property begun by property
    return new TemporalConstraints();
  }

  // t2.start < t1 < t2.end
  // t1.start > t2.start and t1.end < t2.end
  @Override
  public Object visit(final During during, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) during.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) during.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property during value: constrain the property to the value's ranges
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(rightResult.getRanges(), leftResult.getName());
    }
    // value during property
    else if (rightResult instanceof ParameterTimeConstraint) {
      return rightResult;
    }
    // property during property
    return new TemporalConstraints();
  }

  // t1.end = t2
  // t1.start < t2.start and t1.end = t2.end
  @Override
  public Object visit(final EndedBy endedBy, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) endedBy.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) endedBy.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property ended by value: property ends at/before the value's max
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              rightResult.getMaxOr(TemporalRange.END_TIME, 0)),
          leftResult.getName());
    } else if (rightResult instanceof
        ParameterTimeConstraint) {
      // value ended by property: constrain the property to the value's ranges
      return new ParameterTimeConstraint(leftResult.getRanges(), rightResult.getName());
    }
    // property ended by property
    return new TemporalConstraints();
  }

  // t1 = t2.end
  // t1.start > t2.start and t1.end = t2.end
  @Override
  public Object visit(final Ends ends, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) ends.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) ends.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property ends value: constrain the property to the value's ranges
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(rightResult.getRanges(), leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value ends property: property ends at/before the value's max
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              leftResult.getMaxOr(TemporalRange.END_TIME, 0)),
          rightResult.getName());
    }
    // property ends property
    return new TemporalConstraints();
  }

  // t1.end = t2.start
  @Override
  public Object visit(final Meets meets, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) meets.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) meets.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property meets value: property ends at/before the value's min
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              rightResult.getMinOr(TemporalRange.END_TIME, 0)),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // NOTE(review): this branch yields the name-only constructor (full range) — unlike
      // the other operators, no range is derived from leftResult; confirm intent.
      return new ParameterTimeConstraint(rightResult.getName());
    }
    // property meets property
    return new TemporalConstraints();
  }

  // t1.start = t2.end
  // met by
  @Override
  public Object visit(final MetBy metBy, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) metBy.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) metBy.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property met by value: property starts at/after the value's max
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              rightResult.getMaxOr(TemporalRange.START_TIME, 0),
              TemporalRange.END_TIME),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value met by property: property ends at/before the value's min
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              leftResult.getMinOr(TemporalRange.END_TIME, 0)),
          rightResult.getName());
    }
    // property met by property
    return new TemporalConstraints();
  }

  // t1.start > t2.start and t1.start < t2.end and t1.end > t2.end
  @Override
  public Object visit(final OverlappedBy overlappedBy, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) overlappedBy.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) overlappedBy.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property overlappedBy value: property starts strictly after the value's min
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              rightResult.getMinOr(TemporalRange.START_TIME, 1),
              TemporalRange.END_TIME),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value overlappedBy property: property ends strictly before the value's max
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              leftResult.getMaxOr(TemporalRange.END_TIME, -1)),
          rightResult.getName());
    }
    // property overlappedBy property
    return new TemporalConstraints();
  }

  // t1.start < t2 < t1.end
  // t1.start < t2.start and t2.end < t1.end
  @Override
  public Object visit(final TContains contains, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) contains.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) contains.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property contains value
    if (leftResult instanceof ParameterTimeConstraint) {
      // NOTE(review): every sibling operator returns ParameterTimeConstraint here, but this
      // returns a plain TemporalConstraints — the And/Or/Not visitors only recognize the
      // named subtype via instanceof, so this constraint may be dropped; confirm.
      return new TemporalConstraints(
          new TemporalRange(
              TemporalRange.START_TIME,
              rightResult.getMaxOr(TemporalRange.END_TIME, -1)),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value contains property: constrain the property to the value's ranges
      return new ParameterTimeConstraint(leftResult.getRanges(), rightResult.getName());
    }
    // property contains property
    return new TemporalConstraints();
  }

  /** TEquals: the property is constrained to exactly the literal side's constraints. */
  @Override
  public Object visit(final TEquals equals, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) equals.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) equals.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // property equals value: return the literal side's constraints unchanged
    // NOTE(review): the returned object is not re-wrapped with the property's name;
    // verify downstream consumers can still attribute it to the property.
    if (leftResult instanceof ParameterTimeConstraint) {
      return rightResult;
    }
    // value equals property
    if (rightResult instanceof ParameterTimeConstraint) {
      return leftResult;
    }
    // property equals property
    return new TemporalConstraints();
  }

  // t1.start < t2.start and t1.end > t2.start and t1.end < t2.end
  @Override
  public Object visit(final TOverlaps overlaps, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) overlaps.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) overlaps.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    // according to geotools documentation this is exclusive even though
    // "overlaps" seems it should imply inclusive
    // property overlaps value
    if (leftResult instanceof ParameterTimeConstraint) {
      return new
          TemporalConstraints(
          // NOTE(review): plain TemporalConstraints here, not ParameterTimeConstraint, unlike
          // the mirror branch below — the named subtype is what And/Or/Not recognize; confirm.
          new TemporalRange(
              TemporalRange.START_TIME,
              rightResult.getMaxOr(TemporalRange.END_TIME, -1)),
          leftResult.getName());
    } else if (rightResult instanceof ParameterTimeConstraint) {
      // value overlaps property: property starts strictly before the value's max
      return new ParameterTimeConstraint(
          new TemporalRange(
              leftResult.getMaxOr(TemporalRange.START_TIME, -1),
              TemporalRange.END_TIME),
          rightResult.getName());
    }
    // property overlaps property
    return new TemporalConstraints();
  }

  /** ID filters carry no temporal information. */
  @Override
  public Object visit(final Id filter, final Object data) {
    return new TemporalConstraints();
  }

  /**
   * BETWEEN: property constrained to [lower bound start, upper bound end].
   */
  @Override
  public Object visit(final PropertyIsBetween filter, final Object data) {
    final TemporalConstraints propertyExp =
        (TemporalConstraints) filter.getExpression().accept(this, data);
    final TemporalConstraints lowerBound =
        (TemporalConstraints) filter.getLowerBoundary().accept(this, data);
    final TemporalConstraints upperBound =
        (TemporalConstraints) filter.getUpperBoundary().accept(this, data);
    if (propertyExp.isEmpty()) {
      // the inspected expression is not temporal
      return new TemporalConstraints();
    }
    return new ParameterTimeConstraint(
        new TemporalRange(
            lowerBound.getStartRange().getStartTime(),
            upperBound.getEndRange().getEndTime()),
        propertyExp.getName());
  }

  /** '=': property pinned to the literal side's [start, end] span. */
  @Override
  public Object visit(final PropertyIsEqualTo filter, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) filter.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) filter.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              rightResult.getStartRange().getStartTime(),
              rightResult.getEndRange().getEndTime()),
          leftResult.getName());
    } else {
      return new ParameterTimeConstraint(
          new TemporalRange(
              leftResult.getStartRange().getStartTime(),
              leftResult.getEndRange().getEndTime()),
          rightResult.getName());
    }
  }

  /** '<>': everything outside the literal side's [start, end] span (two ranges). */
  @Override
  public Object visit(final PropertyIsNotEqualTo filter, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) filter.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) filter.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    if (leftResult instanceof ParameterTimeConstraint) {
      final ParameterTimeConstraint constraints =
          new ParameterTimeConstraint(
              new TemporalRange(
                  TemporalRange.START_TIME,
                  rightResult.getStartRange().getStartTime()),
              leftResult.getName());
      constraints.add(
          new TemporalRange(rightResult.getEndRange().getEndTime(), TemporalRange.END_TIME));
      return constraints;
    } else {
      final ParameterTimeConstraint constraints =
          new ParameterTimeConstraint(
              new TemporalRange(
                  TemporalRange.START_TIME,
                  leftResult.getStartRange().getStartTime()),
              rightResult.getName());
      constraints.add(
          new TemporalRange(leftResult.getEndRange().getEndTime(), TemporalRange.END_TIME));
      return constraints;
    }
  }

  /** '>': strict — endpoints shifted by one millisecond. */
  @Override
  public Object visit(final PropertyIsGreaterThan filter, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) filter.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) filter.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              new Date(rightResult.getStartRange().getStartTime().getTime() + 1),
              TemporalRange.END_TIME),
          leftResult.getName());
    } else {
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              new Date(leftResult.getStartRange().getStartTime().getTime() - 1)),
          rightResult.getName());
    }
  }

  /** '>=': inclusive boundary. */
  @Override
  public Object visit(final PropertyIsGreaterThanOrEqualTo filter, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) filter.getExpression1().accept(this,
            data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) filter.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(rightResult.getStartRange().getStartTime(), TemporalRange.END_TIME),
          leftResult.getName());
    } else {
      return new ParameterTimeConstraint(
          new TemporalRange(TemporalRange.START_TIME, leftResult.getStartRange().getStartTime()),
          rightResult.getName());
    }
  }

  /** '<': strict — endpoints shifted by one millisecond. */
  @Override
  public Object visit(final PropertyIsLessThan filter, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) filter.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) filter.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(
              TemporalRange.START_TIME,
              new Date(rightResult.getStartRange().getStartTime().getTime() - 1)),
          leftResult.getName());
    } else {
      return new ParameterTimeConstraint(
          new TemporalRange(
              new Date(leftResult.getStartRange().getStartTime().getTime() + 1),
              TemporalRange.END_TIME),
          rightResult.getName());
    }
  }

  /** '<=': inclusive boundary. */
  @Override
  public Object visit(final PropertyIsLessThanOrEqualTo filter, final Object data) {
    final TemporalConstraints leftResult =
        (TemporalConstraints) filter.getExpression1().accept(this, data);
    final TemporalConstraints rightResult =
        (TemporalConstraints) filter.getExpression2().accept(this, data);
    if (leftResult.isEmpty() || rightResult.isEmpty()) {
      return new TemporalConstraints();
    }
    if (leftResult instanceof ParameterTimeConstraint) {
      return new ParameterTimeConstraint(
          new TemporalRange(TemporalRange.START_TIME, rightResult.getStartRange().getStartTime()),
          leftResult.getName());
    } else {
      return new ParameterTimeConstraint(
          new
              TemporalRange(leftResult.getStartRange().getStartTime(), TemporalRange.END_TIME),
          rightResult.getName());
    }
  }

  // the filters below carry no temporal information; each contributes an empty constraint

  @Override
  public Object visit(final PropertyIsLike filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final PropertyIsNull filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final PropertyIsNil filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final BBOX filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Beyond filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Contains filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Crosses filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Disjoint filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final DWithin filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Equals filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Intersects filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Overlaps filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Touches filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Within filter, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visitNullFilter(final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final NilExpression expression, final Object data) {
    return new TemporalConstraints();
  }

  @Override
  public Object visit(final Add expression, final Object data) {
    // NOTE(review): re-dispatches the SAME expression to this visitor, which invokes this
    // method again — this looks like unbounded recursion rather than visiting the operands;
    // confirm intent (compare with visit(Subtract)/visit(Divide), which do the same).
    return expression.accept(this, data);
  }

  @Override
public Object visit(final Divide expression, final Object data) { return expression.accept(this, data); } @Override public Object visit(final Function expression, final Object data) { // used force full range if the expression contains a time // property...which is correct? return new TemporalConstraints(); } private boolean validateName(final String name) { return true; } @Override public Object visit(final PropertyName expression, final Object data) { final String name = expression.getPropertyName(); if (validateName(expression.getPropertyName())) { // for (final String[] range : validParamRanges) { // if (range[0].equals(name) || range[1].equals(name)) { // return new ParameterTimeConstraint( // range[0] + "_" + range[1]); // } // } return new ParameterTimeConstraint(name); } return new TemporalConstraints(); } @Override public Object visit(final Subtract expression, final Object data) { return expression.accept(this, data); } private boolean expressionContainsTime(final Expression expression) { return !((TemporalConstraints) expression.accept(this, null)).isEmpty(); } private boolean containsTime(final Function function) { boolean yes = false; for (final Expression expression : function.getParameters()) { yes |= expressionContainsTime(expression); } return yes; } private static class ParameterTimeConstraint extends TemporalConstraints { public ParameterTimeConstraint(final String name) { super(TemporalConstraints.FULL_RANGE, name); } public ParameterTimeConstraint(final List ranges, final String name) { super(ranges, name); } public ParameterTimeConstraint(final TemporalRange range, final String name) { super(range, name); } public TemporalConstraints bounds(final TemporalConstraints other) { return other; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/FilterToCQLTool.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse 
Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.util;

import org.geotools.filter.FilterFactoryImpl;
import org.geotools.filter.IllegalFilterException;
import org.geotools.filter.LiteralExpressionImpl;
import org.geotools.filter.spatial.DWithinImpl;
import org.geotools.filter.spatial.IntersectsImpl;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.filter.text.ecql.ECQL;
import org.opengis.filter.Filter;
import org.opengis.filter.MultiValuedFilter.MatchAction;
import org.opengis.filter.expression.Expression;
import org.opengis.filter.expression.PropertyName;
import org.opengis.filter.spatial.DWithin;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Helpers for converting between GeoTools filters and (E)CQL, replacing DWithin filters with an
 * equivalent buffered-geometry intersection so distance units are honored.
 */
public class FilterToCQLTool {
  private static Logger LOGGER = LoggerFactory.getLogger(FilterToCQLTool.class);

  /**
   * If the filter contains any DWithin, round-trips it through CQL so the DWithin nodes are
   * rebuilt as {@link FixedDWithinImpl}; otherwise returns the filter unchanged.
   *
   * @param filter the filter to inspect
   * @return the (possibly rebuilt) filter; the original on CQL conversion failure
   */
  public static Filter fixDWithin(final Filter filter) {
    final HasDWithinFilterVisitor dwithinCheck = new HasDWithinFilterVisitor();
    filter.accept(dwithinCheck, null);
    if (dwithinCheck.hasDWithin()) {
      try {
        final Filter retVal = (Filter) filter.accept(new DWithinFilterVisitor(), null);
        // We do not have a way to transform a filter directly from one
        // to another.
        return FilterToCQLTool.toFilter(ECQL.toCQL(retVal));
      } catch (final CQLException e) {
        LOGGER.trace("Filter is not a CQL Expression", e);
      }
    }
    return filter;
  }

  /**
   * Parses an ECQL expression into a filter using a factory whose dwithin() produces
   * {@link FixedDWithinImpl} (falling back to the stock {@link DWithinImpl} when the fixed
   * implementation cannot be constructed).
   *
   * @param expression the ECQL expression
   * @return the parsed filter
   * @throws CQLException if the expression is not valid ECQL
   */
  public static Filter toFilter(final String expression) throws CQLException {
    return ECQL.toFilter(expression, new FilterFactoryImpl() {
      @Override
      public DWithin dwithin(
          final Expression geometry1,
          final Expression geometry2,
          final double distance,
          final String units,
          final MatchAction matchAction) {
        try {
          return matchAction == null ? new FixedDWithinImpl(geometry1, geometry2, units, distance)
              : new FixedDWithinImpl(geometry1, geometry2, units, distance, matchAction);
        } catch (IllegalFilterException | TransformException e) {
          LOGGER.warn("Cannot convert DWithin Expression to work with WSG84", e);
        }
        // fall back to the stock GeoTools implementation
        final DWithinImpl impl = matchAction == null ? new DWithinImpl(geometry1, geometry2)
            : new DWithinImpl(geometry1, geometry2, matchAction);
        impl.setDistance(distance);
        impl.setUnits(units);
        return impl;
      }

      @Override
      public DWithin dwithin(
          final Expression geometry1,
          final Expression geometry2,
          final double distance,
          final String units) {
        return dwithin(geometry1, geometry2, distance, units, (MatchAction) null);
      }
    });
  }

  /**
   * A DWithin implemented as an Intersects against the literal geometry buffered by the given
   * distance/units, so unit-aware distance checks work with WGS84 data.
   */
  public static final class FixedDWithinImpl extends IntersectsImpl implements DWithin {
    private final double distance;
    private final String units;

    public FixedDWithinImpl(
        final Expression e1,
        final Expression e2,
        final String units,
        final double distance) throws IllegalFilterException, TransformException {
      // buffer whichever side is the literal geometry; intersect with the property side
      super(
          new LiteralExpressionImpl(
              GeometryUtils.buffer(
                  getCRS(e1, e2),
                  e1 instanceof PropertyName
                      ? e2.evaluate(null, org.locationtech.jts.geom.Geometry.class)
                      : e1.evaluate(null, org.locationtech.jts.geom.Geometry.class),
                  units,
                  distance).getLeft()),
          e1 instanceof PropertyName ?
e1 : e2); this.units = units; this.distance = distance; } private static CoordinateReferenceSystem getCRS(final Expression e1, final Expression e2) { return GeometryUtils.getDefaultCRS(); } public FixedDWithinImpl( final Expression e1, final Expression e2, final String units, final double distance, final MatchAction matchAction) throws IllegalFilterException, TransformException { super( new LiteralExpressionImpl( GeometryUtils.buffer( getCRS(e1, e2), e1.evaluate(null, org.locationtech.jts.geom.Geometry.class), units, distance).getLeft()), e2, matchAction); this.units = units; this.distance = distance; } @Override public double getDistance() { return distance; } @Override public String getDistanceUnits() { return units; } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/GeometryUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import java.awt.geom.Point2D; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import javax.measure.Unit; import javax.measure.quantity.Length; import org.apache.commons.lang3.tuple.Pair; import org.geotools.factory.CommonFactoryFinder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.geometry.jts.JTS; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.CRS; import org.geotools.referencing.GeodeticCalculator; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.geotools.util.factory.GeoTools; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel; import org.locationtech.geowave.core.index.GeoWaveSerializationException; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import 
org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.numeric.NumericValue; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.query.constraints.Constraints; import org.locationtech.geowave.core.store.util.ClasspathUtils; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.geom.prep.PreparedGeometryFactory; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKBReader; import org.locationtech.jts.io.WKBWriter; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.filter.FilterFactory2; import org.opengis.filter.spatial.SpatialOperator; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; import 
org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.uzaygezen.core.BitSetMath; import si.uom.NonSI; import si.uom.SI; import systems.uom.common.USCustomary; import tech.units.indriya.AbstractUnit; import tech.units.indriya.function.Calculus; import tech.units.indriya.function.DefaultNumberSystem; import tech.units.indriya.unit.AlternateUnit; import tech.units.indriya.unit.BaseUnit; import tech.units.indriya.unit.Units; /** * This class contains a set of Geometry utility methods that are generally useful throughout the * GeoWave core codebase */ public class GeometryUtils { public static interface GeometryHandler { void handlePoint(Point point); void handleLineString(LineString lineString); void handlePolygon(Polygon polygon); } public static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory(); public static final PreparedGeometryFactory PREPARED_GEOMETRY_FACTORY = new PreparedGeometryFactory(); private static final Logger LOGGER = LoggerFactory.getLogger(GeometryUtils.class); private static final Object MUTEX = new Object(); private static final Object MUTEX_DEFAULT_CRS = new Object(); public static final String DEFAULT_CRS_STR = "EPSG:4326"; private static CoordinateReferenceSystem defaultCrsSingleton; private static boolean classLoaderInitialized = false; // Make sure GeoTools is properly initialized before we do anything static { initClassLoader(); } public static final Integer MAX_GEOMETRY_PRECISION = Integer.valueOf(TWKBUtils.MAX_COORD_PRECISION); public static SpatialOperator geometryToSpatialOperator( final Geometry jtsGeom, final String geometryAttributeName, final CoordinateReferenceSystem crs) { final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2(); if (jtsGeom.equalsTopo(jtsGeom.getEnvelope())) { return factory.bbox( factory.property(geometryAttributeName), new 
ReferencedEnvelope(jtsGeom.getEnvelopeInternal(), crs)); } // there apparently is no way to associate a CRS with a poly // intersection operation so it will have to assume the same CRS as the // feature type return factory.intersects(factory.property(geometryAttributeName), factory.literal(jtsGeom)); } public static void visitGeometry(final Geometry geom, final GeometryHandler geometryHandler) { if (geom == null) { return; } if (geom instanceof GeometryCollection) { final int numGeom = ((GeometryCollection) geom).getNumGeometries(); for (int i = 0; i < numGeom; i++) { visitGeometry(((GeometryCollection) geom).getGeometryN(i), geometryHandler); } } else if (geom instanceof LineString) { geometryHandler.handleLineString((LineString) geom); } else if (geom instanceof Polygon) { geometryHandler.handlePolygon((Polygon) geom); } else { final Point centroid = geom.getCentroid(); geometryHandler.handlePoint(centroid); } } public static CoordinateReferenceSystem decodeCRS(final String crsCode) { if (crsCode == null) { return getDefaultCRS(); } try { return CRS.decode(crsCode, true); } catch (final FactoryException e) { LOGGER.error("Unable to decode '" + crsCode + "' CRS", e); throw new RuntimeException("Unable to decode CRS: '" + crsCode + "'", e); } } @edu.umd.cs.findbugs.annotations.SuppressFBWarnings() public static CoordinateReferenceSystem getDefaultCRS() { if (defaultCrsSingleton == null) { // avoid sync penalty if we can synchronized (MUTEX_DEFAULT_CRS) { // have to do this inside the sync to avoid double init if (defaultCrsSingleton == null) { try { defaultCrsSingleton = CRS.decode(DEFAULT_CRS_STR, true); } catch (final Exception e) { LOGGER.error("Unable to decode " + DEFAULT_CRS_STR + " CRS", e); defaultCrsSingleton = DefaultGeographicCRS.WGS84; } } } } return defaultCrsSingleton; } public static boolean crsMatches(final String crsCode1, final String crsCode2) { if (isDefaultCrs(crsCode1)) { return isDefaultCrs(crsCode2); } else if (isDefaultCrs(crsCode2)) { 
return isDefaultCrs(crsCode1); } return crsCode1.equalsIgnoreCase(crsCode2); } public static boolean isDefaultCrs(final String crsCode) { return (crsCode == null) || crsCode.isEmpty() || crsCode.equalsIgnoreCase(GeometryUtils.DEFAULT_CRS_STR); } public static boolean isDefaultCrs(final CoordinateReferenceSystem crs) { return (crs == null) || crs.equals(getDefaultCRS()); } @edu.umd.cs.findbugs.annotations.SuppressFBWarnings() public static void initClassLoader() { if (!classLoaderInitialized) { synchronized (MUTEX) { if (!classLoaderInitialized) { // This fixes an issue with the use of SPI by the `tech.units.indriya` library. It only // uses the default class loader for the thread, which does not contain the appropriate // classes in the case of accumulo and hbase distributed processes. Manually setting the // number system before that library is loaded prevents that SPI from ever being utilized // by the library. Calculus.setCurrentNumberSystem(new DefaultNumberSystem()); final ClassLoader myCl = GeometryUtils.class.getClassLoader(); final ClassLoader classLoader = ClasspathUtils.transformClassLoader(myCl); if (classLoader != null) { GeoTools.addClassLoader(classLoader); } classLoaderInitialized = true; } } } } public static ConstraintsByClass basicConstraintsFromGeometry(final Geometry geometry) { final List set = new LinkedList<>(); constructListOfConstraintSetsFromGeometry(geometry, set, false); return new ConstraintsByClass(set); } /** * This utility method will convert a JTS geometry to contraints that can be used in a GeoWave * query. 
* * @return Constraints as a mapping of NumericData objects representing ranges for a latitude * dimension and a longitude dimension */ public static GeoConstraintsWrapper basicGeoConstraintsWrapperFromGeometry( final Geometry geometry) { final List set = new LinkedList<>(); final boolean geometryConstraintsExactMatch = constructListOfConstraintSetsFromGeometry(geometry, set, true); return new GeoConstraintsWrapper( new ConstraintsByClass(set), geometryConstraintsExactMatch, geometry); } /** * Recursively decompose geometry into a set of envelopes to create a single set. * * @param geometry * @param destinationListOfSets * @param checkTopoEquality */ private static boolean constructListOfConstraintSetsFromGeometry( final Geometry geometry, final List destinationListOfSets, final boolean checkTopoEquality) { // Get the envelope of the geometry being held final int n = geometry.getNumGeometries(); boolean retVal = true; if (n > 1) { retVal = false; for (int gi = 0; gi < n; gi++) { constructListOfConstraintSetsFromGeometry( geometry.getGeometryN(gi), destinationListOfSets, checkTopoEquality); } } else { final Envelope env = geometry.getEnvelopeInternal(); destinationListOfSets.add(basicConstraintSetFromEnvelope(env)); if (checkTopoEquality) { retVal = new GeometryFactory().toGeometry(env).equalsTopo(geometry); } } return retVal; } /** * This utility method will convert a JTS envelope to contraints that can be used in a GeoWave * query. 
* * @return Constraints as a mapping of NumericData objects representing ranges for a latitude * dimension and a longitude dimension */ public static ConstraintSet basicConstraintSetFromEnvelope(final Envelope env) { // Create a NumericRange object using the x axis final NumericRange rangeLongitude = new NumericRange(env.getMinX(), env.getMaxX()); // Create a NumericRange object using the y axis final NumericRange rangeLatitude = new NumericRange(env.getMinY(), env.getMaxY()); final Map, ConstraintData> constraintsPerDimension = new HashMap<>(); // Create and return a new IndexRange array with an x and y axis // range final ConstraintData xRange = new ConstraintData(rangeLongitude, false); final ConstraintData yRange = new ConstraintData(rangeLatitude, false); constraintsPerDimension.put(CustomCRSUnboundedSpatialDimensionX.class, xRange); constraintsPerDimension.put(CustomCRSUnboundedSpatialDimensionY.class, yRange); constraintsPerDimension.put(CustomCRSBoundedSpatialDimensionX.class, xRange); constraintsPerDimension.put(CustomCRSBoundedSpatialDimensionY.class, yRange); constraintsPerDimension.put(LongitudeDefinition.class, xRange); constraintsPerDimension.put(LatitudeDefinition.class, yRange); return new ConstraintSet(constraintsPerDimension); } /** * This utility method will convert a JTS envelope to contraints that can be used in a GeoWave * query. * * @return Constraints as a mapping of NumericData objects representing ranges for a latitude * dimension and a longitude dimension */ public static Constraints basicConstraintsFromEnvelope(final Envelope env) { return new ConstraintsByClass(basicConstraintSetFromEnvelope(env)); } /** * This utility method will convert a JTS envelope to that can be used in a GeoWave query. 
* * @return Constraints as a mapping of NumericData objects representing ranges for a latitude * dimension and a longitude dimension */ public static ConstraintSet basicConstraintsFromPoint( final double latitudeDegrees, final double longitudeDegrees) { // Create a NumericData object using the x axis final NumericData latitude = new NumericValue(latitudeDegrees); // Create a NumericData object using the y axis final NumericData longitude = new NumericValue(longitudeDegrees); final Map, ConstraintData> constraintsPerDimension = new HashMap<>(); // Create and return a new IndexRange array with an x and y axis // range constraintsPerDimension.put(LongitudeDefinition.class, new ConstraintData(longitude, false)); constraintsPerDimension.put(LatitudeDefinition.class, new ConstraintData(latitude, false)); return new ConstraintSet(constraintsPerDimension); } public static MultiDimensionalNumericData getBoundsFromEnvelope(final Envelope envelope) { final NumericRange[] boundsPerDimension = new NumericRange[2]; boundsPerDimension[0] = new NumericRange(envelope.getMinX(), envelope.getMaxX()); boundsPerDimension[1] = new NumericRange(envelope.getMinY(), envelope.getMaxY()); return new BasicNumericDataset(boundsPerDimension); } /** * Generate a longitude range from a JTS geometry * * @param geometry The JTS geometry * @return The x range */ public static NumericData xRangeFromGeometry(final Geometry geometry) { if ((geometry == null) || geometry.isEmpty()) { return new NumericValue(0); } // Get the envelope of the geometry being held final Envelope env = geometry.getEnvelopeInternal(); if (env.getWidth() <= 0) { return new NumericValue(env.getMinX()); } // Create a NumericRange object using the x axis return new NumericRange(env.getMinX(), env.getMaxX()); } /** * Generate a latitude range from a JTS geometry * * @param geometry The JTS geometry * @return The y range */ public static NumericData yRangeFromGeometry(final Geometry geometry) { if ((geometry == null) || 
geometry.isEmpty()) { return new NumericValue(0); } // Get the envelope of the geometry being held final Envelope env = geometry.getEnvelopeInternal(); if (env.getHeight() <= 0) { return new NumericValue(env.getMinY()); } // Create a NumericRange object using the y axis return new NumericRange(env.getMinY(), env.getMaxY()); } /** * Converts a JTS geometry to binary using JTS a Well Known Binary writer * * @param geometry The JTS geometry * @return The binary representation of the geometry */ public static byte[] geometryToBinary( final Geometry geometry, final @Nullable Integer precision) { if (precision == null) { return new WKBWriter().write(geometry); } return new TWKBWriter(precision).write(geometry); } /** * Converts a byte array as well-known binary to a JTS geometry * * @param binary The well known binary * @return The JTS geometry */ public static Geometry geometryFromBinary( final byte[] binary, final @Nullable Integer precision) { try { if (precision == null) { return new WKBReader().read(binary); } return new TWKBReader().read(binary); } catch (final ParseException e) { throw new GeoWaveSerializationException("Unable to deserialize geometry data", e); } } /** * Converts a byte array as well-known binary to a JTS geometry * * @param binary The well known binary * @return The JTS geometry */ public static Geometry geometryFromBinary( final byte[] binary, final @Nullable Integer precision, final byte serializationVersion) { if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { try { return new WKBReader().read(binary); } catch (final ParseException e) { LOGGER.warn("Unable to deserialize geometry data", e); throw new GeoWaveSerializationException(e); } } return geometryFromBinary(binary, precision); } /** * This mehtod returns an envelope between negative infinite and positive inifinity in both x and * y * * @return the infinite bounding box */ public static Geometry infinity() { // unless we make this synchronized, we will want to instantiate a 
new // geometry factory because geometry factories are not thread safe return new GeometryFactory().toGeometry( new Envelope( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY)); } public static class GeoConstraintsWrapper { private final ConstraintsByClass constraints; private final boolean constraintsMatchGeometry; private final Geometry jtsBounds; public GeoConstraintsWrapper( final ConstraintsByClass constraints, final boolean constraintsMatchGeometry, final Geometry jtsBounds) { this.constraints = constraints; this.constraintsMatchGeometry = constraintsMatchGeometry; this.jtsBounds = jtsBounds; } public ConstraintsByClass getConstraints() { return constraints; } public boolean isConstraintsMatchGeometry() { return constraintsMatchGeometry; } public Geometry getGeometry() { return jtsBounds; } } public static CoordinateReferenceSystem getIndexCrs(final Index[] indices) { CoordinateReferenceSystem indexCrs = null; for (final Index primaryindx : indices) { // for first iteration if (indexCrs == null) { indexCrs = getIndexCrs(primaryindx); } else { if (primaryindx.getIndexModel() instanceof CustomCrsIndexModel) { // check if indexes have different CRS if (!indexCrs.equals(((CustomCrsIndexModel) primaryindx.getIndexModel()).getCrs())) { LOGGER.error("Multiple indices with different CRS is not supported"); throw new RuntimeException("Multiple indices with different CRS is not supported"); } else { if (!indexCrs.equals(getDefaultCRS())) { LOGGER.error("Multiple indices with different CRS is not supported"); throw new RuntimeException("Multiple indices with different CRS is not supported"); } } } } } return indexCrs; } public static CoordinateReferenceSystem getIndexCrs(final Index index) { CoordinateReferenceSystem indexCrs = null; if (index != null && index.getIndexModel() instanceof CustomCrsIndexModel) { indexCrs = ((CustomCrsIndexModel) index.getIndexModel()).getCrs(); } else { indexCrs = getDefaultCRS(); } 
return indexCrs; } public static String getCrsCode(final CoordinateReferenceSystem crs) { return (CRS.toSRS(crs)); } /** * Build a buffer around a geometry * * @param crs * @param geometry * @param distanceUnits * @param distance * @return the buffered geometry and the degrees that it was buffered * @throws TransformException */ public static final Pair buffer( final CoordinateReferenceSystem crs, final Geometry geometry, final String distanceUnits, final double distance) throws TransformException { Unit unit; try { unit = lookup(distanceUnits); } catch (final Exception e) { unit = Units.METRE; LOGGER.warn("Cannot lookup unit of measure " + distanceUnits, e); } final double meterDistance = unit.getConverterTo(Units.METRE).convert(distance); final double degrees = distanceToDegrees(crs, geometry, meterDistance); // buffer does not respect the CRS; it uses simple cartesian math. // nor does buffer handle dateline boundaries return Pair.of(adjustGeo(crs, geometry.buffer(degrees)), degrees); } public static Unit lookup(final String name) { final String lowerCaseName = name.toLowerCase(); Unit unit = lookup(SI.class, lowerCaseName); if (unit != null) { return unit; } unit = lookup(NonSI.class, lowerCaseName); if (unit != null) { return unit; } if (lowerCaseName.endsWith("s")) { return lookup(lowerCaseName.substring(0, lowerCaseName.length() - 1)); } if (lowerCaseName.startsWith("kilo") && (lowerCaseName.length() > 4)) { final Unit u = lookup(lowerCaseName.substring(4)); if (u != null) { return u.multiply(1000); } } // if we get here, try some aliases if (lowerCaseName.equals("feet")) { return USCustomary.FOOT; } // if we get here, try some aliases if (lowerCaseName.equals("meter")) { return Units.METRE; } if (lowerCaseName.equals("unity")) { return (Unit) AbstractUnit.ONE; } return null; } private static Unit lookup(final Class class1, final String name) { Unit unit = null; final Field[] fields = class1.getDeclaredFields(); for (int i = 0; i < fields.length; i++) { 
final Field field = fields[i]; final String name2 = field.getName(); if ((field.getType().isAssignableFrom(BaseUnit.class) || field.getType().isAssignableFrom(AlternateUnit.class)) && name2.equalsIgnoreCase(name)) { try { unit = (Unit) field.get(unit); return unit; } catch (final Exception e) { } } } return unit; } /** * Consume a geometry that may be over the ranges of the CRS (e.g date-line crossing). Adjust for * crossings with a multi-polygon instance where each contained polygon represents a portion of * the provided geometry longitude value. Clip hemisphere crossings (fix TBD). * * @param crs * @param geometry * @return the adjusted geometry */ public static Geometry adjustGeo(final CoordinateReferenceSystem crs, final Geometry geometry) { final List polygons = fixRangeOfCoordinates(crs, geometry); if (polygons.size() == 1) { return polygons.get(0); } return geometry.getFactory().createMultiPolygon(polygons.toArray(new Polygon[polygons.size()])); } /** * Adjust geometry so that coordinates fit into long/lat bounds. * *

Split date-line crossing polygons. * *

For now, clip hemisphere crossing portions of the polygon. * * @param geometry * @return list valid polygons */ public static List fixRangeOfCoordinates( final CoordinateReferenceSystem crs, final Geometry geometry) { final List replacements = new ArrayList<>(); if (geometry instanceof MultiPolygon) { final MultiPolygon multi = (MultiPolygon) geometry; for (int i = 0; i < multi.getNumGeometries(); i++) { final Geometry geo = multi.getGeometryN(i); replacements.addAll(fixRangeOfCoordinates(crs, geo)); } return replacements; } // collection is more general than multi-polygon else if (geometry instanceof GeometryCollection) { final GeometryCollection multi = (GeometryCollection) geometry; for (int i = 0; i < multi.getNumGeometries(); i++) { final Geometry geo = multi.getGeometryN(i); replacements.addAll(fixRangeOfCoordinates(crs, geo)); } return replacements; } final Coordinate[] geoCoords = geometry.getCoordinates(); final Coordinate modifier = findModifier(crs, geoCoords); replacements.addAll(constructGeometriesOverMapRegions(modifier, geometry)); return replacements; } /** * update modifier for each axis of the coordinate where the modifier's axis is less extreme than * the provides coordinate * * @param modifier * @param cood */ private static void updateModifier(final Coordinate coord, final Coordinate modifier) { for (int i = 0; i < 3; i++) { double coordOrdinateValue, modifierOrdinateValue; switch (i) { case 1: coordOrdinateValue = coord.getY(); modifierOrdinateValue = modifier.getY(); break; case 2: coordOrdinateValue = coord.getZ(); modifierOrdinateValue = modifier.getZ(); break; default: case 0: coordOrdinateValue = coord.getX(); modifierOrdinateValue = modifier.getX(); break; } if (!Double.isNaN(coordOrdinateValue) && !Double.isNaN(modifierOrdinateValue)) { if (Math.abs(modifierOrdinateValue) < Math.abs(coordOrdinateValue)) { modifier.setOrdinate(i, coord.getOrdinate(i)); } } } } /** * Build a modifier that, when added to the coordinates of a polygon, moves 
invalid sections of * the polygon to a valid portion of the map. * * @param crs * @param coords * @return */ private static Coordinate findModifier( final CoordinateReferenceSystem crs, final Coordinate[] coords) { final Coordinate maxModifier = new Coordinate(0, 0, 0); for (final Coordinate coord : coords) { final Coordinate modifier = diff(adjustCoordinateToFitInRange(crs, coord), coord); updateModifier(modifier, maxModifier); } return maxModifier; } /** * Produce a set of polygons for each region of the map corrected for date line and hemisphere * crossings. Due to the complexity of going around the hemisphere, clip the range. * *

Consider a polygon that cross both the hemisphere in the north and the date line in the * west (-182 92, -182 88, -178 88, -178 92, -182 92). The result is two polygons: (-180 90, -180 * 88, -178 88, -178 90, -180 90) (180 90, 180 88, 178 88, 178 90, 180 90) * * @param modifier * @param geometry - a geometry that may cross date line and/or hemispheres. * @return the set of polygons */ public static List constructGeometriesOverMapRegions( final Coordinate modifier, final Geometry geometry) { final Coordinate[] geoCoords = geometry.getCoordinates(); final List polygons = new LinkedList<>(); final Geometry world = world(geometry.getFactory(), GeometryUtils.getDefaultCRS()); // First do the polygon unchanged world final Geometry worldIntersections = world.intersection(geometry); for (int i = 0; i < worldIntersections.getNumGeometries(); i++) { final Polygon polyToAdd = (Polygon) worldIntersections.getGeometryN(i); if (!polygons.contains(polyToAdd)) { polygons.add(polyToAdd); } } // now use the modifier...but just the x axis for longitude // optimization...do not modify if 0 if (Math.abs(modifier.x) > 0.0000000001) { final Coordinate[] newCoords = new Coordinate[geoCoords.length]; int c = 0; for (final Coordinate geoCoord : geoCoords) { newCoords[c++] = new Coordinate(geoCoord.x + modifier.x, geoCoord.y, geoCoord.z); } final Polygon transposedPoly = geometry.getFactory().createPolygon(newCoords); final Geometry adjustedPolyWorldIntersections = world.intersection(transposedPoly); for (int i = 0; i < adjustedPolyWorldIntersections.getNumGeometries(); i++) { final Polygon polyToAdd = (Polygon) adjustedPolyWorldIntersections.getGeometryN(i); if (!polygons.contains(polyToAdd)) { polygons.add(polyToAdd); } } } return polygons; } /** * Make sure the coordinate falls in the range of provided coordinate reference systems's * coordinate system. 'x' coordinate is wrapped around date line. 'y' and 'z' coordinate are * clipped. 
At some point, this function will be adjusted to project 'y' appropriately. * * @param crs * @param coord * @return the adjusted coordinate */ public static Coordinate adjustCoordinateToFitInRange( final CoordinateReferenceSystem crs, final Coordinate coord) { return new Coordinate( adjustCoordinateDimensionToRange(coord.getX(), crs, 0), clipRange(coord.getY(), crs, 1), clipRange(coord.getZ(), crs, 2)); } /** * @param coord1 * @param coord2 subtracted from coord1 * @return a coordinate the supplies the difference of values for each axis between coord1 and * coord2 */ private static Coordinate diff(final Coordinate coord1, final Coordinate coord2) { return new Coordinate( coord1.getX() - coord2.getX(), coord1.getY() - coord2.getY(), coord1.getZ() - coord2.getZ()); } /** * @param val the value * @param crs * @param axis the coordinate axis * @return */ private static double clipRange( final double val, final CoordinateReferenceSystem crs, final int axis) { final CoordinateSystem coordinateSystem = crs.getCoordinateSystem(); if (coordinateSystem.getDimension() > axis) { final CoordinateSystemAxis coordinateAxis = coordinateSystem.getAxis(axis); if (val < coordinateAxis.getMinimumValue()) { return coordinateAxis.getMinimumValue(); } else if (val > coordinateAxis.getMaximumValue()) { return coordinateAxis.getMaximumValue(); } } return val; } /** * This is perhaps a brain dead approach to do this, but it does handle wrap around cases. Also * supports cases where the wrap around occurs many times. 
* * @param val the value * @param crs * @param axis the coordinate axis * @return the adjusted coordinate dimension */ public static double adjustCoordinateDimensionToRange( final double val, final CoordinateReferenceSystem crs, final int axis) { final CoordinateSystem coordinateSystem = crs.getCoordinateSystem(); if (coordinateSystem.getDimension() > axis) { final double lowerBound = coordinateSystem.getAxis(axis).getMinimumValue(); final double bound = coordinateSystem.getAxis(axis).getMaximumValue() - lowerBound; final double sign = sign(val); // re-scale to 0 to n, then determine how many times to 'loop // around' final double mult = Math.floor(Math.abs((val + (sign * (-1.0 * lowerBound))) / bound)); return val + (mult * bound * sign * (-1.0)); } return val; } private static double sign(final double val) { return val < 0 ? -1 : 1; } /** * Return a multi-polygon representing the bounded map regions split by the axis * * @param factory * @param crs * @return a world geometry */ public static Geometry world(final GeometryFactory factory, final CoordinateReferenceSystem crs) { return factory.createPolygon(toPolygonCoordinates(crs.getCoordinateSystem())); } private static Coordinate[] toPolygonCoordinates(final CoordinateSystem coordinateSystem) { final Coordinate[] coordinates = new Coordinate[(int) Math.pow(2, coordinateSystem.getDimension()) + 1]; final BitSet greyCode = new BitSet(coordinateSystem.getDimension()); final BitSet mask = getGreyCodeMask(coordinateSystem.getDimension()); for (int i = 0; i < coordinates.length; i++) { coordinates[i] = new Coordinate( getValue(greyCode, coordinateSystem.getAxis(0), 0), getValue(greyCode, coordinateSystem.getAxis(1), 1), coordinateSystem.getDimension() > 2 ? 
getValue(greyCode, coordinateSystem.getAxis(2), 2) : Double.NaN); grayCode(greyCode, mask); } return coordinates; } private static BitSet getGreyCodeMask(final int dims) { final BitSet mask = new BitSet(dims); for (int i = 0; i < dims; i++) { mask.set(i); } return mask; } private static void grayCode(final BitSet code, final BitSet mask) { BitSetMath.grayCodeInverse(code); BitSetMath.increment(code); code.and(mask); BitSetMath.grayCode(code); } private static double getValue( final BitSet set, final CoordinateSystemAxis axis, final int dimension) { return (set.get(dimension)) ? axis.getMaximumValue() : axis.getMinimumValue(); } /** * Convert meters to decimal degrees based on widest point * * @throws TransformException */ private static double distanceToDegrees( final CoordinateReferenceSystem crs, final Geometry geometry, final double meters) throws TransformException { final GeometryFactory factory = geometry.getFactory(); return (geometry instanceof Point) ? geometry.distance(farthestPoint(crs, (Point) geometry, meters)) : distanceToDegrees( crs, geometry.getEnvelopeInternal(), factory == null ? 
new GeometryFactory() : factory, meters); } private static double distanceToDegrees( final CoordinateReferenceSystem crs, final Envelope env, final GeometryFactory factory, final double meters) throws TransformException { return Collections.max( Arrays.asList( distanceToDegrees( crs, factory.createPoint(new Coordinate(env.getMaxX(), env.getMaxY())), meters), distanceToDegrees( crs, factory.createPoint(new Coordinate(env.getMaxX(), env.getMinY())), meters), distanceToDegrees( crs, factory.createPoint(new Coordinate(env.getMinX(), env.getMinY())), meters), distanceToDegrees( crs, factory.createPoint(new Coordinate(env.getMinX(), env.getMaxY())), meters))); } /** farther point in longitudinal axis given a latitude */ private static Point farthestPoint( final CoordinateReferenceSystem crs, final Point point, final double meters) { final GeodeticCalculator calc = new GeodeticCalculator(crs); calc.setStartingGeographicPoint(point.getX(), point.getY()); calc.setDirection(90, meters); Point2D dest2D = calc.getDestinationGeographicPoint(); // if this flips over the date line then try the other direction if (dest2D.getX() < point.getX()) { calc.setDirection(-90, meters); dest2D = calc.getDestinationGeographicPoint(); } return point.getFactory().createPoint(new Coordinate(dest2D.getX(), dest2D.getY())); } public static SimpleFeature crsTransform( final SimpleFeature entry, final SimpleFeatureType reprojectedType, final MathTransform transform) { SimpleFeature crsEntry = entry; if (transform != null) { // we can use the transform we have already calculated for this // feature try { // this will clone the feature and retype it to Index CRS crsEntry = SimpleFeatureBuilder.retype(entry, reprojectedType); // this will transform the geometry crsEntry.setDefaultGeometry( JTS.transform((Geometry) entry.getDefaultGeometry(), transform)); } catch (MismatchedDimensionException | TransformException e) { LOGGER.warn( "Unable to perform transform to specified CRS of the index, the feature 
geometry will remain in its original CRS", e); } } return crsEntry; } public static Geometry crsTransform(final Geometry geometry, final MathTransform transform) { if (transform != null) { try { return JTS.transform(geometry, transform); } catch (MismatchedDimensionException | TransformException e) { LOGGER.warn( "Unable to perform transform to specified CRS of the index, the feature geometry will remain in its original CRS", e); } } return null; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/HasDWithinFilterVisitor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import org.geotools.filter.visitor.NullFilterVisitor; import org.opengis.filter.spatial.DWithin; public class HasDWithinFilterVisitor extends NullFilterVisitor { private boolean hasDWithin = false; @Override public Object visit(final DWithin filter, final Object data) { hasDWithin = true; return super.visit(filter, data); } public boolean hasDWithin() { return hasDWithin; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/IndexOptimizationUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialDimension; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialField; import org.locationtech.geowave.core.geotime.store.dimension.LatitudeField; import org.locationtech.geowave.core.geotime.store.dimension.LongitudeField; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; public class IndexOptimizationUtils { public static InternalGeotoolsFeatureDataAdapter unwrapGeotoolsFeatureDataAdapter( final DataTypeAdapter adapter) { if (adapter instanceof InternalGeotoolsFeatureDataAdapter) { return (InternalGeotoolsFeatureDataAdapter) adapter; } return null; } public static boolean hasAtLeastSpatial(final Index index) { if ((index == null) || (index.getIndexModel() == null) || (index.getIndexModel().getDimensions() == null)) { return false; } boolean hasLatitude = false; boolean hasLongitude = false; for (final NumericDimensionField dimension : index.getIndexModel().getDimensions()) { if (dimension instanceof LatitudeField) { hasLatitude = true; } if (dimension instanceof LongitudeField) { hasLongitude = true; } if (dimension instanceof CustomCRSSpatialField) { if (((CustomCRSSpatialDimension) 
dimension.getBaseDefinition()).getAxis() == 0) { hasLongitude = true; } else { hasLatitude = true; } } } return hasLatitude && hasLongitude; } public static boolean hasTime(final Index index, final GeotoolsFeatureDataAdapter adapter) { return hasTime(index) && adapter.hasTemporalConstraints(); } public static boolean hasTime(final Index index) { if ((index == null) || (index.getIndexModel() == null) || (index.getIndexModel().getDimensions() == null)) { return false; } for (final NumericDimensionField dimension : index.getIndexModel().getDimensions()) { if (dimension instanceof TimeField) { return true; } } return false; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/SimpleFeatureUserDataConfiguration.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.util;

import org.locationtech.geowave.core.index.persist.Persistable;
import org.opengis.feature.simple.SimpleFeatureType;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

/**
 * A type of configuration data associated with attributes of a simple features such as statistics,
 * indexing constraints, etc.
 *
 * <p>Implementations are round-trippable: {@link #updateType(SimpleFeatureType)} writes the
 * configuration into the feature type's per-attribute user data, and
 * {@link #configureFromType(SimpleFeatureType)} reads it back. The {@code @JsonTypeInfo}
 * annotation embeds the concrete class name so implementations can also be (de)serialized
 * polymorphically as JSON.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "@class")
public interface SimpleFeatureUserDataConfiguration extends java.io.Serializable, Persistable {
  /**
   * Store configuration in user data of the feature type attributes.
   *
   * @param type the feature type whose attribute user data is updated in place
   */
  public void updateType(final SimpleFeatureType type);

  /**
   * Extract configuration from user data of the feature type attributes.
   *
   * @param type the feature type whose attribute user data is read
   */
  public void configureFromType(final SimpleFeatureType type);
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/SpatialIndexUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimension; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSBoundedSpatialDimensionY; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionX; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSUnboundedSpatialDimensionY; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.store.api.Index; /** * Provides helper functions for spatial indices. */ public class SpatialIndexUtils { /** * Determine if the given dimension represents longitude. * * @param dimension the dimension to check * @return {@code true} if the dimension represents longitude. */ public static boolean isLongitudeDimension(final NumericDimensionDefinition dimension) { return (dimension instanceof LongitudeDefinition) || (dimension instanceof CustomCRSUnboundedSpatialDimensionX) || (dimension instanceof CustomCRSBoundedSpatialDimensionX) || (dimension instanceof CustomCRSBoundedSpatialDimension && ((CustomCRSBoundedSpatialDimension) dimension).getAxis() == 0x0); } /** * Determine if the given dimension represents latitude. * * @param dimension the dimension to check * @return {@code true} if the dimension represents latitude. 
*/ public static boolean isLatitudeDimension(final NumericDimensionDefinition dimension) { return (dimension instanceof LatitudeDefinition) || (dimension instanceof CustomCRSUnboundedSpatialDimensionY) || (dimension instanceof CustomCRSBoundedSpatialDimensionY) || (dimension instanceof CustomCRSBoundedSpatialDimension && ((CustomCRSBoundedSpatialDimension) dimension).getAxis() == 0x1); } /** * Determine if the given index has a latitude and longitude dimension. * * @param index the index to check * @return {@code true} if the index has spatial dimensions. */ public static boolean hasSpatialDimensions(final Index index) { boolean hasLat = false; boolean hasLon = false; if (index.getIndexStrategy() != null) { NumericDimensionDefinition[] indexDimensions = index.getIndexStrategy().getOrderedDimensionDefinitions(); if (indexDimensions != null && indexDimensions.length >= 2) { for (int i = 0; i < indexDimensions.length; i++) { hasLat = hasLat | isLatitudeDimension(indexDimensions[i]); hasLon = hasLon | isLongitudeDimension(indexDimensions[i]); } } } return hasLat && hasLon; } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TWKBReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.util;

import java.io.IOException;
import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryCollection;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.MultiPoint;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.io.ParseException;

/**
 * Decodes JTS {@link Geometry} instances from the compact "Tiny WKB" (TWKB) byte encoding
 * produced by {@code TWKBWriter}. Coordinates are stored as zig-zag/varint-encoded integers that
 * are divided by a power-of-ten precision multiplier; coordinate arrays are delta-encoded
 * (each value is stored relative to the previous one).
 */
public class TWKBReader {
  public TWKBReader() {}

  /**
   * Parses a TWKB byte array into a geometry.
   *
   * @param bytes the TWKB-encoded bytes
   * @return the decoded geometry
   * @throws ParseException if the bytes cannot be decoded
   */
  public Geometry read(final byte[] bytes) throws ParseException {
    return read(ByteBuffer.wrap(bytes));
  }

  /**
   * Parses one geometry from the buffer, advancing its position past that geometry (so it can be
   * called repeatedly, e.g. for the members of a geometry collection).
   *
   * @param input buffer positioned at the start of a TWKB geometry
   * @return the decoded geometry, or {@code null} for an unrecognized type nibble
   * @throws ParseException if the underlying varint reads fail
   */
  public Geometry read(final ByteBuffer input) throws ParseException {
    try {
      // First byte packs the geometry type (low nibble) and the zig-zag-encoded base X/Y
      // precision (high nibble).
      final byte typeAndPrecision = input.get();
      final byte type = (byte) (typeAndPrecision & 0x0F);
      final int basePrecision = TWKBUtils.zigZagDecode((typeAndPrecision & 0xF0) >> 4);
      final byte metadata = input.get();
      PrecisionReader precision;
      if ((metadata & TWKBUtils.EXTENDED_DIMENSIONS) != 0) {
        // An extra byte describes the optional Z/M dimensions and their precisions.
        final byte extendedDimensions = input.get();
        precision = new ExtendedPrecisionReader(basePrecision, extendedDimensions);
      } else {
        precision = new PrecisionReader(basePrecision);
      }
      switch (type) {
        case TWKBUtils.POINT_TYPE:
          return readPoint(precision, metadata, input);
        case TWKBUtils.LINESTRING_TYPE:
          return readLineString(precision, metadata, input);
        case TWKBUtils.POLYGON_TYPE:
          return readPolygon(precision, metadata, input);
        case TWKBUtils.MULTIPOINT_TYPE:
          return readMultiPoint(precision, metadata, input);
        case TWKBUtils.MULTILINESTRING_TYPE:
          return readMultiLineString(precision, metadata, input);
        case TWKBUtils.MULTIPOLYGON_TYPE:
          return readMultiPolygon(precision, metadata, input);
        case TWKBUtils.GEOMETRYCOLLECTION_TYPE:
          return readGeometryCollection(input, metadata);
      }
      // Unknown type nibble; nothing sensible to return.
      return null;
    } catch (final IOException e) {
      throw new ParseException("Error reading TWKB geometry.", e);
    }
  }

  /** Reads a point; the EMPTY_GEOMETRY metadata bit short-circuits to an empty point. */
  private Point readPoint(
      final PrecisionReader precision,
      final byte metadata,
      final ByteBuffer input) throws IOException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createPoint();
    }
    final Coordinate coordinate = precision.readPoint(input);
    return GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate);
  }

  /** Reads a line string as a single delta-encoded coordinate array. */
  private LineString readLineString(
      final PrecisionReader precision,
      final byte metadata,
      final ByteBuffer input) throws IOException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createLineString();
    }
    final Coordinate[] coordinates = precision.readPointArray(input);
    return GeometryUtils.GEOMETRY_FACTORY.createLineString(coordinates);
  }

  /** Reads a polygon: ring count, then the exterior ring, then any interior rings. */
  private Polygon readPolygon(
      final PrecisionReader precision,
      final byte metadata,
      final ByteBuffer input) throws IOException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createPolygon();
    }
    final int numRings = VarintUtils.readUnsignedInt(input);
    final LinearRing exteriorRing =
        GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));
    final LinearRing[] interiorRings = new LinearRing[numRings - 1];
    for (int i = 0; i < (numRings - 1); i++) {
      interiorRings[i] =
          GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));
    }
    return GeometryUtils.GEOMETRY_FACTORY.createPolygon(exteriorRing, interiorRings);
  }

  /** Reads a multi-point as one coordinate array. */
  private MultiPoint readMultiPoint(
      final PrecisionReader precision,
      final byte metadata,
      final ByteBuffer input) throws IOException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createMultiPoint();
    }
    final Coordinate[] points = precision.readPointArray(input);
    return GeometryUtils.GEOMETRY_FACTORY.createMultiPointFromCoords(points);
  }

  /** Reads a multi-line-string: line count, then one coordinate array per line. */
  private MultiLineString readMultiLineString(
      final PrecisionReader precision,
      final byte metadata,
      final ByteBuffer input) throws IOException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createMultiLineString();
    }
    final int numLines = VarintUtils.readUnsignedInt(input);
    final LineString[] lines = new LineString[numLines];
    for (int i = 0; i < numLines; i++) {
      lines[i] =
          GeometryUtils.GEOMETRY_FACTORY.createLineString(precision.readPointArray(input));
    }
    return GeometryUtils.GEOMETRY_FACTORY.createMultiLineString(lines);
  }

  /**
   * Reads a multi-polygon: polygon count, then per polygon a ring count (0 means an empty member
   * polygon) followed by its rings.
   */
  private MultiPolygon readMultiPolygon(
      final PrecisionReader precision,
      final byte metadata,
      final ByteBuffer input) throws IOException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createMultiPolygon();
    }
    final int numPolygons = VarintUtils.readUnsignedInt(input);
    final Polygon[] polygons = new Polygon[numPolygons];
    int numRings;
    for (int i = 0; i < numPolygons; i++) {
      numRings = VarintUtils.readUnsignedInt(input);
      if (numRings == 0) {
        polygons[i] = GeometryUtils.GEOMETRY_FACTORY.createPolygon();
        continue;
      }
      final LinearRing exteriorRing =
          GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));
      final LinearRing[] interiorRings = new LinearRing[numRings - 1];
      for (int j = 0; j < (numRings - 1); j++) {
        interiorRings[j] =
            GeometryUtils.GEOMETRY_FACTORY.createLinearRing(precision.readPointArray(input));
      }
      polygons[i] = GeometryUtils.GEOMETRY_FACTORY.createPolygon(exteriorRing, interiorRings);
    }
    return GeometryUtils.GEOMETRY_FACTORY.createMultiPolygon(polygons);
  }

  /**
   * Reads a geometry collection: member count, then each member as a complete nested TWKB
   * geometry (recursing through {@link #read(ByteBuffer)}).
   */
  private GeometryCollection readGeometryCollection(final ByteBuffer input, final byte metadata)
      throws ParseException {
    if ((metadata & TWKBUtils.EMPTY_GEOMETRY) != 0) {
      return GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection();
    }
    final int numGeometries = VarintUtils.readUnsignedInt(input);
    final Geometry[] geometries = new Geometry[numGeometries];
    for (int i = 0; i < numGeometries; i++) {
      geometries[i] = read(input);
    }
    return GeometryUtils.GEOMETRY_FACTORY.createGeometryCollection(geometries);
  }

  /** Decodes X/Y values: signed varints divided by a power-of-ten precision multiplier. */
  private static class PrecisionReader {
    protected double precisionMultiplier;

    public PrecisionReader(final int precision) {
      precisionMultiplier = Math.pow(10, precision);
    }

    /** Reads a single (non-delta) X/Y coordinate. */
    public Coordinate readPoint(final ByteBuffer input) throws IOException {
      return new Coordinate(
          (VarintUtils.readSignedLong(input)) / precisionMultiplier,
          (VarintUtils.readSignedLong(input)) / precisionMultiplier);
    }

    /**
     * Reads a delta-encoded coordinate array: a count, then per coordinate the signed offsets
     * from the previous coordinate's scaled integer values.
     */
    public Coordinate[] readPointArray(final ByteBuffer input) throws IOException {
      final int numCoordinates = VarintUtils.readUnsignedInt(input);
      final Coordinate[] coordinates = new Coordinate[numCoordinates];
      long lastX = 0;
      long lastY = 0;
      for (int i = 0; i < numCoordinates; i++) {
        lastX = VarintUtils.readSignedLong(input) + lastX;
        lastY = VarintUtils.readSignedLong(input) + lastY;
        coordinates[i] =
            new Coordinate((lastX) / precisionMultiplier, (lastY) / precisionMultiplier);
      }
      return coordinates;
    }
  }

  /**
   * Precision reader that also decodes optional Z and/or M ordinates. The extended-dimensions
   * byte layout: bit 0 = has-Z, bit 1 = has-M, bits 2-4 = zig-zag Z precision, bits 5-7 = zig-zag
   * M precision (mirrors the writer side).
   */
  private static class ExtendedPrecisionReader extends PrecisionReader {
    private boolean hasZ = false;
    private double zPrecisionMultiplier = 0;
    private boolean hasM = false;
    private double mPrecisionMultiplier = 0;

    public ExtendedPrecisionReader(final int precision, final byte extendedDimensions) {
      super(precision);
      if ((extendedDimensions & 0x1) != 0) {
        hasZ = true;
        zPrecisionMultiplier =
            Math.pow(10, TWKBUtils.zigZagDecode((extendedDimensions >> 2) & 0x7));
      }
      if ((extendedDimensions & 0x2) != 0) {
        hasM = true;
        mPrecisionMultiplier =
            Math.pow(10, TWKBUtils.zigZagDecode((extendedDimensions >> 5) & 0x7));
      }
    }

    @Override
    public Coordinate readPoint(final ByteBuffer input) throws IOException {
      final Coordinate coordinate = super.readPoint(input);
      if (hasZ) {
        coordinate.setZ(VarintUtils.readSignedLong(input) / zPrecisionMultiplier);
      }
      if (hasM) {
        coordinate.setM(VarintUtils.readSignedLong(input) / mPrecisionMultiplier);
      }
      return coordinate;
    }

    @Override
    public Coordinate[] readPointArray(final ByteBuffer input) throws IOException {
      final int numCoordinates = VarintUtils.readUnsignedInt(input);
      final Coordinate[] coordinates = new Coordinate[numCoordinates];
      // Z and M are delta-encoded exactly like X and Y.
      long lastX = 0;
      long lastY = 0;
      long lastZ = 0;
      long lastM = 0;
      for (int i = 0; i < numCoordinates; i++) {
        lastX = VarintUtils.readSignedLong(input) + lastX;
        lastY = VarintUtils.readSignedLong(input) + lastY;
        coordinates[i] =
            new Coordinate((lastX) / precisionMultiplier, (lastY) / precisionMultiplier);
        if (hasZ) {
          lastZ = VarintUtils.readSignedLong(input) + lastZ;
          coordinates[i].setZ((lastZ) / zPrecisionMultiplier);
        }
        if (hasM) {
          lastM = VarintUtils.readSignedLong(input) + lastM;
          coordinates[i].setM((lastM) / mPrecisionMultiplier);
        }
      }
      return coordinates;
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TWKBUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
// package org.locationtech.geowave.core.geotime.util;

/**
 * Shared constants and bit-twiddling helpers for the TWKB (Tiny Well-Known Binary) geometry
 * encoding used by {@code TWKBReader} and {@code TWKBWriter}.
 */
public class TWKBUtils {
  // Geometry type nibbles (low 4 bits of the first TWKB byte).
  public static final byte POINT_TYPE = 1;
  public static final byte LINESTRING_TYPE = 2;
  public static final byte POLYGON_TYPE = 3;
  public static final byte MULTIPOINT_TYPE = 4;
  public static final byte MULTILINESTRING_TYPE = 5;
  public static final byte MULTIPOLYGON_TYPE = 6;
  public static final byte GEOMETRYCOLLECTION_TYPE = 7;

  // Metadata-byte flags.
  public static final byte EXTENDED_DIMENSIONS = 1 << 3;
  public static final byte EMPTY_GEOMETRY = 1 << 4;

  // Precision bounds: X/Y precision travels zig-zag-encoded in a 4-bit nibble, Z/M precision in
  // a 3-bit field, which fixes these ranges.
  public static final byte MAX_COORD_PRECISION = 7;
  public static final byte MIN_COORD_PRECISION = -8;
  public static final byte MAX_EXTENDED_PRECISION = 3;
  public static final byte MIN_EXTENDED_PRECISION = -4;

  /**
   * Zig-zag encodes a signed int so that small magnitudes (positive or negative) map to small
   * unsigned values: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
   *
   * @param value the signed value to encode
   * @return the zig-zag encoding of {@code value}
   */
  public static int zigZagEncode(final int value) {
    return (value >> 31) ^ (value << 1);
  }

  /**
   * Inverts {@link #zigZagEncode(int)}: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ...
   *
   * @param value the zig-zag-encoded value
   * @return the original signed value
   */
  public static int zigZagDecode(final int value) {
    // Canonical zig-zag decode: the low bit selects the sign, the rest is the magnitude.
    return (value >>> 1) ^ -(value & 1);
  }
}

// ================================================
// FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TWKBWriter.java
// ================================================
// (TWKBWriter license header continues below)

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import java.io.ByteArrayOutputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.math.BigDecimal; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import com.clearspring.analytics.util.Varint; public class TWKBWriter { private final int maxPrecision; public TWKBWriter() { this(TWKBUtils.MAX_COORD_PRECISION); } public TWKBWriter(final int maxPrecision) { this.maxPrecision = Math.min(TWKBUtils.MAX_COORD_PRECISION, maxPrecision); } public byte[] write(final Geometry geom) { try (final ByteArrayOutputStream out = new ByteArrayOutputStream()) { try (final DataOutputStream output = new DataOutputStream(out)) { write(geom, output); return out.toByteArray(); } } catch (final IOException e) { throw new RuntimeException("Error writing TWKB geometry.", e); } } public void write(final Geometry geom, final DataOutput output) throws IOException { final byte type = getType(geom); if (geom.isEmpty()) { output.writeByte(getTypeAndPrecisionByte(type, 0)); output.writeByte(TWKBUtils.EMPTY_GEOMETRY); return; } byte metadata = 0; final Coordinate[] coordinates = geom.getCoordinates(); PrecisionWriter precision; if (Double.isNaN(coordinates[0].getZ()) || 
Double.isNaN(coordinates[0].getM())) { metadata |= TWKBUtils.EXTENDED_DIMENSIONS; precision = new ExtendedPrecisionWriter().calculate(coordinates, maxPrecision); } else { precision = new PrecisionWriter().calculate(coordinates, maxPrecision); } output.writeByte(getTypeAndPrecisionByte(type, precision.precision)); output.writeByte(metadata); precision.writeExtendedPrecision(output); switch (type) { case TWKBUtils.POINT_TYPE: writePoint((Point) geom, precision, output); break; case TWKBUtils.LINESTRING_TYPE: writeLineString((LineString) geom, precision, output); break; case TWKBUtils.POLYGON_TYPE: writePolygon((Polygon) geom, precision, output); break; case TWKBUtils.MULTIPOINT_TYPE: writeMultiPoint((MultiPoint) geom, precision, output); break; case TWKBUtils.MULTILINESTRING_TYPE: writeMultiLineString((MultiLineString) geom, precision, output); break; case TWKBUtils.MULTIPOLYGON_TYPE: writeMultiPolygon((MultiPolygon) geom, precision, output); break; case TWKBUtils.GEOMETRYCOLLECTION_TYPE: writeGeometryCollection((GeometryCollection) geom, precision, output); break; default: break; } } private void writePoint( final Point point, final PrecisionWriter precision, final DataOutput output) throws IOException { precision.writePoint(point.getCoordinate(), output); } private void writeLineString( final LineString line, final PrecisionWriter precision, final DataOutput output) throws IOException { precision.writePointArray(line.getCoordinates(), output); } private void writePolygon( final Polygon polygon, final PrecisionWriter precision, final DataOutput output) throws IOException { Varint.writeUnsignedVarInt(polygon.getNumInteriorRing() + 1, output); precision.writePointArray(polygon.getExteriorRing().getCoordinates(), output); for (int i = 0; i < polygon.getNumInteriorRing(); i++) { precision.writePointArray(polygon.getInteriorRingN(i).getCoordinates(), output); } } private void writeMultiPoint( final MultiPoint multiPoint, final PrecisionWriter precision, final DataOutput 
output) throws IOException { precision.writePointArray(multiPoint.getCoordinates(), output); } private void writeMultiLineString( final MultiLineString multiLine, final PrecisionWriter precision, final DataOutput output) throws IOException { Varint.writeUnsignedVarInt(multiLine.getNumGeometries(), output); for (int i = 0; i < multiLine.getNumGeometries(); i++) { precision.writePointArray(multiLine.getGeometryN(i).getCoordinates(), output); } } private void writeMultiPolygon( final MultiPolygon multiPolygon, final PrecisionWriter precision, final DataOutput output) throws IOException { Varint.writeUnsignedVarInt(multiPolygon.getNumGeometries(), output); for (int i = 0; i < multiPolygon.getNumGeometries(); i++) { final Polygon polygon = (Polygon) multiPolygon.getGeometryN(i); if (polygon.isEmpty()) { Varint.writeUnsignedVarInt(0, output); continue; } Varint.writeUnsignedVarInt(polygon.getNumInteriorRing() + 1, output); precision.writePointArray(polygon.getExteriorRing().getCoordinates(), output); for (int j = 0; j < polygon.getNumInteriorRing(); j++) { precision.writePointArray(polygon.getInteriorRingN(j).getCoordinates(), output); } } } private void writeGeometryCollection( final GeometryCollection geoms, final PrecisionWriter precision, final DataOutput output) throws IOException { Varint.writeUnsignedVarInt(geoms.getNumGeometries(), output); for (int i = 0; i < geoms.getNumGeometries(); i++) { final Geometry geom = geoms.getGeometryN(i); write(geom, output); } } private byte getTypeAndPrecisionByte(final byte type, final int precision) { byte typeAndPrecision = type; typeAndPrecision |= TWKBUtils.zigZagEncode(precision) << 4; return typeAndPrecision; } private byte getType(final Geometry geom) { if (geom instanceof Point) { return TWKBUtils.POINT_TYPE; } else if (geom instanceof LineString) { return TWKBUtils.LINESTRING_TYPE; } else if (geom instanceof Polygon) { return TWKBUtils.POLYGON_TYPE; } else if (geom instanceof MultiPoint) { return 
TWKBUtils.MULTIPOINT_TYPE; } else if (geom instanceof MultiLineString) { return TWKBUtils.MULTILINESTRING_TYPE; } else if (geom instanceof MultiPolygon) { return TWKBUtils.MULTIPOLYGON_TYPE; } return TWKBUtils.GEOMETRYCOLLECTION_TYPE; } private static class PrecisionWriter { private int precision = TWKBUtils.MIN_COORD_PRECISION; protected double precisionMultiplier = 0; public PrecisionWriter calculate(final Coordinate[] coordinates, final int maxPrecision) { for (int i = 0; i < coordinates.length; i++) { checkCoordinate(coordinates[i]); } finalize(maxPrecision); return this; } protected void checkCoordinate(final Coordinate c) { final BigDecimal xCoord = new BigDecimal(Double.toString(c.getX())).stripTrailingZeros(); precision = Math.max(xCoord.scale(), precision); final BigDecimal yCoord = new BigDecimal(Double.toString(c.getY())).stripTrailingZeros(); precision = Math.max(yCoord.scale(), precision); } protected void finalize(final int maxPrecision) { precision = Math.min(maxPrecision, precision); precisionMultiplier = Math.pow(10, precision); } public void writeExtendedPrecision(final DataOutput output) throws IOException { return; } public void writePoint(final Coordinate coordinate, final DataOutput output) throws IOException { Varint.writeSignedVarLong(Math.round(coordinate.getX() * precisionMultiplier), output); Varint.writeSignedVarLong(Math.round(coordinate.getY() * precisionMultiplier), output); } public void writePointArray(final Coordinate[] coordinates, final DataOutput output) throws IOException { long lastX = 0; long lastY = 0; Varint.writeUnsignedVarInt(coordinates.length, output); for (final Coordinate c : coordinates) { final long x = Math.round(c.getX() * precisionMultiplier); final long y = Math.round(c.getY() * precisionMultiplier); Varint.writeSignedVarLong(x - lastX, output); Varint.writeSignedVarLong(y - lastY, output); lastX = x; lastY = y; } } } private static class ExtendedPrecisionWriter extends PrecisionWriter { private boolean hasZ = 
false; private int zPrecision = TWKBUtils.MIN_EXTENDED_PRECISION; private double zPrecisionMultiplier = 0; private boolean hasM = false; private int mPrecision = TWKBUtils.MIN_EXTENDED_PRECISION; private double mPrecisionMultiplier = 0; @Override public PrecisionWriter calculate(final Coordinate[] coordinates, final int maxPrecision) { hasZ = !Double.isNaN(coordinates[0].getZ()); hasM = !Double.isNaN(coordinates[0].getM()); super.calculate(coordinates, maxPrecision); return this; } @Override protected void checkCoordinate(final Coordinate c) { super.checkCoordinate(c); if (hasZ) { final BigDecimal zCoord = new BigDecimal(Double.toString(c.getZ())).stripTrailingZeros(); zPrecision = Math.max(zCoord.scale(), zPrecision); } if (hasM) { final BigDecimal mCoord = new BigDecimal(Double.toString(c.getM())).stripTrailingZeros(); mPrecision = Math.max(mCoord.scale(), mPrecision); } } @Override protected void finalize(final int maxPrecision) { super.finalize(maxPrecision); if (hasZ) { zPrecision = Math.min(TWKBUtils.MAX_EXTENDED_PRECISION, zPrecision); zPrecisionMultiplier = Math.pow(10, zPrecision); } if (hasM) { mPrecision = Math.min(TWKBUtils.MAX_EXTENDED_PRECISION, mPrecision); mPrecisionMultiplier = Math.pow(10, mPrecision); } } @Override public void writeExtendedPrecision(final DataOutput output) throws IOException { byte extendedDimensions = 0; if (hasZ) { extendedDimensions |= 0x1; extendedDimensions |= TWKBUtils.zigZagEncode(zPrecision) << 2; } if (hasM) { extendedDimensions |= 0x2; extendedDimensions |= TWKBUtils.zigZagEncode(mPrecision) << 5; } output.writeByte(extendedDimensions); } @Override public void writePoint(final Coordinate coordinate, final DataOutput output) throws IOException { super.writePoint(coordinate, output); if (hasZ) { Varint.writeSignedVarLong(Math.round(coordinate.getZ() * zPrecisionMultiplier), output); } if (hasM) { Varint.writeSignedVarLong(Math.round(coordinate.getM() * mPrecisionMultiplier), output); } } @Override public void 
writePointArray(final Coordinate[] coordinates, final DataOutput output) throws IOException { long lastX = 0; long lastY = 0; long lastZ = 0; long lastM = 0; Varint.writeUnsignedVarInt(coordinates.length, output); for (final Coordinate c : coordinates) { final long x = Math.round(c.getX() * precisionMultiplier); final long y = Math.round(c.getY() * precisionMultiplier); Varint.writeSignedVarLong(x - lastX, output); Varint.writeSignedVarLong(y - lastY, output); lastX = x; lastY = y; if (hasZ) { final long z = Math.round(c.getZ() * zPrecisionMultiplier); Varint.writeSignedVarLong(z - lastZ, output); lastZ = z; } if (hasM) { final long m = Math.round(c.getZ() * mPrecisionMultiplier); Varint.writeSignedVarLong(m - lastM, output); lastM = m; } } } } } ================================================ FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TimeDescriptors.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.util;

import java.nio.ByteBuffer;
import java.util.BitSet;
import java.util.Locale;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

/** Describes temporally indexed attributes associated with a feature type. */
public class TimeDescriptors {
  // Either a single "time" attribute or a start/end range pair is populated; see hasTime().
  private AttributeDescriptor startRange;
  private AttributeDescriptor endRange;
  private AttributeDescriptor time;

  /** Creates an empty descriptor set (no temporal attributes). */
  public TimeDescriptors() {
    super();
    time = null;
    startRange = null;
    endRange = null;
  }

  /** Creates a descriptor set with a single time attribute. */
  public TimeDescriptors(final AttributeDescriptor time) {
    super();
    this.time = time;
    startRange = null;
    endRange = null;
  }

  /** Resolves descriptors from the attribute names held in the given configuration. */
  public TimeDescriptors(
      final SimpleFeatureType type,
      final TimeDescriptorConfiguration configuration) {
    update(type, configuration);
  }

  /** Creates a descriptor set with a start/end range pair. */
  public TimeDescriptors(final AttributeDescriptor startRange, final AttributeDescriptor endRange) {
    super();
    time = null;
    this.startRange = startRange;
    this.endRange = endRange;
  }

  /**
   * Looks up each non-null configured attribute name on the feature type and stores the matching
   * descriptor; null names leave the corresponding field untouched.
   */
  public void update(
      final SimpleFeatureType type,
      final TimeDescriptorConfiguration configuration) {
    if (configuration.timeName != null) {
      time = type.getDescriptor(configuration.timeName);
    }
    if (configuration.startRangeName != null) {
      startRange = type.getDescriptor(configuration.startRangeName);
    }
    if (configuration.endRangeName != null) {
      endRange = type.getDescriptor(configuration.endRangeName);
    }
  }

  public void setStartRange(final AttributeDescriptor startRange) {
    this.startRange = startRange;
  }

  public void setEndRange(final AttributeDescriptor endRange) {
    this.endRange = endRange;
  }

  public void setTime(final AttributeDescriptor time) {
    this.time = time;
  }

  public AttributeDescriptor getStartRange() {
    return startRange;
  }

  public AttributeDescriptor getEndRange() {
    return endRange;
  }

  public AttributeDescriptor getTime() {
    return time;
  }

  /** @return true when a single time attribute OR a complete start/end pair is present */
  public boolean hasTime() {
    return (time != null) || ((startRange != null) && (endRange != null));
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + ((endRange == null) ? 0 : endRange.hashCode());
    result = (prime * result) + ((startRange == null) ? 0 : startRange.hashCode());
    result = (prime * result) + ((time == null) ? 0 : time.hashCode());
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final TimeDescriptors other = (TimeDescriptors) obj;
    if (endRange == null) {
      if (other.endRange != null) {
        return false;
      }
    } else if (!endRange.equals(other.endRange)) {
      return false;
    }
    if (startRange == null) {
      if (other.startRange != null) {
        return false;
      }
    } else if (!startRange.equals(other.startRange)) {
      return false;
    }
    if (time == null) {
      if (other.time != null) {
        return false;
      }
    } else if (!time.equals(other.time)) {
      return false;
    }
    return true;
  }

  /**
   * Serializable/persistable mapping of the attribute NAMES (not descriptors) backing a
   * {@link TimeDescriptors} instance; stored in and recovered from feature-type attribute user
   * data under the keys "time", "start", and "end".
   */
  public static class TimeDescriptorConfiguration implements SimpleFeatureUserDataConfiguration {
    private static final long serialVersionUID = 2870075684501325546L;
    private String startRangeName = null;
    private String endRangeName = null;
    private String timeName = null;

    public TimeDescriptorConfiguration() {}

    /** Infers the configuration directly from the feature type's user data. */
    public TimeDescriptorConfiguration(final SimpleFeatureType type) {
      configureFromType(type);
    }

    public String getStartRangeName() {
      return startRangeName;
    }

    public void setStartRangeName(final String startRangeName) {
      this.startRangeName = startRangeName;
    }

    public String getEndRangeName() {
      return endRangeName;
    }

    public void setEndRangeName(final String endRangeName) {
      this.endRangeName = endRangeName;
    }

    public String getTimeName() {
      return timeName;
    }

    public void setTimeName(final String timeName) {
      this.timeName = timeName;
    }

    /**
     * Writes this configuration into the feature type's attribute user data: every temporal
     * attribute is first marked "time"=false, then the configured start/end/time attributes are
     * flagged true under their respective keys.
     *
     * <p>NOTE(review): a configured name that does not exist on the type would make
     * {@code getDescriptor(...)} return null and NPE here — assumes names were derived from this
     * same type.
     */
    @Override
    public void updateType(final SimpleFeatureType persistType) {
      for (final AttributeDescriptor attrDesc : persistType.getAttributeDescriptors()) {
        final Class bindingClass = attrDesc.getType().getBinding();
        if (TimeUtils.isTemporal(bindingClass)) {
          attrDesc.getUserData().put("time", Boolean.FALSE);
        }
      }
      if (startRangeName != null) {
        persistType.getDescriptor(startRangeName).getUserData().put("start", Boolean.TRUE);
      }
      if (endRangeName != null) {
        persistType.getDescriptor(endRangeName).getUserData().put("end", Boolean.TRUE);
      }
      if (timeName != null) {
        persistType.getDescriptor(timeName).getUserData().put("time", Boolean.TRUE);
      }
    }

    /**
     * Infers temporal attribute names from the feature type. Explicit user-data flags
     * ("time"/"start"/"end") win; otherwise temporal attributes whose names start with
     * "start"/"end" are treated as range endpoints, and any remaining temporal attribute becomes
     * the time attribute. Afterwards the result is normalized: a complete start/end pair clears
     * the single time attribute, while an incomplete pair collapses to a single time attribute.
     */
    @Override
    public void configureFromType(final SimpleFeatureType persistType) {
      for (final AttributeDescriptor attrDesc : persistType.getAttributeDescriptors()) {
        final Class bindingClass = attrDesc.getType().getBinding();
        if (TimeUtils.isTemporal(bindingClass)) {
          final Boolean isTime = (Boolean) attrDesc.getUserData().get("time");
          if (isTime != null) {
            if (isTime.booleanValue()) {
              // An explicit time attribute overrides everything else; stop scanning.
              setTimeName(attrDesc.getLocalName());
              setStartRangeName(null);
              setEndRangeName(null);
              break;
            }
          }
          final Boolean isStart = (Boolean) attrDesc.getUserData().get("start");
          final Boolean isEnd = (Boolean) attrDesc.getUserData().get("end");
          // The else-if chain gives explicit flags priority over name-prefix heuristics, and
          // only falls through to the plain time attribute when nothing else matched.
          if ((isStart != null) && isStart.booleanValue()) {
            setStartRangeName(attrDesc.getLocalName());
          } else if ((isStart == null)
              && (getStartRangeName() == null)
              && attrDesc.getLocalName().toLowerCase(Locale.ENGLISH).startsWith("start")) {
            setStartRangeName(attrDesc.getLocalName());
          } else if ((isEnd != null) && isEnd.booleanValue()) {
            setEndRangeName(attrDesc.getLocalName());
          } else if ((isEnd == null)
              && (getEndRangeName() == null)
              && attrDesc.getLocalName().toLowerCase(Locale.ENGLISH).startsWith("end")) {
            setEndRangeName(attrDesc.getLocalName());
          } else if ((isTime == null) && (getTimeName() == null)) {
            setTimeName(attrDesc.getLocalName());
          }
        }
      }
      // Normalize: a full pair wins over a single time attribute; a half pair degrades to one.
      if (getStartRangeName() != null) {
        if (getEndRangeName() != null) {
          setTimeName(null);
        } else {
          if (getTimeName() == null) {
            setTimeName(getStartRangeName());
          }
          setStartRangeName(null);
        }
      } else if ((getEndRangeName() != null) && (getStartRangeName() == null)) {
        if (getTimeName() == null) {
          setTimeName(getEndRangeName());
        }
        setEndRangeName(null);
      }
    }

    /**
     * Serializes to: one presence-bitmask byte (bit 0 = time, bit 1 = start, bit 2 = end),
     * followed by a varint-length-prefixed UTF byte string for each present name, in that order.
     */
    @Override
    public byte[] toBinary() {
      final BitSet bits = new BitSet(3);
      int length = 1;
      byte[] timeBytes, startRangeBytes, endRangeBytes;
      if (timeName != null) {
        bits.set(0);
        timeBytes = StringUtils.stringToBinary(timeName);
        length += VarintUtils.unsignedIntByteLength(timeBytes.length);
        length += timeBytes.length;
      } else {
        timeBytes = null;
      }
      if (startRangeName != null) {
        bits.set(1);
        startRangeBytes = StringUtils.stringToBinary(startRangeName);
        length += VarintUtils.unsignedIntByteLength(startRangeBytes.length);
        length += startRangeBytes.length;
      } else {
        startRangeBytes = null;
      }
      if (endRangeName != null) {
        bits.set(2);
        endRangeBytes = StringUtils.stringToBinary(endRangeName);
        length += VarintUtils.unsignedIntByteLength(endRangeBytes.length);
        length += endRangeBytes.length;
      } else {
        endRangeBytes = null;
      }
      final ByteBuffer buf = ByteBuffer.allocate(length);
      final byte[] bitMask = bits.toByteArray();
      // An all-clear BitSet serializes to an empty array, hence the explicit zero byte.
      buf.put(bitMask.length > 0 ? bitMask[0] : (byte) 0);
      if (timeBytes != null) {
        VarintUtils.writeUnsignedInt(timeBytes.length, buf);
        buf.put(timeBytes);
      }
      if (startRangeBytes != null) {
        VarintUtils.writeUnsignedInt(startRangeBytes.length, buf);
        buf.put(startRangeBytes);
      }
      if (endRangeBytes != null) {
        VarintUtils.writeUnsignedInt(endRangeBytes.length, buf);
        buf.put(endRangeBytes);
      }
      return buf.array();
    }

    /** Inverse of {@link #toBinary()}; absent bits reset the corresponding name to null. */
    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final BitSet bitSet = BitSet.valueOf(new byte[] {buf.get()});
      if (bitSet.get(0)) {
        final byte[] timeBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
        timeName = StringUtils.stringFromBinary(timeBytes);
      } else {
        timeName = null;
      }
      if (bitSet.get(1)) {
        final byte[] startRangeBytes =
            ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
        startRangeName = StringUtils.stringFromBinary(startRangeBytes);
      } else {
        startRangeName = null;
      }
      if (bitSet.get(2)) {
        final byte[] endRangeBytes =
            ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
        endRangeName = StringUtils.stringFromBinary(endRangeBytes);
      } else {
        endRangeName = null;
      }
    }
  }
}

================================================
FILE: core/geotime/src/main/java/org/locationtech/geowave/core/geotime/util/TimeUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.util;

import java.time.Instant;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.temporal.object.DefaultInstant;
import org.geotools.temporal.object.DefaultPeriod;
import org.geotools.temporal.object.DefaultPosition;
import org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;
import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;
import org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterFactory2;
import org.opengis.temporal.Period;
import org.opengis.temporal.Position;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.extra.Interval;

/**
 * This class contains a set of Temporal utility methods that are generally useful throughout the
 * GeoWave core codebase.
 */
public class TimeUtils {
  private static final Logger LOGGER = LoggerFactory.getLogger(TimeUtils.class);

  // sentinel encoding a null time value; because we use varint encoding we want it to be small
  // enough to only take up a byte, but random enough that it's as unlikely as possible to be
  // found as a "real" value
  public static long RESERVED_MILLIS_FOR_NULL = -113;

  /**
   * Convert a calendar object to a long in the form of milliseconds since the epoch of January 1,
   * 1970. The time is converted to GMT if it is not already in that timezone so that all times
   * will be in a standard timezone.
   *
   * @param cal The calendar object
   * @return The time in milliseconds
   */
  public static long calendarToGMTMillis(final Calendar cal) {
    // get Date object representing this Calendar's time value, millisecond
    // offset from the Epoch, January 1, 1970 00:00:00.000 GMT (Gregorian)
    final Date date = cal.getTime();
    // Returns the number of milliseconds since January 1, 1970, 00:00:00
    // GMT represented by this Date object.
    final long time = date.getTime();
    return time;
  }

  /**
   * Builds an OGC "during" filter on a single time field.
   *
   * @param startTimeMillis start time (inclusive)
   * @param endTimeMillis end time (exclusive)
   * @param singleTimeField the attribute name holding the time value
   * @return the during filter
   */
  public static Filter toDuringFilter(
      final long startTimeMillis,
      final long endTimeMillis,
      final String singleTimeField) {
    final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();
    // "during" is exclusive of the period endpoints, so back the start up by 1 ms to make
    // startTimeMillis itself match
    final Position ip1 = new DefaultPosition(new Date(startTimeMillis - 1));
    final Position ip2 = new DefaultPosition(new Date(endTimeMillis));
    final Period period = new DefaultPeriod(new DefaultInstant(ip1), new DefaultInstant(ip2));
    return factory.during(factory.property(singleTimeField), factory.literal(period));
  }

  /**
   * Builds a filter constraining both a start-time field and an end-time field to the given
   * millisecond bounds (inclusive on both ends).
   *
   * @param startTimeMillis lower bound in epoch millis
   * @param endTimeMillis upper bound in epoch millis
   * @param startTimeField attribute name of the range start
   * @param endTimeField attribute name of the range end (may equal startTimeField)
   * @return the combined filter
   */
  public static Filter toFilter(
      final long startTimeMillis,
      final long endTimeMillis,
      final String startTimeField,
      final String endTimeField) {
    final FilterFactory2 factory = CommonFactoryFinder.getFilterFactory2();
    if (startTimeField.equals(endTimeField)) {
      // single-field case: one between-style conjunction is sufficient
      return factory.and(
          factory.greaterOrEqual(
              factory.property(startTimeField),
              factory.literal(new Date(startTimeMillis))),
          factory.lessOrEqual(
              factory.property(endTimeField),
              factory.literal(new Date(endTimeMillis))));
    }
    // this looks redundant to use both start and end time fields, but it helps parsing logic
    return factory.and(
        factory.and(
            factory.greaterOrEqual(
                factory.property(startTimeField),
                factory.literal(new Date(startTimeMillis))),
            factory.lessOrEqual(
                factory.property(startTimeField),
                factory.literal(new Date(endTimeMillis)))),
        factory.and(
            factory.greaterOrEqual(
                factory.property(endTimeField),
                factory.literal(new Date(startTimeMillis))),
            factory.lessOrEqual(
                factory.property(endTimeField),
                factory.literal(new Date(endTimeMillis)))));
  }

  /**
   * Get the time in millis of this temporal object (either numeric interpreted as millisecond
   * time in GMT, Date, or Calendar)
   *
   * @param timeObj The temporal object
   * @return The time in milliseconds since the epoch in GMT, or
   *         {@link #RESERVED_MILLIS_FOR_NULL} for null/unrecognized input
   */
  public static long getTimeMillis(final Object timeObj) {
    // handle dates, calendars, and Numbers only
    if (timeObj != null) {
      if (timeObj instanceof Calendar) {
        return calendarToGMTMillis(((Calendar) timeObj));
      } else if (timeObj instanceof Date) {
        return ((Date) timeObj).getTime();
      } else if (timeObj instanceof Number) {
        return ((Number) timeObj).longValue();
      } else {
        LOGGER.warn(
            "Time value '"
                + timeObj
                + "' of type '"
                + timeObj.getClass()
                + "' is not of expected temporal type");
      }
    }
    return RESERVED_MILLIS_FOR_NULL;
  }

  /**
   * Determine if this class is a supported temporal class. Numeric classes are not determined to
   * be temporal in this case even though they can be interpreted as milliseconds because we do
   * not want to be over-selective and mis-interpret numeric fields
   *
   * @param bindingClass The binding class of the attribute
   * @return A flag indicating whether the class is temporal
   */
  public static boolean isTemporal(final Class bindingClass) {
    // because Longs can also be numeric, just allow Dates and Calendars
    // class bindings to be temporal
    return (Calendar.class.isAssignableFrom(bindingClass)
        || Date.class.isAssignableFrom(bindingClass));
  }

  /**
   * Instantiates the class type as a new object with the temporal value being the longVal
   * interpreted as milliseconds since the epoch in GMT
   *
   * @param bindingClass The class to try to instantiate for this time value. Currently
   *        java.util.Calendar, java.util.Date, and java.lang.Long are supported.
   * @param longVal A value to be interpreted as milliseconds since the epoch in GMT
   * @return An instance of the binding class with the value interpreted from longVal, or null
   *         when longVal is the reserved null sentinel or the class is unsupported
   */
  public static Object getTimeValue(final Class bindingClass, final long longVal) {
    if (longVal == RESERVED_MILLIS_FOR_NULL) {
      // indicator that the time value is null
      return null;
    }
    if (Calendar.class.isAssignableFrom(bindingClass)) {
      final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
      cal.setTimeInMillis(longVal);
      return cal;
    } else if (Date.class.isAssignableFrom(bindingClass)) {
      return new Date(longVal);
    } else if (Long.class.isAssignableFrom(bindingClass)) {
      return Long.valueOf(longVal);
    }
    LOGGER.warn(
        "Numeric value '"
            + longVal
            + "' of type '"
            + bindingClass
            + "' is not of expected temporal type");
    return null;
  }

  /**
   * Resolves the temporal constraints that apply to the given time descriptors: a complete
   * start/end range pair delegates to {@link #composeRangeTemporalConstraints}; otherwise the
   * single time attribute's constraints are used if present.
   *
   * @param timeDescriptors the type's time descriptors
   * @param timeBoundsSet the constraint set (may be null or empty)
   * @return the matching constraints, or an empty {@link TemporalConstraints} when none apply
   */
  public static TemporalConstraints getTemporalConstraintsForDescriptors(
      final TimeDescriptors timeDescriptors,
      final TemporalConstraintsSet timeBoundsSet) {
    if ((timeBoundsSet == null) || timeBoundsSet.isEmpty()) {
      return new TemporalConstraints();
    }
    if ((timeDescriptors.getStartRange() != null) && (timeDescriptors.getEndRange() != null)) {
      return composeRangeTemporalConstraints(timeDescriptors, timeBoundsSet);
    } else if ((timeDescriptors.getTime() != null)
        && timeBoundsSet.hasConstraintsFor(timeDescriptors.getTime().getLocalName())) {
      return timeBoundsSet.getConstraintsFor(timeDescriptors.getTime().getLocalName());
    }
    return new TemporalConstraints();
  }

  /**
   * Compose temporal constraints given the constraint set and the descriptors for the index.
   *
   * @param timeDescriptors the type's time descriptors
   * @param constraintsSet the constraint set to search
   * @return an empty {@link TemporalConstraints} if the constraints set does not have the fields
   *         required by the time descriptors (note: never null)
   */
  public static TemporalConstraints composeRangeTemporalConstraints(
      final TimeDescriptors timeDescriptors,
      final TemporalConstraintsSet constraintsSet) {
    if ((timeDescriptors.getEndRange() != null) && (timeDescriptors.getStartRange() != null)) {
      final String ename = timeDescriptors.getEndRange().getLocalName();
      final String sname = timeDescriptors.getStartRange().getLocalName();
      if (constraintsSet.hasConstraintsForRange(sname, ename)) {
        return constraintsSet.getConstraintsForRange(sname, ename);
      }
    } else if ((timeDescriptors.getTime() != null)
        && constraintsSet.hasConstraintsFor(timeDescriptors.getTime().getLocalName())) {
      return constraintsSet.getConstraintsFor(timeDescriptors.getTime().getLocalName());
    }
    return new TemporalConstraints();
  }

  /** Convenience overload reading the named attribute from the feature. */
  public static Interval getInterval(final SimpleFeature entry, final String fieldName) {
    return getInterval(entry.getAttribute(fieldName));
  }

  /**
   * Coerces a temporal object (java.time Instant, OpenGIS Instant, Date, Calendar, or Number
   * interpreted as epoch millis) to a {@link Instant}.
   *
   * @param timeObject the temporal object
   * @return the instant, or null for null/unrecognized input
   */
  public static Instant getInstant(final Object timeObject) {
    if (timeObject == null) {
      return null;
    }
    if (timeObject instanceof Instant) {
      return (Instant) timeObject;
    }
    if (timeObject instanceof org.opengis.temporal.Instant) {
      return ((org.opengis.temporal.Instant) timeObject).getPosition().getDate().toInstant();
    }
    if (timeObject instanceof Date) {
      return Instant.ofEpochMilli(((Date) timeObject).getTime());
    } else if (timeObject instanceof Calendar) {
      return Instant.ofEpochMilli(((Calendar) timeObject).getTimeInMillis());
    } else if (timeObject instanceof Number) {
      return Instant.ofEpochMilli(((Number) timeObject).longValue());
    }
    return null;
  }

  /**
   * Coerces a temporal object to an {@link Interval}; a Period maps to its begin/end, any single
   * instant maps to a degenerate (zero-length) interval.
   *
   * @param timeObject the temporal object
   * @return the interval, or null for unrecognized input
   */
  public static Interval getInterval(final Object timeObject) {
    if (timeObject instanceof Interval) {
      return (Interval) timeObject;
    }
    if (timeObject instanceof Period) {
      return Interval.of(
          ((Period) timeObject).getBeginning().getPosition().getDate().toInstant(),
          ((Period) timeObject).getEnding().getPosition().getDate().toInstant());
    }
    final Instant time = getInstant(timeObject);
    if (time == null) {
      return null;
    }
    return Interval.of(time, time);
  }

  /**
   * Builds an interval from separate start/end temporal objects; a missing endpoint collapses to
   * a degenerate interval at the other endpoint.
   *
   * @param startTimeObject the start (may be null)
   * @param endTimeObject the end (may be null)
   * @return the interval, or null when both endpoints are unresolvable
   */
  public static Interval getInterval(final Object startTimeObject, final Object endTimeObject) {
    final Instant startTime = getInstant(startTimeObject);
    final Instant endTime = getInstant(endTimeObject);
    if (startTime == null) {
      if (endTime != null) {
        return Interval.of(endTime, endTime);
      }
      return null;
    }
    if (endTime == null) {
      return Interval.of(startTime, startTime);
    }
    return Interval.of(startTime, endTime);
  }

  /**
   * @return the interval's end, widened by 1 ms for empty (zero-length) intervals so the end is
   *         strictly after the start
   */
  public static Instant getIntervalEnd(final Interval interval) {
    if (interval.isEmpty()) {
      return Instant.ofEpochMilli(interval.getStart().toEpochMilli() + 1);
    }
    return interval.getEnd();
  }

  /**
   * Determine if a time or range descriptor is set. If so, then use it, otherwise infer.
   *
   * @param persistType - FeatureType that will be scanned for TimeAttributes
   * @return the time descriptors
   */
  public static final TimeDescriptors inferTimeAttributeDescriptor(
      final SimpleFeatureType persistType) {
    final TimeDescriptorConfiguration config = new TimeDescriptorConfiguration(persistType);
    final TimeDescriptors timeDescriptors = new TimeDescriptors(persistType, config);

    // Up the meta-data so that it is clear and visible any inference that
    // has occurred here. Also, this is critical to
    // serialization/deserialization
    config.updateType(persistType);
    return timeDescriptors;
  }
}


================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi
================================================
org.locationtech.geowave.core.geotime.GeoTimePersistableRegistry


================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi
================================================
org.locationtech.geowave.core.geotime.store.field.CalendarArraySerializationProvider
org.locationtech.geowave.core.geotime.store.field.CalendarSerializationProvider
org.locationtech.geowave.core.geotime.store.field.DateArraySerializationProvider
org.locationtech.geowave.core.geotime.store.field.DateSerializationProvider
org.locationtech.geowave.core.geotime.store.field.GeometryArraySerializationProvider
org.locationtech.geowave.core.geotime.store.field.GeometrySerializationProvider
org.locationtech.geowave.core.geotime.store.field.IntervalArraySerializationProvider
org.locationtech.geowave.core.geotime.store.field.IntervalSerializationProvider


================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi
================================================
org.locationtech.geowave.core.geotime.index.SpatialAttributeIndexProvider
org.locationtech.geowave.core.geotime.index.TemporalAttributeIndexProvider


================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI
================================================
org.locationtech.geowave.core.geotime.adapter.GeotimeRegisteredIndexFieldMappers
================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi
================================================
org.locationtech.geowave.core.geotime.store.query.gwql.GWQLSpatialTemporalExtensions


================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi
================================================
org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider
org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider
org.locationtech.geowave.core.geotime.index.TemporalDimensionalityTypeProvider


================================================
FILE: core/geotime/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI
================================================
org.locationtech.geowave.core.geotime.store.statistics.GeotimeRegisteredStatistics


================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/TestGeoTimePersistableRegistry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime;

import org.locationtech.geowave.core.geotime.store.data.PersistenceEncodingTest.GeoObjDataAdapter;
import org.locationtech.geowave.core.geotime.util.GeometryUtilsTest.ExampleNumericIndexStrategy;
import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;
import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;

/**
 * Registers test-only persistable classes with the persistence SPI so they can be round-tripped
 * through PersistenceUtils in unit tests.
 */
public class TestGeoTimePersistableRegistry implements
    PersistableRegistrySpi,
    InternalPersistableRegistry {

  @Override
  public PersistableIdAndConstructor[] getSupportedPersistables() {
    // ids 10300-10301 are used for these test persistables
    return new PersistableIdAndConstructor[] {
        new PersistableIdAndConstructor((short) 10300, ExampleNumericIndexStrategy::new),
        new PersistableIdAndConstructor((short) 10301, GeoObjDataAdapter::new),};
  }
}


================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptorTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.adapter; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.geotools.referencing.CRS; import org.junit.Test; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; public class SpatialFieldDescriptorTest { @Test public void testFieldDescriptor() throws NoSuchAuthorityCodeException, FactoryException { final SpatialFieldDescriptor testDescriptor = new SpatialFieldDescriptorBuilder<>(String.class).fieldName("testFieldName").indexHint( new IndexDimensionHint("testDimensionHint")).crs(CRS.decode("EPSG:3857")).build(); assertEquals("testFieldName", testDescriptor.fieldName()); assertEquals(String.class, testDescriptor.bindingClass()); assertEquals(1, testDescriptor.indexHints().size()); assertEquals(CRS.decode("EPSG:3857"), testDescriptor.crs()); assertTrue(testDescriptor.indexHints().contains(new IndexDimensionHint("testDimensionHint"))); final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(testDescriptor); final SpatialFieldDescriptor deserialized = (SpatialFieldDescriptor) PersistenceUtils.fromBinary(fieldDescriptorBytes); assertEquals("testFieldName", deserialized.fieldName()); assertEquals(String.class, deserialized.bindingClass()); assertEquals(1, deserialized.indexHints().size()); assertEquals(CRS.decode("EPSG:3857"), testDescriptor.crs()); 
assertTrue(deserialized.indexHints().contains(new IndexDimensionHint("testDimensionHint"))); } @Test public void testNoCRS() { final SpatialFieldDescriptor testDescriptor = new SpatialFieldDescriptorBuilder<>(String.class).fieldName("testFieldName").indexHint( new IndexDimensionHint("testDimensionHint")).build(); assertEquals("testFieldName", testDescriptor.fieldName()); assertEquals(String.class, testDescriptor.bindingClass()); assertEquals(1, testDescriptor.indexHints().size()); assertEquals(GeometryUtils.getDefaultCRS(), testDescriptor.crs()); assertTrue(testDescriptor.indexHints().contains(new IndexDimensionHint("testDimensionHint"))); final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(testDescriptor); final SpatialFieldDescriptor deserialized = (SpatialFieldDescriptor) PersistenceUtils.fromBinary(fieldDescriptorBytes); assertEquals("testFieldName", deserialized.fieldName()); assertEquals(String.class, deserialized.bindingClass()); assertEquals(1, deserialized.indexHints().size()); assertEquals(GeometryUtils.getDefaultCRS(), testDescriptor.crs()); assertTrue(deserialized.indexHints().contains(new IndexDimensionHint("testDimensionHint"))); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldMapperTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import static org.junit.Assert.assertEquals;
import org.geotools.referencing.CRS;
import org.junit.Test;
import org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.DoubleLatLonFieldMapper;
import org.locationtech.geowave.core.geotime.adapter.LatLonFieldMapper.FloatLatLonFieldMapper;
import org.locationtech.geowave.core.geotime.store.dimension.SpatialField.SpatialIndexFieldOptions;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.jts.geom.Geometry;
import org.opengis.referencing.FactoryException;
import com.google.common.collect.Lists;

/**
 * Verifies the spatial index field mappers (geometry and lat/lon pair variants): field typing,
 * adapter-field bookkeeping, declared-field ordering, and binary serialization round trips.
 */
public class SpatialFieldMapperTest {

  @Test
  public void testGeometryFieldMapper() throws FactoryException {
    FieldDescriptor testField =
        new SpatialFieldDescriptorBuilder<>(Geometry.class).crs(
            CRS.decode("EPSG:3857")).spatialIndexHint().fieldName("testField").build();
    GeometryFieldMapper mapper = new GeometryFieldMapper();
    mapper.init(
        "idx",
        Lists.newArrayList(testField),
        new SpatialIndexFieldOptions(CRS.decode("EPSG:4326")));
    assertEquals(Geometry.class, mapper.indexFieldType());
    assertEquals(Geometry.class, mapper.adapterFieldType());
    assertEquals(1, mapper.adapterFieldCount());
    assertEquals("testField", mapper.getAdapterFields()[0]);
    // round-trip through binary serialization and re-check the same properties
    final byte[] mapperBinary = PersistenceUtils.toBinary(mapper);
    mapper = (GeometryFieldMapper) PersistenceUtils.fromBinary(mapperBinary);
    assertEquals(Geometry.class, mapper.indexFieldType());
    assertEquals(Geometry.class, mapper.adapterFieldType());
    assertEquals(1, mapper.adapterFieldCount());
    assertEquals("testField", mapper.getAdapterFields()[0]);
  }

  @Test
  public void testDoubleLatLonFieldMapper() throws FactoryException {
    // latitude is declared first, so the mapper should NOT be x-axis-first
    FieldDescriptor latitude =
        new SpatialFieldDescriptorBuilder<>(Double.class).crs(
            CRS.decode("EPSG:3857")).latitudeIndexHint().fieldName("lat").build();
    FieldDescriptor longitude =
        new SpatialFieldDescriptorBuilder<>(Double.class).crs(
            CRS.decode("EPSG:3857")).longitudeIndexHint().fieldName("lon").build();
    DoubleLatLonFieldMapper mapper = new DoubleLatLonFieldMapper();
    mapper.init(
        "idx",
        Lists.newArrayList(latitude, longitude),
        new SpatialIndexFieldOptions(CRS.decode("EPSG:4326")));
    assertEquals(Geometry.class, mapper.indexFieldType());
    assertEquals(Double.class, mapper.adapterFieldType());
    assertEquals(2, mapper.adapterFieldCount());
    assertEquals("lat", mapper.getAdapterFields()[0]);
    assertEquals("lon", mapper.getAdapterFields()[1]);
    assertEquals(false, mapper.xAxisFirst);
    // round-trip through binary serialization and re-check the same properties
    final byte[] mapperBinary = PersistenceUtils.toBinary(mapper);
    mapper = (DoubleLatLonFieldMapper) PersistenceUtils.fromBinary(mapperBinary);
    assertEquals(Geometry.class, mapper.indexFieldType());
    assertEquals(Double.class, mapper.adapterFieldType());
    assertEquals(2, mapper.adapterFieldCount());
    assertEquals("lat", mapper.getAdapterFields()[0]);
    assertEquals("lon", mapper.getAdapterFields()[1]);
    assertEquals(false, mapper.xAxisFirst);
  }

  @Test
  public void testFloatLatLonFieldMapper() throws FactoryException {
    // longitude is declared first, so the mapper SHOULD be x-axis-first
    FieldDescriptor longitude =
        new SpatialFieldDescriptorBuilder<>(Float.class).crs(
            CRS.decode("EPSG:3857")).longitudeIndexHint().fieldName("lon").build();
    FieldDescriptor latitude =
        new SpatialFieldDescriptorBuilder<>(Float.class).crs(
            CRS.decode("EPSG:3857")).latitudeIndexHint().fieldName("lat").build();
    FloatLatLonFieldMapper mapper = new FloatLatLonFieldMapper();
    mapper.init(
        "idx",
        Lists.newArrayList(longitude, latitude),
        new SpatialIndexFieldOptions(CRS.decode("EPSG:4326")));
    assertEquals(Geometry.class, mapper.indexFieldType());
    assertEquals(Float.class, mapper.adapterFieldType());
    assertEquals(2, mapper.adapterFieldCount());
    assertEquals("lon", mapper.getAdapterFields()[0]);
    assertEquals("lat", mapper.getAdapterFields()[1]);
    assertEquals(true, mapper.xAxisFirst);
    // round-trip through binary serialization and re-check the same properties
    final byte[] mapperBinary = PersistenceUtils.toBinary(mapper);
    mapper = (FloatLatLonFieldMapper) PersistenceUtils.fromBinary(mapperBinary);
    assertEquals(Geometry.class, mapper.indexFieldType());
    assertEquals(Float.class, mapper.adapterFieldType());
    assertEquals(2, mapper.adapterFieldCount());
    assertEquals("lon", mapper.getAdapterFields()[0]);
    assertEquals("lat", mapper.getAdapterFields()[1]);
    assertEquals(true, mapper.xAxisFirst);
  }
}


================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/adapter/annotation/SpatialTemporalAnnotationsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter.annotation;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Date;
import org.geotools.referencing.CRS;
import org.junit.Test;
import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor;
import org.locationtech.geowave.core.geotime.store.dimension.SpatialField;
import org.locationtech.geowave.core.geotime.store.dimension.TimeField;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter;
import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType;
import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.NoSuchAuthorityCodeException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Verifies that GeoWave spatial/temporal field annotations on a plain data class produce the
 * expected field descriptors (index hints, CRS), survive adapter serialization, and support
 * reading field values and rebuilding objects.
 */
public class SpatialTemporalAnnotationsTest {
  private static final String TEST_CRS_CODE = "EPSG:3857";

  @Test
  public void testSpatialTemporalAnnotations()
      throws NoSuchAuthorityCodeException, FactoryException {
    BasicDataTypeAdapter adapter =
        BasicDataTypeAdapter.newAdapter("myType", TestType.class, "name");
    final CoordinateReferenceSystem testCRS = CRS.decode(TEST_CRS_CODE);
    assertEquals("myType", adapter.getTypeName());
    assertEquals(TestType.class, adapter.getDataClass());
    assertEquals(3, adapter.getFieldDescriptors().length);
    assertNotNull(adapter.getFieldDescriptor("name"));
    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor("name").bindingClass()));
    SpatialFieldDescriptor geometryDescriptor =
        (SpatialFieldDescriptor) adapter.getFieldDescriptor("geometry");
    assertNotNull(geometryDescriptor);
    assertTrue(Geometry.class.isAssignableFrom(geometryDescriptor.bindingClass()));
    // spatialIndexHint = true on the annotation should yield both lat and lon hints
    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LATITUDE_DIMENSION_HINT));
    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT));
    assertEquals(testCRS, geometryDescriptor.crs());
    assertNotNull(adapter.getFieldDescriptor("date"));
    assertTrue(Date.class.isAssignableFrom(adapter.getFieldDescriptor("date").bindingClass()));
    assertTrue(
        adapter.getFieldDescriptor("date").indexHints().contains(TimeField.TIME_DIMENSION_HINT));
    // round-trip the adapter through binary serialization and re-check the same properties
    final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);
    adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);
    assertEquals("myType", adapter.getTypeName());
    assertEquals(TestType.class, adapter.getDataClass());
    assertEquals(3, adapter.getFieldDescriptors().length);
    assertNotNull(adapter.getFieldDescriptor("name"));
    assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor("name").bindingClass()));
    geometryDescriptor = (SpatialFieldDescriptor) adapter.getFieldDescriptor("geometry");
    assertTrue(Geometry.class.isAssignableFrom(geometryDescriptor.bindingClass()));
    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LATITUDE_DIMENSION_HINT));
    assertTrue(geometryDescriptor.indexHints().contains(SpatialField.LONGITUDE_DIMENSION_HINT));
    assertEquals(testCRS, geometryDescriptor.crs());
    assertNotNull(adapter.getFieldDescriptor("date"));
    assertTrue(Date.class.isAssignableFrom(adapter.getFieldDescriptor("date").bindingClass()));
    assertTrue(
        adapter.getFieldDescriptor("date").indexHints().contains(TimeField.TIME_DIMENSION_HINT));
    // reading field values from an entry via the adapter
    final TestType testEntry =
        new TestType(
            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(5, 5)),
            new Date(100),
            "id1");
    assertEquals("id1", adapter.getFieldValue(testEntry, "name"));
    assertTrue(
        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(5, 5)).equalsExact(
            (Geometry) adapter.getFieldValue(testEntry, "geometry")));
    assertEquals(new Date(100), adapter.getFieldValue(testEntry, "date"));
    // rebuilding an object from a field array in the adapter's declared field order
    final Object[] fields = new Object[3];
    for (int i = 0; i < fields.length; i++) {
      switch (adapter.getFieldDescriptors()[i].fieldName()) {
        case "name":
          fields[i] = "id1";
          break;
        case "geometry":
          fields[i] = GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(10, 10));
          break;
        case "date":
          fields[i] = new Date(500);
          break;
      }
    }
    final TestType builtEntry = adapter.buildObject("id1", fields);
    assertEquals("id1", adapter.getFieldValue(builtEntry, "name"));
    assertTrue(
        GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(10, 10)).equalsExact(
            (Geometry) adapter.getFieldValue(builtEntry, "geometry")));
    assertEquals(new Date(500), adapter.getFieldValue(builtEntry, "date"));
  }

  /** Annotated fixture type with spatial, temporal, and plain fields. */
  @GeoWaveDataType
  private static class TestType {
    @GeoWaveSpatialField(crs = TEST_CRS_CODE, spatialIndexHint = true)
    private Geometry geometry;

    @GeoWaveTemporalField(timeIndexHint = true)
    private Date date;

    @GeoWaveField
    private String name;

    // no-arg constructor required for adapter instantiation
    protected TestType() {}

    public TestType(final Geometry geometry, final Date date, final String name) {
      this.geometry = geometry;
      this.date = date;
      this.name = name;
    }

    public Geometry getGeometry() {
      return geometry;
    }

    public Date getDate() {
      return date;
    }

    public String getName() {
      return name;
    }
  }
}


================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/binning/SpatialBinningTypeTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.binning; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.function.Function; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LinearRing; import com.google.common.collect.ImmutableMap; public class SpatialBinningTypeTest { private final static Map TYPE_TO_ERROR_THRESHOLD = ImmutableMap.of( SpatialBinningType.GEOHASH, 1E-14, SpatialBinningType.S2, 0.01, SpatialBinningType.H3, // H3 approximations can just be *bad* 0.25); @Test public void testPolygons() { testGeometry( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(33, 33), new Coordinate(34, 34), new Coordinate(33, 34), new Coordinate(33, 33)}), Geometry::getArea); testGeometry( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(0.5, 0.6), new Coordinate(0.7, 0.8), new Coordinate(1, 0.9), new Coordinate(0.8, 0.7), new Coordinate(0.5, 0.6)}), Geometry::getArea); testGeometry( GeometryUtils.GEOMETRY_FACTORY.createPolygon( GeometryUtils.GEOMETRY_FACTORY.createLinearRing( new Coordinate[] { new Coordinate(33, 33), new Coordinate(33, 34), new Coordinate(34, 34), new Coordinate(34, 33), new Coordinate(33, 33)}), new LinearRing[] { GeometryUtils.GEOMETRY_FACTORY.createLinearRing( new Coordinate[] { new Coordinate(33.25, 33.25), new Coordinate(33.75, 33.25), new Coordinate(33.75, 33.75), new 
Coordinate(33.25, 33.75), new Coordinate(33.25, 33.25)})}), Geometry::getArea); } @Test public void testLines() { testGeometry( GeometryUtils.GEOMETRY_FACTORY.createLineString( new Coordinate[] {new Coordinate(33, 33), new Coordinate(34, 34)}), Geometry::getLength); testGeometry( GeometryUtils.GEOMETRY_FACTORY.createLineString( new Coordinate[] { new Coordinate(33, 33), new Coordinate(33, 34), new Coordinate(34, 34), new Coordinate(34, 33), new Coordinate(33, 33)}), Geometry::getLength); testGeometry( GeometryUtils.GEOMETRY_FACTORY.createLineString( new Coordinate[] { new Coordinate(0.5, 0.6), new Coordinate(0.7, 0.8), new Coordinate(1, 0.9), new Coordinate(0.8, 0.7), new Coordinate(0.5, 0.6)}), Geometry::getLength); } private void testGeometry( final Geometry geom, final Function measurementFunction) { final double originalMeasurement = measurementFunction.apply(geom); for (final SpatialBinningType type : SpatialBinningType.values()) { final double errorThreshold = TYPE_TO_ERROR_THRESHOLD.get(type); for (int precision = 1; precision < 7; precision++) { final int finalPrecision = type.equals(SpatialBinningType.S2) ? 
precision * 2 : precision; final ByteArray[] bins = type.getSpatialBins(geom, finalPrecision); double weight = 0; final List cellGeoms = new ArrayList<>(); for (final ByteArray bin : bins) { final Geometry binGeom = type.getBinGeometry(bin, finalPrecision); cellGeoms.add(binGeom.intersection(geom)); final double intersectionMeasurement = measurementFunction.apply(binGeom.intersection(geom)); final double fieldWeight = intersectionMeasurement / originalMeasurement; weight += fieldWeight; } // cumulative weight should be 1, within the threshold of error Assert.assertEquals( String.format( "Combined weight is off by more than threshold for type '%s' with precision '%d' for geometry '%s'", type, finalPrecision, geom), 1, weight, errorThreshold); // the union of the geometries should be within the within the threshold of error of the // original measurement Assert.assertEquals( String.format( "Measurement on geometric union is off by more than threshold for type '%s' with precision '%d' for geometry '%s'", type, finalPrecision, geom), 1, measurementFunction.apply(GeometryUtils.GEOMETRY_FACTORY.buildGeometry(cellGeoms)) / originalMeasurement, errorThreshold); } } } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/dimension/LongitudeDefinitionTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.dimension; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.NumericRange; public class LongitudeDefinitionTest { private final double DELTA = 1e-15; @Test public void testNormalizeWithinBoundsRanges() { final double minRange = 10; final double maxRange = 100; final BinRange[] binRange = getNormalizedRanges(minRange, maxRange); Assert.assertEquals(minRange, binRange[0].getNormalizedMin(), DELTA); Assert.assertEquals(maxRange, binRange[0].getNormalizedMax(), DELTA); } @Test public void testNormalizeWithinBoundsValue() { final double easternNormalizedValue = -160; final double westernNormalizedValue = 160; final double easternValue = 200; final double westernValue = -200; Assert.assertEquals(easternNormalizedValue, getNormalizedLongitudeValue(easternValue), DELTA); Assert.assertEquals(westernNormalizedValue, getNormalizedLongitudeValue(westernValue), DELTA); } @Test public void testNormalizeDateLineCrossingEast() { final double minRange = 150; final double maxRange = 200; final int expectedBinCount = 2; final BinRange[] expectedBinRanges = new BinRange[] {new BinRange(-180, -160), new BinRange(150, 180)}; final BinRange[] binRange = getNormalizedRanges(minRange, maxRange); Assert.assertEquals(expectedBinCount, binRange.length); for (int i = 0; i < binRange.length; i++) { Assert.assertEquals( expectedBinRanges[i].getNormalizedMin(), binRange[i].getNormalizedMin(), DELTA); Assert.assertEquals( expectedBinRanges[i].getNormalizedMax(), 
binRange[i].getNormalizedMax(), DELTA); } } @Test public void testNormalizeDateLineCrossingWest() { final double minRange = -200; final double maxRange = -170; final int expectedBinCount = 2; final BinRange[] expectedBinRanges = new BinRange[] {new BinRange(-180, -170), new BinRange(160, 180)}; final BinRange[] binRange = getNormalizedRanges(minRange, maxRange); Assert.assertEquals(expectedBinCount, binRange.length); for (int i = 0; i < binRange.length; i++) { Assert.assertEquals( expectedBinRanges[i].getNormalizedMin(), binRange[i].getNormalizedMin(), DELTA); Assert.assertEquals( expectedBinRanges[i].getNormalizedMax(), binRange[i].getNormalizedMax(), DELTA); } } private BinRange[] getNormalizedRanges(final double minRange, final double maxRange) { return new LongitudeDefinition().getNormalizedRanges(new NumericRange(minRange, maxRange)); } private double getNormalizedLongitudeValue(final double value) { return LongitudeDefinition.getNormalizedLongitude(value); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/dimension/TemporalBinningStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.dimension;

import java.util.Calendar;
import java.util.TimeZone;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.numeric.NumericRange;

/**
 * Tests {@link TemporalBinningStrategy#getNormalizedRanges} bin counts and full-extent flags
 * across every {@link Unit}, including leap-year and month-length boundary cases. Timezone is
 * pinned to GMT so calendar arithmetic is reproducible regardless of the host locale.
 */
public class TemporalBinningStrategyTest {
  @Before
  public void setTimezoneToGMT() {
    // calendar math below assumes GMT; the default TZ would shift epoch boundaries
    TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
  }

  @Test
  public void testLargeNumberOfDayBins() {
    internalTestBinsMatchExpectedCount(250000, Unit.DAY, 123456789876L);
  }

  @Test
  public void testLargeNumberOfMonthBins() {
    internalTestBinsMatchExpectedCount(250000, Unit.MONTH, 9876543210L);
  }

  @Test
  public void testLargeNumberOfYearBins() {
    // for years, use 250,000 to keep milli time values less than max long
    internalTestBinsMatchExpectedCount(250000, Unit.YEAR, 0L);
  }

  @Test
  public void testLargeNumberOfHourBins() {
    internalTestBinsMatchExpectedCount(250000, Unit.HOUR, 0L);
  }

  @Test
  public void testLargeNumberOfMinuteBins() {
    internalTestBinsMatchExpectedCount(250000, Unit.MINUTE, 0L);
  }

  /** Asserts that spanning {@code binCount} units produces exactly {@code binCount} bins. */
  private void internalTestBinsMatchExpectedCount(
      final int binCount,
      final Unit unit,
      final long arbitraryTime) {
    final BinRange[] ranges = getBinRangesUsingFullExtents(binCount, unit, arbitraryTime);
    Assert.assertEquals(binCount, ranges.length);
  }

  /**
   * Builds a range starting exactly on a unit epoch and ending one millisecond before
   * {@code binCount} units later, then normalizes it into bins.
   */
  private BinRange[] getBinRangesUsingFullExtents(
      final int binCount,
      final Unit unit,
      final long arbitraryTime) {
    final Calendar startCal = Calendar.getInstance();
    final long time = arbitraryTime;
    // hopefully these approaches work for
    // any arbitrary time, but allow a
    // caller to set the specific time
    // so tests are all entirely
    // reproducible
    startCal.setTimeInMillis(time);
    final Calendar endCal = Calendar.getInstance();
    endCal.setTimeInMillis(time);
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(unit);
    binStrategy.setToEpoch(startCal);
    binStrategy.setToEpoch(endCal);
    endCal.add(unit.toCalendarEnum(), binCount);
    return binStrategy.getNormalizedRanges(
        new NumericRange(startCal.getTimeInMillis(), (double) endCal.getTimeInMillis() - 1));
  }

  @Test
  public void testFullExtentOnSingleBin() {
    final BinRange[] ranges = getBinRangesUsingFullExtents(1, Unit.MONTH, 543210987654321L);
    Assert.assertEquals(1, ranges.length);
    Assert.assertTrue(ranges[0].isFullExtent());
  }

  @Test
  public void testFullExtentOnMultipleBins() {
    final Calendar startCal = Calendar.getInstance();
    final long time = 3456789012345L;
    startCal.setTimeInMillis(time);
    final Calendar endCal = Calendar.getInstance();
    // theoretically should get 3 bins back the first and last not having
    // full extent and the middle one being full extent
    endCal.setTimeInMillis(time + (TemporalBinningStrategy.MILLIS_PER_DAY * 2));
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.DAY);
    BinRange[] ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(startCal.getTimeInMillis(), endCal.getTimeInMillis()));
    Assert.assertEquals(3, ranges.length);
    Assert.assertTrue(!ranges[0].isFullExtent());
    Assert.assertTrue(ranges[1].isFullExtent());
    Assert.assertTrue(!ranges[2].isFullExtent());
    // snapping the start to the day epoch should make the first bin full extent too
    final Calendar startCalOnEpoch = Calendar.getInstance();
    startCalOnEpoch.setTimeInMillis(time);
    binStrategy.setToEpoch(startCalOnEpoch);
    ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(startCalOnEpoch.getTimeInMillis(), endCal.getTimeInMillis()));
    Assert.assertEquals(3, ranges.length);
    // now the first element should be full extent
    Assert.assertTrue(ranges[0].isFullExtent());
    Assert.assertTrue(ranges[1].isFullExtent());
    Assert.assertTrue(!ranges[2].isFullExtent());
    // ending one millisecond before a day epoch should make the last bin full extent
    final Calendar endCalOnMax = Calendar.getInstance();
    // theoretically should get 3 bins back the first and last not having
    // full extent and the middle one being full extent
    endCalOnMax.setTimeInMillis(time + (TemporalBinningStrategy.MILLIS_PER_DAY * 3));
    binStrategy.setToEpoch(endCalOnMax);
    endCalOnMax.add(Calendar.MILLISECOND, -1);
    ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(startCal.getTimeInMillis(), endCalOnMax.getTimeInMillis()));
    Assert.assertEquals(3, ranges.length);
    Assert.assertTrue(!ranges[0].isFullExtent());
    Assert.assertTrue(ranges[1].isFullExtent());
    // now the last element should be full extent
    Assert.assertTrue(ranges[2].isFullExtent());
  }

  @Test
  public void testStartOnEpochMinusOneAndEndOnEpoch() {
    final Calendar startCal = Calendar.getInstance();
    // final long time = 675849302912837456L; //this value would cause it to
    // fail because we lose precision in coverting to a double (the mantissa
    // of a double value is 52 bits and therefore the max long that it can
    // accurately represent is 2^52 before the ulp of the double becomes
    // greater than 1)
    final long time = 6758493029128L;
    startCal.setTimeInMillis(time);
    startCal.set(Calendar.MONTH, 0);
    // make sure its a month after one with 31 days
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.MONTH);
    binStrategy.setToEpoch(startCal);
    final Calendar endCal = Calendar.getInstance();
    endCal.setTimeInMillis(time);
    endCal.set(Calendar.MONTH, 0);
    // make sure its a month after one with 31 days
    binStrategy.setToEpoch(endCal);
    final BinRange[] ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(startCal.getTimeInMillis() - 1, endCal.getTimeInMillis()));
    Assert.assertEquals(2, ranges.length);
    // the first range should be the max possible value and both the min and
    // max of the range should be equal
    Assert.assertTrue(ranges[0].getNormalizedMax() == binStrategy.getBinMax());
    Assert.assertTrue(ranges[0].getNormalizedMin() == ranges[0].getNormalizedMax());
    // the second range should be the min possible value and both the min
    // and max of the range should be equal
    Assert.assertTrue(ranges[1].getNormalizedMin() == binStrategy.getBinMin());
    Assert.assertTrue(ranges[0].getNormalizedMax() == ranges[0].getNormalizedMin());
  }

  @Test
  public void testStartAndEndEqual() {
    final long time = 123987564019283L;
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);
    final BinRange[] ranges = binStrategy.getNormalizedRanges(new NumericRange(time, time));
    Assert.assertEquals(1, ranges.length);
    // both the min and max of the range should be equal
    Assert.assertTrue(ranges[0].getNormalizedMin() == ranges[0].getNormalizedMax());
  }

  @Test
  public void testEndLessThanStart() {
    // an inverted range yields no bins rather than throwing
    final long time = 123987564019283L;
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);
    final BinRange[] ranges = binStrategy.getNormalizedRanges(new NumericRange(time, time - 1));
    Assert.assertEquals(0, ranges.length);
  }

  @Test
  public void testFeb28ToMarch1NonLeapYear() {
    final long time = 47920164930285667L;
    final Calendar startCal = Calendar.getInstance();
    startCal.setTimeInMillis(time);
    startCal.set(Calendar.MONTH, 1);
    startCal.set(Calendar.YEAR, 2015);
    startCal.set(Calendar.DAY_OF_MONTH, 28);
    final Calendar endCal = Calendar.getInstance();
    endCal.setTimeInMillis(time);
    endCal.set(Calendar.MONTH, 2);
    endCal.set(Calendar.YEAR, 2015);
    endCal.set(Calendar.DAY_OF_MONTH, 1);
    // test the day boundaries first - going from feb28 to march 1 should
    // give 2 bins
    TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.DAY);
    BinRange[] ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(startCal.getTimeInMillis(), endCal.getTimeInMillis()));
    Assert.assertEquals(2, ranges.length);
    // now test the month boundaries - adding a day to feb28 for the end
    // time should give 2 bins
    binStrategy = new TemporalBinningStrategy(Unit.MONTH);
    ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(
                startCal.getTimeInMillis(),
                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY)));
    Assert.assertEquals(2, ranges.length);
  }

  @Test
  public void testFeb28ToMarch1LeapYear() {
    final long time = 29374659120374656L;
    final Calendar startCal = Calendar.getInstance();
    startCal.setTimeInMillis(time);
    startCal.set(Calendar.MONTH, 1);
    startCal.set(Calendar.YEAR, 2016);
    startCal.set(Calendar.DAY_OF_MONTH, 28);
    final Calendar endCal = Calendar.getInstance();
    endCal.setTimeInMillis(time);
    endCal.set(Calendar.MONTH, 2);
    endCal.set(Calendar.YEAR, 2016);
    endCal.set(Calendar.DAY_OF_MONTH, 1);
    // test the day boundaries first - going from feb28 to march 1 should
    // give 3 bins
    TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.DAY);
    BinRange[] ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(startCal.getTimeInMillis(), endCal.getTimeInMillis()));
    Assert.assertEquals(3, ranges.length);
    // now test the month boundaries - adding a day to feb28 for the end
    // time should give 1 bin, adding 2 days should give 2 bins
    binStrategy = new TemporalBinningStrategy(Unit.MONTH);
    ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(
                startCal.getTimeInMillis(),
                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY)));
    Assert.assertEquals(1, ranges.length);
    // add 2 days and now we should end up with 2 bins
    ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(
                startCal.getTimeInMillis(),
                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 2)));
    Assert.assertEquals(2, ranges.length);
  }

  @Test
  public void testNonLeapYear() {
    final long time = 75470203439504394L;
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);
    final Calendar startCal = Calendar.getInstance();
    startCal.setTimeInMillis(time);
    startCal.set(Calendar.YEAR, 2015);
    binStrategy.setToEpoch(startCal);
    // if we add 365 days to this we should get 2 year bins
    final BinRange[] ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(
                startCal.getTimeInMillis(),
                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 365)));
    Assert.assertEquals(2, ranges.length);
  }

  @Test
  public void testLeapYear() {
    final long time = 94823024856598633L;
    final TemporalBinningStrategy binStrategy = new TemporalBinningStrategy(Unit.YEAR);
    final Calendar startCal = Calendar.getInstance();
    startCal.setTimeInMillis(time);
    startCal.set(Calendar.YEAR, 2016);
    binStrategy.setToEpoch(startCal);
    // if we add 365 days to this we should get 1 year bin
    BinRange[] ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(
                startCal.getTimeInMillis(),
                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 365)));
    Assert.assertEquals(1, ranges.length);
    // if we add 366 days to this we should get 2 year bins, and the second
    // bin should be the epoch
    ranges =
        binStrategy.getNormalizedRanges(
            new NumericRange(
                startCal.getTimeInMillis(),
                startCal.getTimeInMillis() + (TemporalBinningStrategy.MILLIS_PER_DAY * 366)));
    Assert.assertEquals(2, ranges.length);
    // the second bin should just contain the epoch
    Assert.assertTrue(ranges[1].getNormalizedMin() == ranges[1].getNormalizedMax());
    Assert.assertTrue(ranges[1].getNormalizedMin() == binStrategy.getBinMin());
  }
}

================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/dimension/TimeDefinitionTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.dimension;

import java.util.Calendar;
import java.util.TimeZone;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit;
import org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;

/**
 * Verifies the bin extents of {@link TimeDefinition}'s binning strategy for each {@link Unit}:
 * the bin minimum is always 0, the bin maximum is the unit's length in milliseconds minus 1, and
 * timestamps at the very start/end of a unit normalize to the bin min/max respectively. Timezone
 * is pinned to GMT for reproducibility.
 */
public class TimeDefinitionTest {
  private final double DELTA = 1e-15;

  @Before
  public void setTimezoneToGMT() {
    // bin boundaries are epoch-relative; a non-GMT default TZ would shift them
    TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
  }

  @Test
  public void testTimeDefinitionMaxBinByDay() {
    final double expectedMin = 0.0;
    // 24h * 3600s * 1000ms - 1
    final double expectedMax = 86399999;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.HOUR_OF_DAY, 23);
    calendar.set(Calendar.MINUTE, 59);
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.DAY);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    // last millisecond of the day normalizes to the bin maximum
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionMaxBinByMonth() {
    final double expectedMin = 0.0;
    // 31 days in milliseconds - 1 (month bins size to the longest month)
    final double expectedMax = 2678399999.0;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.MONTH, 6);
    calendar.set(Calendar.DATE, 31);
    calendar.set(Calendar.HOUR_OF_DAY, 23);
    calendar.set(Calendar.MINUTE, 59);
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.MONTH);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionMinBinByMonth() {
    final double expectedMin = 0.0;
    final double expectedMax = 2678399999.0;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.MONTH, 6);
    calendar.set(Calendar.DATE, 1);
    calendar.set(Calendar.HOUR_OF_DAY, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.MONTH);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    // first millisecond of the month normalizes to the bin minimum
    Assert.assertEquals(
        bin.getBinMin(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionMaxBinByYEAR() {
    final double expectedMin = 0.0;
    // 366 days in milliseconds - 1 (year bins size to a leap year; 2012 is one)
    final double expectedMax = 31622399999.0;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.YEAR, 2012);
    calendar.set(Calendar.MONTH, 11);
    calendar.set(Calendar.DATE, 31);
    calendar.set(Calendar.HOUR_OF_DAY, 23);
    calendar.set(Calendar.MINUTE, 59);
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.YEAR);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionBinByHour() {
    final double expectedMin = 0.0;
    // 3600s * 1000ms - 1
    final double expectedMax = 3599999.0;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.MINUTE, 59);
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.HOUR);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
    // and the top of the hour normalizes to the bin minimum
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    Assert.assertEquals(
        bin.getBinMin(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionBinByMinute() {
    final double expectedMin = 0.0;
    // 60s * 1000ms - 1
    final double expectedMax = 59999.0;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.MINUTE);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
    // and the top of the minute normalizes to the bin minimum
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    Assert.assertEquals(
        bin.getBinMin(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionMaxBinByDecade() {
    final double expectedMin = 0.0;
    final double expectedMax = 315619199999.0;
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.YEAR, 2009);
    calendar.set(Calendar.MONTH, 11);
    calendar.set(Calendar.DATE, 31);
    calendar.set(Calendar.HOUR_OF_DAY, 23);
    calendar.set(Calendar.MINUTE, 59);
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.DECADE);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  @Test
  public void testTimeDefinitionMaxBinByWeek() {
    final double expectedMin = 0.0;
    // 7 days in milliseconds - 1
    final double expectedMax = 604799999.0;
    final IndexBinningStrategy bin = getStrategyByUnit(Unit.WEEK);
    final Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.DAY_OF_WEEK, calendar.getActualMaximum(Calendar.DAY_OF_WEEK));
    calendar.set(Calendar.HOUR_OF_DAY, 23);
    calendar.set(Calendar.MINUTE, 59);
    calendar.set(Calendar.SECOND, 59);
    calendar.set(Calendar.MILLISECOND, 999);
    Assert.assertEquals(expectedMin, bin.getBinMin(), DELTA);
    Assert.assertEquals(expectedMax, bin.getBinMax(), DELTA);
    Assert.assertEquals(
        bin.getBinMax(),
        bin.getBinnedValue(calendar.getTimeInMillis()).getNormalizedValue(),
        DELTA);
  }

  /** Builds the binning strategy for the given unit via a fresh {@link TimeDefinition}. */
  private IndexBinningStrategy getStrategyByUnit(final Unit unit) {
    return new TimeDefinition(unit).getBinningStrategy();
  }
}

================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/sfc/hilbert/HilbertSFCTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.index.sfc.hilbert;

import java.nio.ByteBuffer;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition;
import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.sfc.RangeDecomposition;
import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;
import org.locationtech.geowave.core.index.sfc.SFCFactory;
import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;
import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;
import com.google.common.primitives.SignedBytes;

/**
 * Tests Hilbert space-filling-curve ID generation and range decomposition over a 2D
 * latitude/longitude space at 31 bits of precision per dimension. Expected IDs are fixed values
 * pinning the curve's layout so regressions in the SFC implementation are detected.
 */
public class HilbertSFCTest {
  @Test
  public void testGetId_2DSpatialMaxValue() throws Exception {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    // the extreme corner of the domain
    final Double[] testValues = new Double[] {90d, 180d};
    final long expectedID = 3074457345618258602L;
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());
  }

  @Test
  public void testGetId_2DSpatialMinValue() throws Exception {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    // the minimum corner of the domain maps to curve origin (ID 0)
    final Double[] testValues = new Double[] {-90d, -180d};
    final long expectedID = 0L;
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());
  }

  @Test
  public void testGetId_2DSpatialCentroidValue() throws Exception {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    // the center of the domain
    final Double[] testValues = new Double[] {0d, 0d};
    final long expectedID = 768614336404564650L;
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());
  }

  @Test
  public void testGetId_2DSpatialLexicographicOrdering() throws Exception {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    final Double[] minValue = new Double[] {-90d, -180d};
    final Double[] maxValue = new Double[] {90d, 180d};
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    // byte-level ID ordering must agree with the curve's logical ordering
    Assert.assertTrue(
        SignedBytes.lexicographicalComparator().compare(
            hilbertSFC.getId(minValue),
            hilbertSFC.getId(maxValue)) < 0);
  }

  // NOTE: this test is deliberately disabled (the @Test annotation is commented out); it feeds
  // an out-of-range latitude (-100) and documents the previously-expected behavior.
  // @Test(expected = IllegalArgumentException.class)
  public void testGetId_2DSpatialIllegalArgument() throws Exception {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    final Double[] testValues = new Double[] {-100d, -180d};
    final long expectedID = 0L;
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS),
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    Assert.assertEquals(expectedID, ByteBuffer.wrap(hilbertSFC.getId(testValues)).getLong());
  }

  @Test
  public void testDecomposeQuery_2DSpatialOneIndexFilter() {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS),
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    // Create a IndexRange object using the x axis
    final NumericRange rangeX = new NumericRange(55, 57);
    // Create a IndexRange object using the y axis
    final NumericRange rangeY = new NumericRange(25, 27);
    final BasicNumericDataset spatialQuery =
        new BasicNumericDataset(new NumericData[] {rangeX, rangeY});
    // asking for at most 1 range must yield exactly 1 range
    final RangeDecomposition rangeDecomposition =
        hilbertSFC.decomposeRange(spatialQuery, true, 1);
    Assert.assertEquals(1, rangeDecomposition.getRanges().length);
  }

  @Test
  public void testDecomposeQuery_2DSpatialTwentyIndexFilters() {
    final int LATITUDE_BITS = 31;
    final int LONGITUDE_BITS = 31;
    final SFCDimensionDefinition[] SPATIAL_DIMENSIONS =
        new SFCDimensionDefinition[] {
            new SFCDimensionDefinition(new LongitudeDefinition(), LONGITUDE_BITS),
            new SFCDimensionDefinition(new LatitudeDefinition(), LATITUDE_BITS)};
    final SpaceFillingCurve hilbertSFC =
        SFCFactory.createSpaceFillingCurve(SPATIAL_DIMENSIONS, SFCType.HILBERT);
    // Create a IndexRange object using the x axis
    final NumericRange rangeX = new NumericRange(10, 57);
    // Create a IndexRange object using the y axis
    final NumericRange rangeY = new NumericRange(25, 50);
    final BasicNumericDataset spatialQuery =
        new BasicNumericDataset(new NumericData[] {rangeX, rangeY});
    // a larger query window decomposed with a budget of 20 ranges uses all 20
    final RangeDecomposition rangeDecomposition =
        hilbertSFC.decomposeRange(spatialQuery, true, 20);
    Assert.assertEquals(20, rangeDecomposition.getRanges().length);
  }

  /* public void testDecomposeQuery_2DSpatialRanges() {} */
}

================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/sfc/hilbert/PrimitiveHilbertSFCTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.sfc.hilbert; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import org.locationtech.geowave.core.index.sfc.hilbert.PrimitiveHilbertSFCOperations; import org.locationtech.geowave.core.index.sfc.hilbert.UnboundedHilbertSFCOperations; import com.google.uzaygezen.core.CompactHilbertCurve; import com.google.uzaygezen.core.MultiDimensionalSpec; public class PrimitiveHilbertSFCTest { private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS = new NumericDimensionDefinition[] {new LongitudeDefinition(), new LatitudeDefinition(true)}; @Test public void testSpatialGetIdAndQueryDecomposition62BitsTotal() { final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[SPATIAL_DIMENSIONS.length]; int totalPrecision = 0; final List bitsPerDimension = new ArrayList<>(); for (int d = 0; d < SPATIAL_DIMENSIONS.length; d++) { final int bitsOfPrecision = 31; sfcDimensions[d] = new 
SFCDimensionDefinition(SPATIAL_DIMENSIONS[d], bitsOfPrecision); bitsPerDimension.add(bitsOfPrecision); totalPrecision += bitsOfPrecision; } final CompactHilbertCurve compactHilbertCurve = new CompactHilbertCurve(new MultiDimensionalSpec(bitsPerDimension)); final PrimitiveHilbertSFCOperations testOperations = new PrimitiveHilbertSFCOperations(); // assume the unbounded SFC is the true results, regardless they should // both produce the same results final UnboundedHilbertSFCOperations expectedResultOperations = new UnboundedHilbertSFCOperations(); testOperations.init(sfcDimensions); expectedResultOperations.init(sfcDimensions); final Double[] testValues1 = new Double[] {45d, 45d}; final Double[] testValues2 = new Double[] {0d, 0d}; final Double[] testValues3 = new Double[] {-1.235456, -67.9213546}; final Double[] testValues4 = new Double[] { -61.2354561024897435868943753568436598645436, 42.921354693742875894356895549054690704378590896}; Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions)); Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions)); Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions)); Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions)); final NumericRange rangeLongitude1 = new NumericRange(0, 1); final NumericRange rangeLatitude1 = new NumericRange(0, 1); final NumericRange rangeLongitude2 = new NumericRange(-21.324967549, 28.4285637846834432543); final NumericRange 
rangeLatitude2 = new NumericRange( -43.7894445665435346547657867847657654, 32.3254325834896543657895436543543659); final NumericRange rangeLongitude3 = new NumericRange(-10, 0); final NumericRange rangeLatitude3 = new NumericRange(-10, 0); final NumericRange rangeLongitude4 = new NumericRange(-Double.MIN_VALUE, 0); final NumericRange rangeLatitude4 = new NumericRange(0, Double.MIN_VALUE); final RangeDecomposition expectedResult1 = expectedResultOperations.decomposeRange( new NumericData[] {rangeLongitude1, rangeLatitude1}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, true); final RangeDecomposition testResult1 = testOperations.decomposeRange( new NumericData[] {rangeLongitude1, rangeLatitude1}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, true); Assert.assertTrue(expectedResult1.getRanges().length == testResult1.getRanges().length); for (int i = 0; i < expectedResult1.getRanges().length; i++) { Assert.assertTrue(expectedResult1.getRanges()[i].equals(testResult1.getRanges()[i])); } final RangeDecomposition expectedResult2 = expectedResultOperations.decomposeRange( new NumericData[] {rangeLongitude2, rangeLatitude2}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, true); final RangeDecomposition testResult2 = testOperations.decomposeRange( new NumericData[] {rangeLongitude2, rangeLatitude2}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, true); Assert.assertTrue(expectedResult2.getRanges().length == testResult2.getRanges().length); for (int i = 0; i < expectedResult2.getRanges().length; i++) { Assert.assertTrue(expectedResult2.getRanges()[i].equals(testResult2.getRanges()[i])); } final RangeDecomposition expectedResult3 = expectedResultOperations.decomposeRange( new NumericData[] {rangeLongitude3, rangeLatitude3}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, false); final RangeDecomposition testResult3 = 
testOperations.decomposeRange( new NumericData[] {rangeLongitude3, rangeLatitude3}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, false); Assert.assertTrue(expectedResult3.getRanges().length == testResult3.getRanges().length); for (int i = 0; i < expectedResult3.getRanges().length; i++) { Assert.assertTrue(expectedResult3.getRanges()[i].equals(testResult3.getRanges()[i])); } final RangeDecomposition expectedResult4 = expectedResultOperations.decomposeRange( new NumericData[] {rangeLongitude4, rangeLatitude4}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, false); final RangeDecomposition testResult4 = testOperations.decomposeRange( new NumericData[] {rangeLongitude4, rangeLatitude4}, compactHilbertCurve, sfcDimensions, totalPrecision, Integer.MAX_VALUE, true, false); Assert.assertTrue(expectedResult4.getRanges().length == testResult4.getRanges().length); for (int i = 0; i < expectedResult4.getRanges().length; i++) { Assert.assertTrue(expectedResult4.getRanges()[i].equals(testResult4.getRanges()[i])); } } @Test public void testGetId48BitsPerDimension() { final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[20]; final List bitsPerDimension = new ArrayList<>(); for (int d = 0; d < sfcDimensions.length; d++) { final int bitsOfPrecision = 48; sfcDimensions[d] = new SFCDimensionDefinition(new BasicDimensionDefinition(0, 1), bitsOfPrecision); bitsPerDimension.add(bitsOfPrecision); } final CompactHilbertCurve compactHilbertCurve = new CompactHilbertCurve(new MultiDimensionalSpec(bitsPerDimension)); final PrimitiveHilbertSFCOperations testOperations = new PrimitiveHilbertSFCOperations(); // assume the unbounded SFC is the true results, regardless they should // both produce the same results final UnboundedHilbertSFCOperations expectedResultOperations = new UnboundedHilbertSFCOperations(); testOperations.init(sfcDimensions); expectedResultOperations.init(sfcDimensions); final Double[] 
testValues1 = new Double[20]; Arrays.fill(testValues1, Double.MIN_VALUE); final Double[] testValues2 = new Double[20]; Arrays.fill(testValues2, 0d); final Double[] testValues3 = new Double[20]; Arrays.fill(testValues3, 1d); final Double[] testValues4 = new Double[] { 0.2354561024897435868943753568436598645436, 0.921354693742875894657658678436546547657867869789780790890789356895549054690704378590896, 0.84754363905364783265784365843, 0.7896543436756437856046562640234, 0.3216819204957436913249032618969653, 0.327219038596576238101046563945864390685476054, 0.12189368934632894658343655436546754754665875784375308678932689368432, 0.000327489326493291328326493457437584375043, 0.3486563289543, 0.96896758943758, 0.98999897899879789789789789789789789789689, 0.1275785478325478265925864359, 0.124334325346554654, 0.1234565, 0.9876543, 0.76634328932, 0.64352843, 0.5432342321, 0.457686789, 0.2046543435}; Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues1, compactHilbertCurve, sfcDimensions)); Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues2, compactHilbertCurve, sfcDimensions)); Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues3, compactHilbertCurve, sfcDimensions)); Assert.assertArrayEquals( expectedResultOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions), testOperations.convertToHilbert(testValues4, compactHilbertCurve, sfcDimensions)); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/index/sfc/hilbert/tiered/TieredSFCIndexStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse 
Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.index.sfc.hilbert.tiered; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Calendar; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.numeric.NumericValue; import 
org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory; public class TieredSFCIndexStrategyTest { public static final int[] DEFINED_BITS_OF_PRECISION = new int[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 18, 31}; NumericDimensionDefinition[] SPATIAL_TEMPORAL_DIMENSIONS = new NumericDimensionDefinition[] { new LongitudeDefinition(), new LatitudeDefinition(true), new TimeDefinition(Unit.YEAR),}; private static final double QUERY_RANGE_EPSILON = 1E-12; @Test public void testSingleEntry() { final Calendar cal = Calendar.getInstance(); final NumericData[] dataPerDimension1 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length]; dataPerDimension1[0] = new NumericValue(45); dataPerDimension1[1] = new NumericValue(45); dataPerDimension1[2] = new NumericValue(cal.getTimeInMillis()); final int year = cal.get(Calendar.YEAR); cal.set(Calendar.DAY_OF_YEAR, 1); final NumericData[] dataPerDimension2 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length]; dataPerDimension2[0] = new NumericValue(45); dataPerDimension2[1] = new NumericValue(45); dataPerDimension2[2] = new NumericValue(cal.getTimeInMillis()); cal.set(Calendar.YEAR, year - 1); final NumericData[] dataPerDimension3 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length]; dataPerDimension3[0] = new NumericValue(45); dataPerDimension3[1] = new NumericValue(45); dataPerDimension3[2] = new NumericValue(cal.getTimeInMillis()); MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension1); final NumericIndexStrategy strategy = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()).getIndexStrategy(); final InsertionIds ids1 = strategy.getInsertionIds(indexedData); assertEquals(1, ids1.getCompositeInsertionIds().size()); assertEquals(13, ids1.getCompositeInsertionIds().get(0).length); // same bin indexedData = new BasicNumericDataset(dataPerDimension2); final InsertionIds ids2 = 
strategy.getInsertionIds(indexedData); assertEquals(1, ids2.getCompositeInsertionIds().size()); assertTrue( compare(ids1.getCompositeInsertionIds().get(0), ids2.getCompositeInsertionIds().get(0), 5)); // different bin indexedData = new BasicNumericDataset(dataPerDimension3); final InsertionIds ids3 = strategy.getInsertionIds(indexedData); assertEquals(1, ids3.getCompositeInsertionIds().size()); assertFalse( compare(ids1.getCompositeInsertionIds().get(0), ids3.getCompositeInsertionIds().get(0), 5)); } @Test public void testPredefinedSpatialEntries() throws Exception { final NumericIndexStrategy strategy = TieredSFCIndexFactory.createDefinedPrecisionTieredStrategy( new NumericDimensionDefinition[] { new LongitudeDefinition(), new LatitudeDefinition(true)}, new int[][] {DEFINED_BITS_OF_PRECISION.clone(), DEFINED_BITS_OF_PRECISION.clone()}, SFCType.HILBERT); for (int sfcIndex = 0; sfcIndex < DEFINED_BITS_OF_PRECISION.length; sfcIndex++) { final NumericData[] dataPerDimension = new NumericData[2]; final double precision = 360 / Math.pow(2, DEFINED_BITS_OF_PRECISION[sfcIndex]); if (precision > 180) { dataPerDimension[0] = new NumericRange(-180, 180); dataPerDimension[1] = new NumericRange(-90, 90); } else { dataPerDimension[0] = new NumericRange(0, precision); dataPerDimension[1] = new NumericRange(-precision, 0); } final MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension); final InsertionIds ids = strategy.getInsertionIds(indexedData); final NumericData[] queryRangePerDimension = new NumericData[2]; queryRangePerDimension[0] = new NumericRange( dataPerDimension[0].getMin() + QUERY_RANGE_EPSILON, dataPerDimension[0].getMax() - QUERY_RANGE_EPSILON); queryRangePerDimension[1] = new NumericRange( dataPerDimension[1].getMin() + QUERY_RANGE_EPSILON, dataPerDimension[1].getMax() - QUERY_RANGE_EPSILON); final MultiDimensionalNumericData queryData = new BasicNumericDataset(queryRangePerDimension); final QueryRanges queryRanges = 
strategy.getQueryRanges(queryData); final Set queryRangeTiers = new HashSet<>(); boolean rangeAtTierFound = false; for (final ByteArrayRange range : queryRanges.getCompositeQueryRanges()) { final byte tier = range.getStart()[0]; queryRangeTiers.add(range.getStart()[0]); if (tier == DEFINED_BITS_OF_PRECISION[sfcIndex]) { if (rangeAtTierFound) { throw new Exception("multiple ranges were found unexpectedly for tier " + tier); } assertArrayEquals( "this range is an exact fit, so it should have exactly one value for tier " + DEFINED_BITS_OF_PRECISION[sfcIndex], range.getStart(), range.getEnd()); rangeAtTierFound = true; } } if (!rangeAtTierFound) { throw new Exception( "no ranges were found at the expected exact fit tier " + DEFINED_BITS_OF_PRECISION[sfcIndex]); } // ensure the first byte is equal to the appropriate number of bits // of precision if ((ids.getCompositeInsertionIds().get(0)[0] == 0) || ((sfcIndex == (DEFINED_BITS_OF_PRECISION.length - 1)) || (DEFINED_BITS_OF_PRECISION[sfcIndex + 1] != (DEFINED_BITS_OF_PRECISION[sfcIndex] + 1)))) { assertEquals( "Insertion ID expected to be exact match at tier " + DEFINED_BITS_OF_PRECISION[sfcIndex], DEFINED_BITS_OF_PRECISION[sfcIndex], ids.getCompositeInsertionIds().get(0)[0]); assertEquals( "Insertion ID size expected to be 1 at tier " + DEFINED_BITS_OF_PRECISION[sfcIndex], 1, ids.getCompositeInsertionIds().size()); } else { assertEquals( "Insertion ID expected to be duplicated at tier " + DEFINED_BITS_OF_PRECISION[sfcIndex + 1], DEFINED_BITS_OF_PRECISION[sfcIndex + 1], ids.getCompositeInsertionIds().get(0)[0]); // if the precision is within the bounds of longitude but not // within latitude we will end up with 2 (rectangular // decomposition) // otherwise we will get a square decomposition of 4 ids final int expectedIds = (precision > 90) && (precision <= 180) ? 
2 : 4; assertEquals( "Insertion ID size expected to be " + expectedIds + " at tier " + DEFINED_BITS_OF_PRECISION[sfcIndex + 1], expectedIds, ids.getCompositeInsertionIds().size()); } } } @Test public void testOneEstimatedDuplicateInsertion() throws Exception { final NumericIndexStrategy strategy = TieredSFCIndexFactory.createFullIncrementalTieredStrategy( new NumericDimensionDefinition[] { new LongitudeDefinition(), new LatitudeDefinition(true)}, new int[] {31, 31}, SFCType.HILBERT); for (final int element : DEFINED_BITS_OF_PRECISION) { final NumericData[] dataPerDimension = new NumericData[2]; final double precision = 360 / Math.pow(2, element); if (precision > 180) { dataPerDimension[0] = new NumericRange(-180, 180); dataPerDimension[1] = new NumericRange(-90, 90); } else { dataPerDimension[0] = new NumericRange(0, precision); dataPerDimension[1] = new NumericRange(-precision, 0); } final MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension); final InsertionIds ids = strategy.getInsertionIds(indexedData, 1); assertEquals( "Insertion ID size expected to be 1 at tier " + element, 1, ids.getCompositeInsertionIds().size()); // ensure the first byte is equal to the appropriate number of bits // of precision assertEquals( "Insertion ID expected to be exact match at tier " + element, element, ids.getCompositeInsertionIds().get(0)[0]); } } @Test public void testRegions() throws ParseException { final Calendar cal = Calendar.getInstance(); final Calendar calEnd = Calendar.getInstance(); final SimpleDateFormat format = new SimpleDateFormat("MM-dd-yyyy HH:mm:ss"); cal.setTime(format.parse("03-03-1999 11:01:01")); calEnd.setTime(format.parse("03-03-1999 11:05:01")); final NumericData[] dataPerDimension1 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length]; dataPerDimension1[0] = new NumericRange(45.170, 45.173); dataPerDimension1[1] = new NumericRange(50.190, 50.192); dataPerDimension1[2] = new NumericRange(cal.getTimeInMillis(), 
calEnd.getTimeInMillis()); final int year = cal.get(Calendar.YEAR); cal.set(Calendar.DAY_OF_YEAR, 1); final NumericData[] dataPerDimension2 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length]; dataPerDimension2[0] = new NumericRange(45, 50); dataPerDimension2[1] = new NumericRange(45, 50); dataPerDimension2[2] = new NumericRange(cal.getTimeInMillis(), calEnd.getTimeInMillis()); cal.set(Calendar.YEAR, year - 1); calEnd.set(Calendar.YEAR, year - 1); final NumericData[] dataPerDimension3 = new NumericData[SPATIAL_TEMPORAL_DIMENSIONS.length]; dataPerDimension3[0] = new NumericRange(45.1701, 45.1703); dataPerDimension3[1] = new NumericRange(50.1901, 50.1902); dataPerDimension3[2] = new NumericRange(cal.getTimeInMillis(), calEnd.getTimeInMillis()); MultiDimensionalNumericData indexedData = new BasicNumericDataset(dataPerDimension1); final NumericIndexStrategy strategy = TieredSFCIndexFactory.createEqualIntervalPrecisionTieredStrategy( SPATIAL_TEMPORAL_DIMENSIONS, new int[] {20, 20, 20}, SFCType.HILBERT, 4); final InsertionIds ids1 = strategy.getInsertionIds(indexedData); assertEquals(1, ids1.getCompositeInsertionIds().size()); assertEquals(10, ids1.getCompositeInsertionIds().get(0).length); // different bin bin indexedData = new BasicNumericDataset(dataPerDimension2); final InsertionIds ids2 = strategy.getInsertionIds(indexedData); assertEquals(1, ids2.getCompositeInsertionIds().size()); // different tier assertFalse( compare(ids1.getCompositeInsertionIds().get(0), ids2.getCompositeInsertionIds().get(0), 1)); // same time assertTrue( compare( ids1.getCompositeInsertionIds().get(0), ids2.getCompositeInsertionIds().get(0), 1, 5)); // different bin indexedData = new BasicNumericDataset(dataPerDimension3); final List ids3 = strategy.getInsertionIds(indexedData).getCompositeInsertionIds(); assertEquals(1, ids3.size()); assertFalse(compare(ids1.getCompositeInsertionIds().get(0), ids3.get(0), 1, 5)); } private boolean compare(final byte[] one, final byte[] two, final int 
start, final int stop) { return Arrays.equals( Arrays.copyOfRange(one, start, stop), Arrays.copyOfRange(two, start, stop)); } private boolean compare(final byte[] one, final byte[] two, final int length) { return Arrays.equals(Arrays.copyOf(one, length), Arrays.copyOf(two, length)); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/data/PersistenceEncodingTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.data; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Map; import java.util.TimeZone; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder; import org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import 
org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.PrecisionModel; public class PersistenceEncodingTest { private final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FLOATING)); private static final NumericDimensionDefinition[] SPATIAL_TEMPORAL_DIMENSIONS = new NumericDimensionDefinition[] { new LongitudeDefinition(), new LatitudeDefinition(), new TimeDefinition(Unit.YEAR),}; private static final CommonIndexModel model = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()).getIndexModel(); private static final NumericIndexStrategy strategy = TieredSFCIndexFactory.createSingleTierStrategy( SPATIAL_TEMPORAL_DIMENSIONS, new int[] {16, 16, 16}, SFCType.HILBERT); private static final Index index = new IndexImpl(strategy, model); Date start = null, end = null; @Before public void setUp() throws ParseException { TimeZone.setDefault(TimeZone.getTimeZone("GMT")); final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S"); start = dateFormat.parse("2012-04-03 13:30:23.304"); end = dateFormat.parse("2012-04-03 14:30:23.304"); } @Test public void testPoint() { final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index); final GeoObj entry = new GeoObj(factory.createPoint(new Coordinate(43.454, 28.232)), start, end, "g1"); final List ids = adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds( 
index).getCompositeInsertionIds(); assertEquals(1, ids.size()); } @Test public void testLine() { final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index); final GeoObj entry = new GeoObj( factory.createLineString( new Coordinate[] {new Coordinate(43.444, 28.232), new Coordinate(43.454, 28.242)}), start, end, "g1"); final List ids = adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds( index).getCompositeInsertionIds(); assertEquals(15, ids.size()); } @Test public void testLineWithPrecisionOnTheTileEdge() { final NumericIndexStrategy strategy = TieredSFCIndexFactory.createSingleTierStrategy( SPATIAL_TEMPORAL_DIMENSIONS, new int[] {14, 14, 14}, SFCType.HILBERT); final Index index = new IndexImpl(strategy, model); final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index); final GeoObj entry = new GeoObj( factory.createLineString( new Coordinate[] { new Coordinate(-99.22, 33.75000000000001), // notice // that // this gets // tiled as // 33.75 new Coordinate(-99.15, 33.75000000000001) // notice that this gets tiled as 33.75 }), new Date(352771200000l), new Date(352771200000l), "g1"); final List ids = adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds( index).getCompositeInsertionIds(); assertEquals(4, ids.size()); } @Test public void testPoly() { final GeoObjDataAdapter adapter = new GeoObjDataAdapter(false); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index); final GeoObj entry = new GeoObj( factory.createLineString( new Coordinate[] { new Coordinate(43.444, 28.232), new Coordinate(43.454, 28.242), new Coordinate(43.444, 28.252), new Coordinate(43.444, 
28.232),}), start, end, "g1"); final List ids = adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds( index).getCompositeInsertionIds(); assertEquals(27, ids.size()); } @Test public void testPointRange() { final GeoObjDataAdapter adapter = new GeoObjDataAdapter(true); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index); final GeoObj entry = new GeoObj(factory.createPoint(new Coordinate(43.454, 28.232)), start, end, "g1"); final List ids = adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds( index).getCompositeInsertionIds(); assertEquals(8, ids.size()); } @Test public void testLineRnge() { final GeoObjDataAdapter adapter = new GeoObjDataAdapter(true); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), index); final GeoObj entry = new GeoObj( factory.createLineString( new Coordinate[] {new Coordinate(43.444, 28.232), new Coordinate(43.454, 28.242)}), start, end, "g1"); final List ids = adapter.asInternalAdapter((short) -1).encode(entry, indexMapping, index).getInsertionIds( index).getCompositeInsertionIds(); assertTrue(ids.size() < 200); } private static final String GEOM = "myGeo"; private static final String ID = "myId"; private static final String START_TIME = "startTime"; private static final String END_TIME = "endTime"; private static final FieldDescriptor GEO_FIELD = new SpatialFieldDescriptorBuilder<>(Geometry.class).spatialIndexHint().fieldName( GEOM).build(); private static final FieldDescriptor ID_FIELD = new FieldDescriptorBuilder<>(String.class).fieldName(ID).build(); // Time fields for time instant tests private static final FieldDescriptor START_TIME_FIELD = new TemporalFieldDescriptorBuilder<>(Date.class).timeIndexHint().fieldName( START_TIME).build(); private static final FieldDescriptor END_TIME_FIELD = new 
TemporalFieldDescriptorBuilder<>(Date.class).fieldName(END_TIME).build();
  // Time fields for time range tests
  private static final FieldDescriptor START_TIME_RANGE_FIELD =
      new TemporalFieldDescriptorBuilder<>(Date.class).startTimeIndexHint().fieldName(
          START_TIME).build();
  private static final FieldDescriptor END_TIME_RANGE_FIELD =
      new TemporalFieldDescriptorBuilder<>(Date.class).endTimeIndexHint().fieldName(
          END_TIME).build();
  // Descriptor sets for the two adapter flavors: plain instant fields vs. fields carrying
  // start/end time index hints (time-range indexing).
  private static final FieldDescriptor[] TIME_DESCRIPTORS =
      new FieldDescriptor[] {GEO_FIELD, ID_FIELD, START_TIME_FIELD, END_TIME_FIELD};
  private static final FieldDescriptor[] TIME_RANGE_DESCRIPTORS =
      new FieldDescriptor[] {GEO_FIELD, ID_FIELD, START_TIME_RANGE_FIELD, END_TIME_RANGE_FIELD};

  /**
   * Minimal test adapter exposing {@link GeoObj} entries to GeoWave. The {@code isTimeRange}
   * flag selects which descriptor set (instant vs. time-range hints) the adapter advertises,
   * and is the only state persisted by toBinary/fromBinary.
   */
  public static class GeoObjDataAdapter implements DataTypeAdapter {
    private boolean isTimeRange;

    public GeoObjDataAdapter() {
      this(false);
    }

    public GeoObjDataAdapter(final boolean isTimeRange) {
      super();
      this.isTimeRange = isTimeRange;
    }

    @Override
    public String getTypeName() {
      return "geoobj";
    }

    @Override
    public byte[] getDataId(final GeoObj entry) {
      // the entry's string id doubles as its data id
      return entry.id.getBytes();
    }

    @Override
    public RowBuilder newRowBuilder(final FieldDescriptor[] outputFieldDescriptors) {
      // Stateful builder: collects field values one at a time, then assembles a GeoObj.
      return new RowBuilder() {
        private String id;
        private Geometry geom;
        private Date stime;
        private Date etime;

        @Override
        public void setField(final String id, final Object fieldValue) {
          if (id.equals(GEOM)) {
            geom = (Geometry) fieldValue;
          } else if (id.equals(ID)) {
            this.id = (String) fieldValue;
          } else if (id.equals(START_TIME)) {
            stime = (Date) fieldValue;
          } else {
            // any other field name is treated as the end time
            etime = (Date) fieldValue;
          }
        }

        @Override
        public void setFields(final Map values) {
          if (values.containsKey(GEOM)) {
            geom = (Geometry) values.get(GEOM);
          }
          if (values.containsKey(ID)) {
            id = (String) values.get(ID);
          }
          if (values.containsKey(START_TIME)) {
            stime = (Date) values.get(START_TIME);
          }
          if (values.containsKey(END_TIME)) {
            etime = (Date) values.get(END_TIME);
          }
        }

        @Override
        public GeoObj buildRow(final byte[] dataId) {
          return new GeoObj(geom, stime, etime, id);
        }
      };
    }

    @Override
    public Object getFieldValue(final GeoObj entry, final String fieldName) {
      switch (fieldName) {
        case GEOM:
          return entry.geometry;
        case ID:
          return entry.id;
        case START_TIME:
          return entry.startTime;
        case END_TIME:
          return entry.endTime;
      }
      // unknown field names yield null rather than throwing
      return null;
    }

    @Override
    public Class getDataClass() {
      return GeoObj.class;
    }

    @Override
    public byte[] toBinary() {
      // single-byte payload: 1 when this adapter uses time-range descriptors, 0 otherwise
      return new byte[] {isTimeRange ? (byte) 1 : 0};
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      isTimeRange = bytes[0] == 1;
    }

    @Override
    public FieldDescriptor[] getFieldDescriptors() {
      return isTimeRange ? TIME_RANGE_DESCRIPTORS : TIME_DESCRIPTORS;
    }

    @Override
    public FieldDescriptor getFieldDescriptor(final String fieldName) {
      // linear scan of the active descriptor set; null when no field matches
      return Arrays.stream(isTimeRange ? TIME_RANGE_DESCRIPTORS : TIME_DESCRIPTORS).filter(
          field -> field.fieldName().equals(fieldName)).findFirst().orElse(null);
    }
  }

  /** Immutable value object used as the adapter's row type. */
  private static class GeoObj {
    private final Geometry geometry;
    private final String id;
    private final Date startTime;
    private final Date endTime;

    public GeoObj(
        final Geometry geometry,
        final Date startTime,
        final Date endTime,
        final String id) {
      super();
      this.geometry = geometry;
      this.startTime = startTime;
      this.endTime = endTime;
      this.id = id;
    }
  }
}



================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/field/GeoTimeReaderWriterTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.field;

import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;

/**
 * Round-trip (write-then-read) tests for the default geotime field serializers registered with
 * {@link FieldUtils}: Geometry, Date, Calendar, and their array variants.
 */
public class GeoTimeReaderWriterTest {
  private Geometry geometryExpected;
  private Geometry[] geometryArrayExpected;
  private Date dateExpected;
  private Date[] dateArrayExpected;
  private Calendar calendarExpected;
  private Calendar[] calendarArrayExpected;

  /** Builds expected fixtures; the arrays deliberately contain nulls to exercise null handling. */
  @Before
  public void init() {
    geometryExpected = new GeometryFactory().createPoint(new Coordinate(25, 32));
    geometryArrayExpected =
        new Geometry[] {
            new GeometryFactory().createPoint(new Coordinate(25, 32)),
            new GeometryFactory().createPoint(new Coordinate(26, 33)),
            new GeometryFactory().createPoint(new Coordinate(27, 34)),
            new GeometryFactory().createPoint(new Coordinate(28, 35))};
    dateExpected = new Date();
    dateArrayExpected = new Date[] {new Date(), null, new Date(0), null};
    calendarExpected = new GregorianCalendar();
    calendarExpected.setTimeZone(TimeZone.getTimeZone("GMT"));
    final Calendar cal1 = new GregorianCalendar();
    cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
    final Calendar cal2 = new GregorianCalendar();
    cal2.setTimeZone(TimeZone.getTimeZone("GMT"));
    calendarArrayExpected = new Calendar[] {cal1, null, cal2, null};
  }

  @Test
  public void testGeoTimeReadWrite() {
    byte[] value;
    // test Geometry reader/writer
    value = FieldUtils.getDefaultWriterForClass(Geometry.class).writeField(geometryExpected);
    final Geometry geometryActual =
        FieldUtils.getDefaultReaderForClass(Geometry.class).readField(value);
    // TODO develop the "equals" test for Geometry
    Assert.assertEquals("FAILED test of Geometry reader/writer", geometryExpected, geometryActual);

    // test Geometry Array reader/writer
    value = FieldUtils.getDefaultWriterForClass(Geometry[].class).writeField(geometryArrayExpected);
    final Geometry[] geometryArrayActual =
        FieldUtils.getDefaultReaderForClass(Geometry[].class).readField(value);
    // fixed copy/paste defect: this failure message previously said "String Array"
    Assert.assertTrue(
        "FAILED test of Geometry Array reader/writer",
        Arrays.deepEquals(geometryArrayExpected, geometryArrayActual));

    // test Date reader/writer
    value = FieldUtils.getDefaultWriterForClass(Date.class).writeField(dateExpected);
    final Date dateActual = FieldUtils.getDefaultReaderForClass(Date.class).readField(value);
    Assert.assertEquals("FAILED test of Date reader/writer", dateExpected, dateActual);

    // test Date Array reader/writer
    value = FieldUtils.getDefaultWriterForClass(Date[].class).writeField(dateArrayExpected);
    final Date[] dateArrayActual =
        FieldUtils.getDefaultReaderForClass(Date[].class).readField(value);
    Assert.assertTrue(
        "FAILED test of Date Array reader/writer",
        Arrays.deepEquals(dateArrayExpected, dateArrayActual));

    // test Calendar reader/writer
    value = FieldUtils.getDefaultWriterForClass(Calendar.class).writeField(calendarExpected);
    final Calendar calendarActual =
        FieldUtils.getDefaultReaderForClass(Calendar.class).readField(value);
    Assert.assertEquals("FAILED test of Calendar reader/writer", calendarExpected, calendarActual);

    // test Calendar Array reader/writer
    value = FieldUtils.getDefaultWriterForClass(Calendar[].class).writeField(calendarArrayExpected);
    final Calendar[] calendarArrayActual =
        FieldUtils.getDefaultReaderForClass(Calendar[].class).readField(value);
    Assert.assertTrue(
        "FAILED test of Calendar Array reader/writer",
        Arrays.deepEquals(calendarArrayExpected, calendarArrayActual));
  }
}



================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/BasicQueryTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import static org.junit.Assert.assertEquals; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.Instant; import java.util.Date; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.threeten.extra.Interval; public class 
BasicQueryTest { SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz"); private CommonIndexedPersistenceEncoding createData(final Date start, final Date end) { final PersistentDataset commonData = new MultiFieldPersistentDataset<>(); commonData.addValue( new TimeField(Unit.YEAR).getFieldName(), Interval.of(Instant.ofEpochMilli(start.getTime()), Instant.ofEpochMilli(end.getTime()))); return new CommonIndexedPersistenceEncoding( (short) 1, StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), 1, commonData, new MultiFieldPersistentDataset()); } public void performOp(final BasicQueryCompareOperation op, final boolean[] expectedResults) throws ParseException { // query time range final ConstraintData constrainData = new ConstraintData( new NumericRange( df.parse("2017-02-22T12:00:00GMT-00:00").getTime(), df.parse("2017-02-22T13:00:00GMT-00:00").getTime()), true); final ConstraintsByClass constaints = new ConstraintsByClass(new ConstraintSet(TimeDefinition.class, constrainData)); final BasicQueryByClass query = new BasicQueryByClass(constaints, op); final CommonIndexedPersistenceEncoding[] data = new CommonIndexedPersistenceEncoding[] { // same exact time range as the query createData( df.parse("2017-02-22T12:00:00GMT-00:00"), df.parse("2017-02-22T13:00:00GMT-00:00")), // partial overlap createData( df.parse("2017-02-22T11:00:00GMT-00:00"), df.parse("2017-02-22T12:30:00GMT-00:00")), // time range completely within the query createData( df.parse("2017-02-22T12:30:00GMT-00:00"), df.parse("2017-02-22T12:50:00GMT-00:00")), // time range touching each other createData( df.parse("2017-02-22T11:00:00GMT-00:00"), df.parse("2017-02-22T12:00:00GMT-00:00")), // no intersection between ranges createData( df.parse("2017-02-22T11:00:00GMT-00:00"), df.parse("2017-02-22T11:59:00GMT-00:00")), // time range contains complete query range createData( df.parse("2017-02-22T11:00:00GMT-00:00"), 
df.parse("2017-02-22T14:00:00GMT-00:00"))}; final Index index = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); int pos = 0; for (final CommonIndexedPersistenceEncoding dataItem : data) { for (final QueryFilter filter : query.createFilters(index)) { assertEquals( "result: " + pos, expectedResults[pos++], filter.accept(index.getIndexModel(), dataItem)); } } } @Test public void testContains() throws ParseException { performOp( BasicQueryCompareOperation.CONTAINS, new boolean[] {true, false, true, false, false, false}); } @Test public void testOverlaps() throws ParseException { performOp( BasicQueryCompareOperation.OVERLAPS, new boolean[] {false, true, false, false, false, false}); } @Test public void testIntersects() throws ParseException { performOp( BasicQueryCompareOperation.INTERSECTS, new boolean[] {true, true, true, true, false, true}); } @Test public void testEquals() throws ParseException { performOp( BasicQueryCompareOperation.EQUALS, new boolean[] {true, false, false, false, false, false}); } @Test public void testDisjoint() throws ParseException { performOp( BasicQueryCompareOperation.DISJOINT, new boolean[] {false, false, false, false, true, false}); } @Test public void testWithin() throws ParseException { performOp( BasicQueryCompareOperation.WITHIN, new boolean[] {true, false, false, false, false, true}); } @Test public void testCrosses() throws ParseException { performOp( BasicQueryCompareOperation.CROSSES, new boolean[] {false, false, false, false, false, false}); } @Test public void testTouches() throws ParseException { performOp( BasicQueryCompareOperation.TOUCHES, new boolean[] {false, false, false, true, false, false}); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/SpatialQueryTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import static org.junit.Assert.assertEquals; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; public class SpatialQueryTest { @Test public void test() { final GeometryFactory factory = new GeometryFactory(); final ExplicitSpatialQuery query = new ExplicitSpatialQuery( factory.createPolygon( new Coordinate[] { new Coordinate(24, 33), new Coordinate(28, 33), new Coordinate(28, 31), new Coordinate(24, 31), new Coordinate(24, 33)})); final ExplicitSpatialQuery queryCopy = new ExplicitSpatialQuery(); queryCopy.fromBinary(query.toBinary()); assertEquals(queryCopy.getQueryGeometry(), query.getQueryGeometry()); } private IndexedPersistenceEncoding createData(final Geometry geomData) { final PersistentDataset commonData = new 
MultiFieldPersistentDataset<>(); commonData.addValue(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, geomData); return new IndexedPersistenceEncoding( (short) 1, StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), 1, commonData, new MultiFieldPersistentDataset()); } public void performOp(final CompareOperation op, final boolean[] expectedResults) { final GeometryFactory factory = new GeometryFactory(); // query geometry for testing final Coordinate[] queryCoord = new Coordinate[] { new Coordinate(24, 33), new Coordinate(28, 33), new Coordinate(28, 37), new Coordinate(24, 37), new Coordinate(24, 33)}; // create spatial query object with geometric relationship operator final ExplicitSpatialQuery query = new ExplicitSpatialQuery(factory.createPolygon(queryCoord), op); final ExplicitSpatialQuery queryCopy = new ExplicitSpatialQuery(); queryCopy.fromBinary(query.toBinary()); // This line is crossing query polygon final Coordinate[] line1 = new Coordinate[] {new Coordinate(22, 32), new Coordinate(25, 36)}; // This line is completely within the query polygon final Coordinate[] line2 = new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}; // This line is completely outside of the query polygon final Coordinate[] line3 = new Coordinate[] {new Coordinate(21, 33.5), new Coordinate(23, 34)}; // This line is touching one of the corner of the query polygon final Coordinate[] line4 = new Coordinate[] {new Coordinate(28, 33), new Coordinate(30, 34)}; // this polygon is completely contained within the query polygon final Coordinate[] smallPolygon = new Coordinate[] { new Coordinate(25, 34), new Coordinate(27, 34), new Coordinate(27, 36), new Coordinate(25, 36), new Coordinate(25, 34)}; // this polygon is same as query polygon final Coordinate[] dataPolygon = queryCoord.clone(); final IndexedPersistenceEncoding[] data = new IndexedPersistenceEncoding[] { createData(factory.createLineString(line1)), 
createData(factory.createLineString(line2)), createData(factory.createLineString(line3)), createData(factory.createLineString(line4)), createData(factory.createPolygon(smallPolygon)), createData(factory.createPolygon(dataPolygon))}; int pos = 0; final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); for (final IndexedPersistenceEncoding dataItem : data) { for (final QueryFilter filter : queryCopy.createFilters(index)) { assertEquals( "result: " + pos, expectedResults[pos++], filter.accept(index.getIndexModel(), dataItem)); } } } @Test public void testContains() { performOp(CompareOperation.CONTAINS, new boolean[] {false, true, false, false, true, true}); } @Test public void testOverlaps() { performOp(CompareOperation.OVERLAPS, new boolean[] {false, false, false, false, false, false}); } @Test public void testIntersects() { performOp(CompareOperation.INTERSECTS, new boolean[] {true, true, false, true, true, true}); } @Test public void testDisjoint() { performOp(CompareOperation.DISJOINT, new boolean[] {false, false, true, false, false, false}); } @Test public void testTouches() { performOp(CompareOperation.TOUCHES, new boolean[] {false, false, false, true, false, false}); } @Test public void testCrosses() { performOp(CompareOperation.CROSSES, new boolean[] {true, false, false, false, false, false}); } @Test public void testWithin() { performOp(CompareOperation.WITHIN, new boolean[] {false, false, false, false, false, true}); } @Test public void testEquals() { performOp(CompareOperation.EQUALS, new boolean[] {false, false, false, false, false, true}); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/SpatialTemporalQueryTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import static org.junit.Assert.assertEquals; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.Instant; import java.util.Date; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.threeten.extra.Interval; public class SpatialTemporalQueryTest { SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz"); @Test public void test() throws ParseException { final GeometryFactory factory = new GeometryFactory(); final ExplicitSpatialTemporalQuery query = new ExplicitSpatialTemporalQuery( df.parse("2005-05-17T19:32:56GMT-00:00"), 
df.parse("2005-05-17T22:32:56GMT-00:00"), factory.createPolygon( new Coordinate[] { new Coordinate(24, 33), new Coordinate(28, 33), new Coordinate(28, 31), new Coordinate(24, 31), new Coordinate(24, 33)})); final ExplicitSpatialTemporalQuery queryCopy = new ExplicitSpatialTemporalQuery(); queryCopy.fromBinary(query.toBinary()); assertEquals(queryCopy.getQueryGeometry(), query.getQueryGeometry()); } private CommonIndexedPersistenceEncoding createData( final Date start, final Date end, final Coordinate[] coordinates) { final GeometryFactory factory = new GeometryFactory(); final PersistentDataset commonData = new MultiFieldPersistentDataset<>(); commonData.addValue( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, factory.createLineString(coordinates)); commonData.addValue( new TimeField(Unit.YEAR).getFieldName(), Interval.of(Instant.ofEpochMilli(start.getTime()), Instant.ofEpochMilli(end.getTime()))); return new CommonIndexedPersistenceEncoding( (short) 1, StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), 1, commonData, new MultiFieldPersistentDataset()); } public void performOp(final CompareOperation op, final boolean[] expectedResults) throws ParseException { final GeometryFactory factory = new GeometryFactory(); final ExplicitSpatialTemporalQuery query = new ExplicitSpatialTemporalQuery( df.parse("2005-05-17T19:32:56GMT-00:00"), df.parse("2005-05-17T22:32:56GMT-00:00"), factory.createPolygon( new Coordinate[] { new Coordinate(24, 33), new Coordinate(28, 33), new Coordinate(28, 37), new Coordinate(24, 37), new Coordinate(24, 33)}), op); final ExplicitSpatialQuery queryCopy = new ExplicitSpatialQuery(); queryCopy.fromBinary(query.toBinary()); final CommonIndexedPersistenceEncoding[] data = new CommonIndexedPersistenceEncoding[] { createData( df.parse("2005-05-17T19:32:56GMT-00:00"), df.parse("2005-05-17T22:32:56GMT-00:00"), new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}), createData( 
df.parse("2005-05-17T17:32:56GMT-00:00"), df.parse("2005-05-17T21:32:56GMT-00:00"), new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}), createData( df.parse("2005-05-17T19:33:56GMT-00:00"), df.parse("2005-05-17T20:32:56GMT-00:00"), new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}), createData( df.parse("2005-05-17T16:32:56GMT-00:00"), df.parse("2005-05-17T21:32:56GMT-00:00"), new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)}), createData( df.parse("2005-05-17T22:33:56GMT-00:00"), df.parse("2005-05-17T22:34:56GMT-00:00"), new Coordinate[] {new Coordinate(25, 33.5), new Coordinate(26, 34)})}; final Index index = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); int pos = 0; for (final CommonIndexedPersistenceEncoding dataItem : data) { for (final QueryFilter filter : queryCopy.createFilters(index)) { assertEquals( "result: " + pos, expectedResults[pos++], filter.accept(index.getIndexModel(), dataItem)); } } } @Test public void testContains() throws ParseException { performOp(CompareOperation.CONTAINS, new boolean[] {true, false, true, false, false}); } @Test public void testOverlaps() throws ParseException { performOp(CompareOperation.OVERLAPS, new boolean[] {false, false, false, false, false}); } @Test public void testIntersects() throws ParseException { performOp(CompareOperation.INTERSECTS, new boolean[] {true, true, true, true, false}); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/TemporalConstraintsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import static org.junit.Assert.assertEquals; import java.util.Date; import org.junit.Test; public class TemporalConstraintsTest { @Test public void test() { final TemporalConstraints constraints = new TemporalConstraints(); constraints.add(new TemporalRange(new Date(1000), new Date(100002))); final byte[] b = constraints.toBinary(); final TemporalConstraints constraintsDup = new TemporalConstraints(); constraintsDup.fromBinary(b); assertEquals(constraints, constraintsDup); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/TemporalRangeTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Date; import org.junit.Test; import org.locationtech.geowave.core.index.numeric.NumericRange; public class TemporalRangeTest { @Test public void test() { final TemporalRange range = new TemporalRange(new Date(100), new Date(1000)); assertFalse(range.isWithin(new Date(10))); assertFalse(range.isWithin(new Date(100000))); assertTrue(range.isWithin(new Date(800))); assertFalse(range.isWithin(new NumericRange(20, 99))); assertFalse(range.isWithin(new NumericRange(1001, 9900))); assertTrue(range.isWithin(new NumericRange(998, 9900))); assertTrue(range.isWithin(new NumericRange(20, 199))); assertTrue(range.isWithin(new NumericRange(150, 199))); assertTrue(check(new NumericRange(-1, 1), new NumericRange(-1, 1))); assertFalse(check(new NumericRange(9, 19), new NumericRange(20, 30))); assertTrue(check(new NumericRange(11, 21), new NumericRange(20, 30))); assertTrue(check(new NumericRange(20, 30), new NumericRange(20, 30))); assertFalse(check(new NumericRange(9, 19), new NumericRange(20, 30))); assertTrue(check(new NumericRange(11, 21), new NumericRange(20, 30))); assertTrue(check(new NumericRange(21, 29), new NumericRange(20, 30))); assertTrue(check(new NumericRange(20, 30), new NumericRange(21, 29))); assertTrue(check(new NumericRange(20, 30), new NumericRange(11, 21))); assertFalse(check(new NumericRange(20, 30), new NumericRange(9, 19))); assertTrue(check(new NumericRange(-3, -1), new NumericRange(-2, 0))); assertTrue(check(new NumericRange(-2, 0), new 
NumericRange(-3, -1))); assertFalse(check(new NumericRange(-3, 1), new NumericRange(2, 4))); assertTrue(check(new NumericRange(-3, 1), new NumericRange(-2, 0))); assertTrue(check(new NumericRange(-2, 0), new NumericRange(-3, 1))); assertTrue(check(new NumericRange(-2, 0), new NumericRange(-3, -1))); assertTrue(check(new NumericRange(-3, -1), new NumericRange(-2, 0))); assertTrue(check(new NumericRange(-2, 0), new NumericRange(-1, 1))); assertTrue(check(new NumericRange(-1, 3), new NumericRange(0, 2))); assertFalse(check(new NumericRange(-1, -0.5), new NumericRange(0, 2))); assertTrue(check(new NumericRange(0, 2), new NumericRange(-1, 3))); assertTrue(check(new NumericRange(0, 2), new NumericRange(-1, 3))); assertFalse(check(new NumericRange(-1, 2), new NumericRange(3, 4))); assertFalse(check(new NumericRange(-1, 2), new NumericRange(3, 6))); assertTrue(check(new NumericRange(-1, 2), new NumericRange(1, 4))); } public static boolean check(final NumericRange r1, final NumericRange r2) { final double t0 = r1.getMax() - r2.getMin(); final double t1 = r2.getMax() - r1.getMin(); return !(Math.abs(t0 - t1) > (t0 + t1)); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/AbstractVectorAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import java.util.Date; import java.util.List; import org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField; import org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.aggregate.AbstractAggregationTest; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Point; import com.google.common.collect.Lists; public class AbstractVectorAggregationTest extends AbstractAggregationTest { protected static final String ID_COLUMN = "id"; protected static final String GEOMETRY_COLUMN = "geometry"; protected static final String TIMESTAMP_COLUMN = "timestamp"; protected static final String LATITUDE_COLUMN = "latitude"; protected static final String LONGITUDE_COLUMN = "longitude"; protected static final String VALUE_COLUMN = "value"; protected static final String ODDS_NULL_COLUMN = "oddsNull"; protected static final String ALL_NULL_COLUMN = "allNull"; protected DataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("testType", SpatialTestType.class, "id"); public static SpatialTestType createFeature( final int featureId, final int longitude, final int latitude) { return new SpatialTestType( 
String.valueOf(featureId), GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude)), new Date(), latitude, longitude, featureId, featureId % 2 == 0 ? "NotNull" : null, null); } public static List generateFeatures() { final List features = Lists.newArrayList(); int featureId = 0; for (int longitude = -180; longitude <= 180; longitude += 1) { for (int latitude = -90; latitude <= 90; latitude += 1) { features.add(createFeature(featureId, longitude, latitude)); featureId++; } } return features; } @GeoWaveDataType protected static class SpatialTestType { @GeoWaveField private String id; @GeoWaveSpatialField private Point geometry; @GeoWaveTemporalField private Date timestamp; @GeoWaveField private double latitude; @GeoWaveField private double longitude; @GeoWaveField private long value; @GeoWaveField private String oddsNull; @GeoWaveField private String allNull; public SpatialTestType() {} public SpatialTestType( final String id, final Point geometry, final Date timestamp, final double latitude, final double longitude, final long value, final String oddsNull, final String allNull) { this.id = id; this.geometry = geometry; this.timestamp = timestamp; this.latitude = latitude; this.longitude = longitude; this.value = value; this.oddsNull = oddsNull; this.allNull = allNull; } } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/CompositeAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.junit.Test; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.PersistableList; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.query.aggregate.BinningAggregation; import org.locationtech.geowave.core.store.query.aggregate.BinningAggregationOptions; import org.locationtech.geowave.core.store.query.aggregate.CompositeAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation; public class CompositeAggregationTest extends AbstractVectorAggregationTest { @Test public void testCompositeAggregation() { final List features = generateFeatures(); final CompositeAggregation aggregation = new CompositeAggregation<>(); aggregation.add(new FieldCountAggregation<>(null)); aggregation.add(new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN))); aggregation.add(new FieldCountAggregation<>(new FieldNameParam(ALL_NULL_COLUMN))); aggregation.add(new FieldCountAggregation<>(new FieldNameParam(ODDS_NULL_COLUMN))); final List result = aggregateObjects(adapter, aggregation, features); assertEquals(4, result.size()); assertTrue(result.get(0) instanceof Long); 
assertEquals(Long.valueOf(features.size()), result.get(0)); assertTrue(result.get(1) instanceof Long); assertEquals(Long.valueOf(features.size()), result.get(1)); assertTrue(result.get(2) instanceof Long); assertEquals(Long.valueOf(0L), result.get(2)); assertTrue(result.get(3) instanceof Long); assertEquals(Long.valueOf((features.size() / 2) + 1), result.get(3)); } @Test public void testCompositeAggregationWithBinning() { final List features = generateFeatures(); final CompositeAggregation compositeAggregation = new CompositeAggregation<>(); compositeAggregation.add(new FieldCountAggregation<>(null)); compositeAggregation.add(new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN))); compositeAggregation.add(new FieldCountAggregation<>(new FieldNameParam(ALL_NULL_COLUMN))); compositeAggregation.add(new FieldCountAggregation<>(new FieldNameParam(ODDS_NULL_COLUMN))); final Aggregation, Map>, SpatialTestType> compositeBinningAggregation = new BinningAggregation<>( compositeAggregation, new SpatialFieldBinningStrategy<>(SpatialBinningType.S2, 4, true, GEOMETRY_COLUMN), -1); final Aggregation, Map, SpatialTestType> simpleBinningAggregation = new BinningAggregation<>( new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN)), new SpatialFieldBinningStrategy<>(SpatialBinningType.S2, 4, true, GEOMETRY_COLUMN), -1); final Map> compositeBinningResult = aggregateObjects(adapter, compositeBinningAggregation, features); final Map simpleBinningResult = aggregateObjects(adapter, simpleBinningAggregation, features); final List compositeResult = aggregateObjects(adapter, compositeAggregation, features); // first make sure each key for simple binning match the count of the corresponding composite // binning field assertEquals(simpleBinningResult.size(), compositeBinningResult.size()); List aggregateBinningResult = null; for (final Entry> obj : compositeBinningResult.entrySet()) { final Long simpleResult = simpleBinningResult.get(obj.getKey()); 
assertEquals(simpleResult, obj.getValue().get(1)); if (aggregateBinningResult == null) { aggregateBinningResult = new ArrayList<>(obj.getValue()); } else { aggregateBinningResult = compositeAggregation.merge(aggregateBinningResult, obj.getValue()); } } // then make sure that aggregating the keys on the composite binning matches the non-binning // result assertEquals(compositeResult, aggregateBinningResult); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/GeohashBinningStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import java.util.UUID; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.junit.Test; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.data.SingleFieldPersistentDataset; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.github.davidmoten.geo.GeoHash; public class GeohashBinningStrategyTest { private static final GeometryFactory geoFactory = new GeometryFactory(); private static final SimpleFeatureType schema; static { try { schema = DataUtilities.createType("testGeo", "location:Point:srid=4326,name:String"); } catch (final SchemaException e) { throw 
new RuntimeException(e); } } private static SimpleFeature createSimpleFeature(final Coordinate c) { final String name = UUID.randomUUID().toString(); return SimpleFeatureBuilder.build( GeohashBinningStrategyTest.schema, new Object[] {geoFactory.createPoint(c), name}, name); } private static CommonIndexedPersistenceEncoding createCommonIndexData( final Coordinate coordinate) { final PersistentDataset commonData = new SingleFieldPersistentDataset<>(); commonData.addValue( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, geoFactory.createPoint(coordinate)); return new CommonIndexedPersistenceEncoding( (short) 1, StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), StringUtils.stringToBinary("1"), 1, commonData, new MultiFieldPersistentDataset<>()); } @Test public void testPrecisionConstructor() { for (int i = 0; i < 100; i++) { assertThat( new SpatialSimpleFeatureBinningStrategy( SpatialBinningType.GEOHASH, i, true).getPrecision(), is(i)); assertThat( new SpatialCommonIndexedBinningStrategy( SpatialBinningType.GEOHASH, i, true).getPrecision(), is(i)); } } @Test public void testNoGeometry() throws SchemaException { final SimpleFeatureType noGeoType = DataUtilities.createType("testNoGeo", "name:String"); final SimpleFeature noGeoFeature = SimpleFeatureBuilder.build(noGeoType, new Object[] {"NAME!"}, "NAME!"); final SpatialBinningStrategy sfStrat = new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, 4, true); // If the feature does not have a geometry, null is returned by binEntry. ByteArray[] bin = sfStrat.getBins(null, noGeoFeature); assertNull(bin); final SpatialBinningStrategy ciStrat = new SpatialCommonIndexedBinningStrategy( SpatialBinningType.GEOHASH, 4, true, "NotTheGeoField"); // we are looking in the wrong field for the geometry type here, so therefore no Geometry will // be found. 
bin = ciStrat.getBins(null, createCommonIndexData(new Coordinate(1, 1, 1))); assertNull(bin); } @Test public void testEncodeToGeohash() { final Coordinate coord = new Coordinate(49.619, -5.821); final Point point = geoFactory.createPoint(coord); // calculated this beforehand. final String hash = "mngqch76nwb"; for (int i = 1; i < hash.length(); i++) { assertThat(hash.substring(0, i), is(GeoHash.encodeHash(point.getY(), point.getX(), i))); } } @Test public void testBinSimpleFeature() { SimpleFeature feature1 = createSimpleFeature(new Coordinate(0, 0)); // same coord, but different name, make sure it still works in this simple case SimpleFeature feature2 = createSimpleFeature(new Coordinate(40, 40)); final SimpleFeature feature3 = createSimpleFeature(new Coordinate(40, 40)); SpatialBinningStrategy strat = new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, 4, true); ByteArray bin1 = strat.getBins(null, feature1)[0]; ByteArray bin2 = strat.getBins(null, feature2)[0]; final ByteArray bin3 = strat.getBins(null, feature3)[0]; assertThat(bin1, is(not(bin2))); assertThat(bin2, is(bin3)); strat = new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, 1, true); feature1 = createSimpleFeature(new Coordinate(0, 0)); feature2 = createSimpleFeature(new Coordinate(0.01, 0.01)); bin1 = strat.getBins(null, feature1)[0]; bin2 = strat.getBins(null, feature2)[0]; // even though they are different coords, they are binned together due to precision. 
assertThat(bin1, is(bin2)); } @Test public void testBinCommonIndexModel() { SpatialBinningStrategy strat = new SpatialCommonIndexedBinningStrategy(SpatialBinningType.GEOHASH, 4, true); CommonIndexedPersistenceEncoding data1 = createCommonIndexData(new Coordinate(0, 0)); CommonIndexedPersistenceEncoding data2 = createCommonIndexData(new Coordinate(40, 40)); ByteArray bin1 = strat.getBins(null, data1)[0]; ByteArray bin2 = strat.getBins(null, data2)[0]; assertThat(bin1, is(not(bin2))); strat = new SpatialCommonIndexedBinningStrategy(SpatialBinningType.GEOHASH, 1, true); data1 = createCommonIndexData(new Coordinate(0, 0)); data2 = createCommonIndexData(new Coordinate(0.01, 0.01)); bin1 = strat.getBins(null, data1)[0]; bin2 = strat.getBins(null, data2)[0]; // even though they are different coords, they are binned together. assertThat(bin1, is(bin2)); } @Test public void testSerialize() { SpatialBinningStrategy strat = new SpatialSimpleFeatureBinningStrategy(); byte[] stratBytes = PersistenceUtils.toBinary(strat); SpatialBinningStrategy roundtrip = (SpatialSimpleFeatureBinningStrategy) PersistenceUtils.fromBinary(stratBytes); assertThat(strat.getPrecision(), is(roundtrip.getPrecision())); strat = new SpatialCommonIndexedBinningStrategy(); stratBytes = PersistenceUtils.toBinary(strat); roundtrip = (SpatialCommonIndexedBinningStrategy) PersistenceUtils.fromBinary(stratBytes); assertThat(strat.getPrecision(), is(roundtrip.getPrecision())); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorBoundingBoxAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import static org.junit.Assert.assertEquals; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.jts.geom.Envelope; public class VectorBoundingBoxAggregationTest extends AbstractVectorAggregationTest { @Test public void testVectorCountAggregation() { final List features = generateFeatures(); VectorBoundingBoxAggregation aggregation = new VectorBoundingBoxAggregation<>(null); final Envelope expected = new Envelope(-180, 180, -90, 90); Envelope result = aggregateObjects(adapter, aggregation, features); assertEquals(expected, result); aggregation = new VectorBoundingBoxAggregation<>(new FieldNameParam(GEOMETRY_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(expected, result); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorCountAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import static org.junit.Assert.assertEquals; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation; public class VectorCountAggregationTest extends AbstractVectorAggregationTest { @Test public void testVectorCountAggregation() { final List features = generateFeatures(); FieldCountAggregation aggregation = new FieldCountAggregation<>(null); Long result = aggregateObjects(adapter, aggregation, features); assertEquals(Long.valueOf(features.size()), result); aggregation = new FieldCountAggregation<>(new FieldNameParam(GEOMETRY_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(Long.valueOf(features.size()), result); aggregation = new FieldCountAggregation<>(new FieldNameParam(ALL_NULL_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(Long.valueOf(0L), result); aggregation = new FieldCountAggregation<>(new FieldNameParam(ODDS_NULL_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(Long.valueOf((features.size() / 2) + 1), result); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/aggregate/VectorMathAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.aggregate; import static org.junit.Assert.assertEquals; import java.math.BigDecimal; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.store.query.aggregate.FieldMaxAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldMinAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation; public class VectorMathAggregationTest extends AbstractVectorAggregationTest { @Test public void testVectorMaxAggregation() { final List features = generateFeatures(); FieldMaxAggregation aggregation = new FieldMaxAggregation<>(new FieldNameParam(LATITUDE_COLUMN)); BigDecimal result = aggregateObjects(adapter, aggregation, features); assertEquals(90L, result.longValue()); aggregation = new FieldMaxAggregation<>(new FieldNameParam(LONGITUDE_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(180L, result.longValue()); } @Test public void testVectorMinAggregation() { final List features = generateFeatures(); FieldMinAggregation aggregation = new FieldMinAggregation<>(new FieldNameParam(LATITUDE_COLUMN)); BigDecimal result = aggregateObjects(adapter, aggregation, features); assertEquals(-90L, result.longValue()); aggregation = new FieldMinAggregation<>(new FieldNameParam(LONGITUDE_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(-180L, result.longValue()); } @Test public void testVectorSumAggregation() { final List features = generateFeatures(); FieldSumAggregation 
aggregation = new FieldSumAggregation<>(new FieldNameParam(LATITUDE_COLUMN)); BigDecimal result = aggregateObjects(adapter, aggregation, features); assertEquals(0, result.longValue()); aggregation = new FieldSumAggregation<>(new FieldNameParam(LONGITUDE_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(0, result.longValue()); aggregation = new FieldSumAggregation<>(new FieldNameParam(VALUE_COLUMN)); result = aggregateObjects(adapter, aggregation, features); assertEquals(features.size() * (features.size() / 2), result.longValue()); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/CQLToGeoWaveFilterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.text.ParseException; import java.time.Instant; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches; import 
org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.UnpreparedFilterGeometry; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral; import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp; import org.locationtech.geowave.core.store.query.filter.expression.Exclude; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints; import 
org.locationtech.geowave.core.store.query.filter.expression.GenericEqualTo; import org.locationtech.geowave.core.store.query.filter.expression.Include; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints.DimensionConstraints; import org.locationtech.geowave.core.store.query.filter.expression.IsNull; import org.locationtech.geowave.core.store.query.filter.expression.Not; import org.locationtech.geowave.core.store.query.filter.expression.Or; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Add; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract; import org.locationtech.geowave.core.store.query.filter.expression.text.Contains; import org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.threeten.extra.Interval; import 
com.google.common.collect.Sets; public class CQLToGeoWaveFilterTest { private static final double EPSILON = 0.0000001; private static DataTypeAdapter adapter = new SpatialTemporalFilterExpressionTest.TestTypeBasicDataAdapter("test"); @Test public void testCQLtoGeoWaveFilter() throws CQLException, ParseException { Filter f = fromCQL("EMPLOYED < 15000000"); assertTrue(f instanceof NumericComparisonOperator); assertEquals(CompareOp.LESS_THAN, ((NumericComparisonOperator) f).getCompareOp()); assertTrue(((NumericComparisonOperator) f).getExpression1() instanceof NumericFieldValue); assertEquals( "EMPLOYED", ((NumericFieldValue) ((NumericComparisonOperator) f).getExpression1()).getFieldName()); assertTrue(((NumericComparisonOperator) f).getExpression2() instanceof NumericLiteral); assertEquals( 15000000L, ((NumericLiteral) ((NumericComparisonOperator) f).getExpression2()).getValue().longValue()); f = fromCQL("EMPLOYED BETWEEN 1000000 AND 3000000"); assertTrue(f instanceof NumericBetween); assertTrue(((NumericBetween) f).getValue() instanceof NumericFieldValue); assertEquals("EMPLOYED", ((NumericFieldValue) ((NumericBetween) f).getValue()).getFieldName()); assertTrue(((NumericBetween) f).getLowerBound() instanceof NumericLiteral); assertEquals( 1000000L, ((NumericLiteral) ((NumericBetween) f).getLowerBound()).getValue().longValue()); assertTrue(((NumericBetween) f).getUpperBound() instanceof NumericLiteral); assertEquals( 3000000L, ((NumericLiteral) ((NumericBetween) f).getUpperBound()).getValue().longValue()); f = fromCQL("name = 'California'"); assertTrue(f instanceof TextComparisonOperator); assertEquals(CompareOp.EQUAL_TO, ((TextComparisonOperator) f).getCompareOp()); assertTrue(((TextComparisonOperator) f).getExpression1() instanceof TextFieldValue); assertEquals( "name", ((TextFieldValue) ((TextComparisonOperator) f).getExpression1()).getFieldName()); assertTrue(((TextComparisonOperator) f).getExpression2() instanceof TextLiteral); assertEquals( "California", 
((TextLiteral) ((TextComparisonOperator) f).getExpression2()).getValue()); f = fromCQL("UNEMPLOY / (EMPLOYED + UNEMPLOY) > 0.07"); assertTrue(f instanceof NumericComparisonOperator); assertEquals(CompareOp.GREATER_THAN, ((NumericComparisonOperator) f).getCompareOp()); assertTrue(((NumericComparisonOperator) f).getExpression1() instanceof Divide); Divide divide = (Divide) ((NumericComparisonOperator) f).getExpression1(); assertTrue(divide.getExpression1() instanceof NumericFieldValue); assertEquals("UNEMPLOY", ((NumericFieldValue) divide.getExpression1()).getFieldName()); assertTrue(divide.getExpression2() instanceof Add); Add add = (Add) divide.getExpression2(); assertTrue(add.getExpression1() instanceof NumericFieldValue); assertEquals("EMPLOYED", ((NumericFieldValue) add.getExpression1()).getFieldName()); assertTrue(add.getExpression2() instanceof NumericFieldValue); assertEquals("UNEMPLOY", ((NumericFieldValue) add.getExpression2()).getFieldName()); assertTrue(((NumericComparisonOperator) f).getExpression2() instanceof NumericLiteral); assertEquals( 0.07, ((NumericLiteral) ((NumericComparisonOperator) f).getExpression2()).getValue(), EPSILON); f = fromCQL("A <> B AND B <= 8.1"); assertTrue(f instanceof And); assertTrue(((And) f).getChildren().length == 2); assertTrue(((And) f).getChildren()[0] instanceof Not); assertTrue(((Not) ((And) f).getChildren()[0]).getFilter() instanceof NumericComparisonOperator); NumericComparisonOperator equalTo = (NumericComparisonOperator) ((Not) ((And) f).getChildren()[0]).getFilter(); assertEquals(CompareOp.EQUAL_TO, equalTo.getCompareOp()); assertTrue(equalTo.getExpression1() instanceof NumericFieldValue); assertEquals("A", ((NumericFieldValue) equalTo.getExpression1()).getFieldName()); assertTrue(equalTo.getExpression2() instanceof NumericFieldValue); assertEquals("B", ((NumericFieldValue) equalTo.getExpression2()).getFieldName()); assertTrue(((And) f).getChildren()[1] instanceof NumericComparisonOperator); 
NumericComparisonOperator lessThan = (NumericComparisonOperator) ((And) f).getChildren()[1]; assertEquals(CompareOp.LESS_THAN_OR_EQUAL, lessThan.getCompareOp()); assertTrue(lessThan.getExpression1() instanceof NumericFieldValue); assertEquals("B", ((NumericFieldValue) lessThan.getExpression1()).getFieldName()); assertTrue(lessThan.getExpression2() instanceof NumericLiteral); assertEquals(8.1, ((NumericLiteral) lessThan.getExpression2()).getValue(), EPSILON); // Order of operations should be preserved f = fromCQL("A + B - (C * D) / 8.5 >= E"); assertTrue(f instanceof NumericComparisonOperator); assertEquals(CompareOp.GREATER_THAN_OR_EQUAL, ((NumericComparisonOperator) f).getCompareOp()); assertTrue(((NumericComparisonOperator) f).getExpression1() instanceof Subtract); Subtract subtract = (Subtract) ((NumericComparisonOperator) f).getExpression1(); assertTrue(subtract.getExpression1() instanceof Add); add = (Add) subtract.getExpression1(); assertTrue(add.getExpression1() instanceof NumericFieldValue); assertEquals("A", ((NumericFieldValue) add.getExpression1()).getFieldName()); assertTrue(add.getExpression2() instanceof NumericFieldValue); assertEquals("B", ((NumericFieldValue) add.getExpression2()).getFieldName()); assertTrue(subtract.getExpression2() instanceof Divide); divide = (Divide) subtract.getExpression2(); assertTrue(divide.getExpression1() instanceof Multiply); Multiply multiply = (Multiply) divide.getExpression1(); assertTrue(multiply.getExpression1() instanceof NumericFieldValue); assertEquals("C", ((NumericFieldValue) multiply.getExpression1()).getFieldName()); assertTrue(multiply.getExpression2() instanceof NumericFieldValue); assertEquals("D", ((NumericFieldValue) multiply.getExpression2()).getFieldName()); assertTrue(divide.getExpression2() instanceof NumericLiteral); assertEquals(8.5, ((NumericLiteral) divide.getExpression2()).getValue(), EPSILON); assertTrue(((NumericComparisonOperator) f).getExpression2() instanceof NumericFieldValue); 
assertEquals( "E", ((NumericFieldValue) ((NumericComparisonOperator) f).getExpression2()).getFieldName()); f = fromCQL("BBOX(geom, -90, 40, -60, 45)"); assertTrue(f instanceof BBox); assertTrue(((BBox) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((BBox) f).getExpression1()).getFieldName()); assertTrue(((BBox) f).getExpression2() instanceof SpatialLiteral); SpatialLiteral spatialLit = (SpatialLiteral) ((BBox) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); Geometry geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo(GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(-90, -60, 40, 45)))); f = fromCQL("DISJOINT(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof Disjoint); assertTrue(((Disjoint) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((Disjoint) f).getExpression1()).getFieldName()); assertTrue(((Disjoint) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((Disjoint) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("EQUALS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof SpatialEqualTo); assertTrue(((SpatialEqualTo) f).getExpression1() instanceof SpatialFieldValue); assertEquals( "geom", ((SpatialFieldValue) ((SpatialEqualTo) f).getExpression1()).getFieldName()); assertTrue(((SpatialEqualTo) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((SpatialEqualTo) f).getExpression2(); 
assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("CONTAINS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof SpatialContains); assertTrue(((SpatialContains) f).getExpression1() instanceof SpatialFieldValue); assertEquals( "geom", ((SpatialFieldValue) ((SpatialContains) f).getExpression1()).getFieldName()); assertTrue(((SpatialContains) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((SpatialContains) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("CROSSES(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof Crosses); assertTrue(((Crosses) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((Crosses) f).getExpression1()).getFieldName()); assertTrue(((Crosses) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((Crosses) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("INTERSECTS(geom, 
POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof Intersects); assertTrue(((Intersects) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((Intersects) f).getExpression1()).getFieldName()); assertTrue(((Intersects) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((Intersects) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("OVERLAPS(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof Overlaps); assertTrue(((Overlaps) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((Overlaps) f).getExpression1()).getFieldName()); assertTrue(((Overlaps) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((Overlaps) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("TOUCHES(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof Touches); assertTrue(((Touches) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((Touches) f).getExpression1()).getFieldName()); assertTrue(((Touches) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((Touches) f).getExpression2(); assertTrue(spatialLit.getValue() 
instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); f = fromCQL("WITHIN(geom, POLYGON((-90 40, -90 45, -60 45, -60 40, -90 40)))"); assertTrue(f instanceof Within); assertTrue(((Within) f).getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) ((Within) f).getExpression1()).getFieldName()); assertTrue(((Within) f).getExpression2() instanceof SpatialLiteral); spatialLit = (SpatialLiteral) ((Within) f).getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(-90, 40), new Coordinate(-90, 45), new Coordinate(-60, 45), new Coordinate(-60, 40), new Coordinate(-90, 40)}))); final Instant date1 = Instant.parse("2020-01-25T00:28:32Z"); final Instant date2 = Instant.parse("2021-03-02T13:08:45Z"); f = fromCQL("date AFTER 2020-01-25T00:28:32Z"); assertTrue(f instanceof After); assertTrue(((After) f).getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((After) f).getExpression1()).getFieldName()); assertTrue(((After) f).getExpression2() instanceof TemporalLiteral); Interval interval = ((TemporalLiteral) ((After) f).getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date > 2020-01-25T00:28:32Z"); assertTrue(f instanceof After); assertTrue(((After) f).getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((After) 
f).getExpression1()).getFieldName()); assertTrue(((After) f).getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) ((After) f).getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date BEFORE 2021-03-02T13:08:45Z"); assertTrue(f instanceof Before); assertTrue(((Before) f).getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((Before) f).getExpression1()).getFieldName()); assertTrue(((Before) f).getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) ((Before) f).getExpression2()).getValue(); assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date < 2021-03-02T13:08:45Z"); assertTrue(f instanceof Before); assertTrue(((Before) f).getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((Before) f).getExpression1()).getFieldName()); assertTrue(((Before) f).getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) ((Before) f).getExpression2()).getValue(); assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date DURING 2020-01-25T00:28:32Z/2021-03-02T13:08:45Z"); assertTrue(f instanceof During); assertTrue(((During) f).getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((During) f).getExpression1()).getFieldName()); assertTrue(((During) f).getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) ((During) f).getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); // GeoWave has a BeforeOrDuring 
class, but the CQL filter translates it using OR f = fromCQL("date BEFORE OR DURING 2020-01-25T00:28:32Z/2021-03-02T13:08:45Z"); assertTrue(f instanceof Or); assertTrue(((Or) f).getChildren().length == 2); assertTrue(((Or) f).getChildren()[0] instanceof Before); Before before = (Before) ((Or) f).getChildren()[0]; assertTrue(before.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) before.getExpression1()).getFieldName()); assertTrue(before.getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) before.getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond()); assertTrue(((Or) f).getChildren()[1] instanceof During); During during = (During) ((Or) f).getChildren()[1]; assertTrue(during.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) during.getExpression1()).getFieldName()); assertTrue(during.getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) during.getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date DURING OR AFTER 2020-01-25T00:28:32Z/2021-03-02T13:08:45Z"); assertTrue(f instanceof Or); assertTrue(((Or) f).getChildren().length == 2); assertTrue(((Or) f).getChildren()[0] instanceof During); during = (During) ((Or) f).getChildren()[0]; assertTrue(during.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) during.getExpression1()).getFieldName()); assertTrue(during.getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) during.getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); assertTrue(((Or) 
f).getChildren()[1] instanceof After); After after = (After) ((Or) f).getChildren()[1]; assertTrue(after.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) after.getExpression1()).getFieldName()); assertTrue(after.getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) after.getExpression2()).getValue(); assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date <= 2020-01-25T00:28:32Z"); assertTrue(f instanceof BeforeOrDuring); assertTrue(((BeforeOrDuring) f).getExpression1() instanceof TemporalFieldValue); assertEquals( "date", ((TemporalFieldValue) ((BeforeOrDuring) f).getExpression1()).getFieldName()); assertTrue(((BeforeOrDuring) f).getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) ((BeforeOrDuring) f).getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date >= 2020-01-25T00:28:32Z"); assertTrue(f instanceof DuringOrAfter); assertTrue(((DuringOrAfter) f).getExpression1() instanceof TemporalFieldValue); assertEquals( "date", ((TemporalFieldValue) ((DuringOrAfter) f).getExpression1()).getFieldName()); assertTrue(((DuringOrAfter) f).getExpression2() instanceof TemporalLiteral); interval = ((TemporalLiteral) ((DuringOrAfter) f).getExpression2()).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date BETWEEN 2020-01-25T00:28:32Z AND 2021-03-02T13:08:45Z"); assertTrue(f instanceof TemporalBetween); assertTrue(((TemporalBetween) f).getValue() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((TemporalBetween) f).getValue()).getFieldName()); assertTrue(((TemporalBetween) f).getLowerBound() 
instanceof TemporalLiteral); interval = ((TemporalLiteral) (((TemporalBetween) f).getLowerBound())).getValue(); assertEquals(date1.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date1.getEpochSecond(), interval.getEnd().getEpochSecond()); assertTrue(((TemporalBetween) f).getUpperBound() instanceof TemporalLiteral); interval = ((TemporalLiteral) (((TemporalBetween) f).getUpperBound())).getValue(); assertEquals(date2.getEpochSecond(), interval.getStart().getEpochSecond()); assertEquals(date2.getEpochSecond(), interval.getEnd().getEpochSecond()); f = fromCQL("date IS NULL"); assertTrue(f instanceof IsNull); assertTrue(((IsNull) f).getExpression() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) ((IsNull) f).getExpression()).getFieldName()); f = fromCQL("date IS NOT NULL"); assertTrue(f instanceof Not); assertTrue(((Not) f).getFilter() instanceof IsNull); assertTrue(((IsNull) ((Not) f).getFilter()).getExpression() instanceof TemporalFieldValue); assertEquals( "date", ((TemporalFieldValue) ((IsNull) ((Not) f).getFilter()).getExpression()).getFieldName()); f = fromCQL("INCLUDE"); assertTrue(f instanceof Include); f = fromCQL("EXCLUDE"); assertTrue(f instanceof Exclude); f = fromCQL("bool = TRUE"); assertTrue(f instanceof GenericEqualTo); assertTrue(((GenericEqualTo) f).getExpression1() instanceof BooleanFieldValue); assertEquals( "bool", ((BooleanFieldValue) ((GenericEqualTo) f).getExpression1()).getFieldName()); assertTrue(((GenericEqualTo) f).getExpression2() instanceof BooleanLiteral); assertTrue((boolean) ((BooleanLiteral) ((GenericEqualTo) f).getExpression2()).getValue()); f = fromCQL("name LIKE '%value'"); assertTrue(f instanceof EndsWith); assertTrue(((EndsWith) f).getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) ((EndsWith) f).getExpression1()).getFieldName()); assertTrue(((EndsWith) f).getExpression2() instanceof TextLiteral); assertEquals("value", ((TextLiteral) ((EndsWith) 
f).getExpression2()).getValue()); f = fromCQL("name LIKE 'value%'"); assertTrue(f instanceof StartsWith); assertTrue(((StartsWith) f).getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) ((StartsWith) f).getExpression1()).getFieldName()); assertTrue(((StartsWith) f).getExpression2() instanceof TextLiteral); assertEquals("value", ((TextLiteral) ((StartsWith) f).getExpression2()).getValue()); f = fromCQL("name LIKE '%value%'"); assertTrue(f instanceof Contains); assertTrue(((Contains) f).getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) ((Contains) f).getExpression1()).getFieldName()); assertTrue(((Contains) f).getExpression2() instanceof TextLiteral); assertEquals("value", ((TextLiteral) ((Contains) f).getExpression2()).getValue()); f = fromCQL("name LIKE 'a\\_value'"); assertTrue(f instanceof TextComparisonOperator); assertEquals(CompareOp.EQUAL_TO, ((TextComparisonOperator) f).getCompareOp()); assertTrue(((TextComparisonOperator) f).getExpression1() instanceof TextFieldValue); assertEquals( "name", ((TextFieldValue) ((TextComparisonOperator) f).getExpression1()).getFieldName()); assertTrue(((TextComparisonOperator) f).getExpression2() instanceof TextLiteral); assertEquals( "a_value", ((TextLiteral) ((TextComparisonOperator) f).getExpression2()).getValue()); try { // _ is a single character wild card, so this is not supported f = fromCQL("name LIKE 'a_value'"); fail(); } catch (CQLToGeoWaveConversionException e) { // expected } } @Test public void testComplexConstraints() throws CQLException { final Filter f = fromCQL( "BBOX(geom, 5, 20, 8, 30) AND ((A BETWEEN 5 AND 10 AND B < 10) OR (A BETWEEN 15 AND 20 AND B > 5)) AND name LIKE 'aBc%'"); // This filter should result in the following constraints: // A -> [5, 10], [15, 20] // B -> [null, null] // B > 5 OR B < 10 is a full scan // geom -> [5, 8] // geom dimension 0 // [20, 30] // geom dimension 1 // str -> ["aBc", "aBd") // "aBd" is exclusive 
assertTrue(f instanceof And); assertEquals(2, ((And) f).getChildren().length); assertTrue(((And) f).getChildren()[0] instanceof And); assertEquals(2, ((And) ((And) f).getChildren()[0]).getChildren().length); assertTrue(((And) ((And) f).getChildren()[0]).getChildren()[0] instanceof BBox); final BBox bbox = (BBox) ((And) ((And) f).getChildren()[0]).getChildren()[0]; assertTrue(bbox.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) bbox.getExpression1()).getFieldName()); assertTrue(bbox.getExpression2() instanceof SpatialLiteral); SpatialLiteral spatialLit = (SpatialLiteral) bbox.getExpression2(); assertTrue(spatialLit.getValue() instanceof UnpreparedFilterGeometry); Geometry geom = ((UnpreparedFilterGeometry) spatialLit.getValue()).getGeometry(); assertTrue( geom.equalsTopo(GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(5, 8, 20, 30)))); assertTrue(((And) ((And) f).getChildren()[0]).getChildren()[1] instanceof Or); final Or or = (Or) ((And) ((And) f).getChildren()[0]).getChildren()[1]; assertEquals(2, or.getChildren().length); assertTrue(or.getChildren()[0] instanceof And); And and = (And) or.getChildren()[0]; assertEquals(2, and.getChildren().length); assertTrue(and.getChildren()[0] instanceof NumericBetween); NumericBetween between = (NumericBetween) and.getChildren()[0]; assertTrue(between.getValue() instanceof NumericFieldValue); assertEquals("A", ((NumericFieldValue) between.getValue()).getFieldName()); assertTrue(between.getLowerBound() instanceof NumericLiteral); assertEquals(5L, ((NumericLiteral) between.getLowerBound()).getValue().longValue()); assertTrue(between.getUpperBound() instanceof NumericLiteral); assertEquals(10L, ((NumericLiteral) between.getUpperBound()).getValue().longValue()); assertTrue(and.getChildren()[1] instanceof NumericComparisonOperator); NumericComparisonOperator compareOp = (NumericComparisonOperator) and.getChildren()[1]; assertEquals(CompareOp.LESS_THAN, compareOp.getCompareOp()); 
assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("B", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals(10L, ((NumericLiteral) compareOp.getExpression2()).getValue().longValue()); assertTrue(or.getChildren()[1] instanceof And); and = (And) or.getChildren()[1]; assertEquals(2, and.getChildren().length); assertTrue(and.getChildren()[0] instanceof NumericBetween); between = (NumericBetween) and.getChildren()[0]; assertTrue(between.getValue() instanceof NumericFieldValue); assertEquals("A", ((NumericFieldValue) between.getValue()).getFieldName()); assertTrue(between.getLowerBound() instanceof NumericLiteral); assertEquals(15L, ((NumericLiteral) between.getLowerBound()).getValue().longValue()); assertTrue(between.getUpperBound() instanceof NumericLiteral); assertEquals(20L, ((NumericLiteral) between.getUpperBound()).getValue().longValue()); assertTrue(and.getChildren()[1] instanceof NumericComparisonOperator); compareOp = (NumericComparisonOperator) and.getChildren()[1]; assertEquals(CompareOp.GREATER_THAN, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("B", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals(5L, ((NumericLiteral) compareOp.getExpression2()).getValue().longValue()); assertTrue(((And) f).getChildren()[1] instanceof StartsWith); final StartsWith startsWith = (StartsWith) ((And) f).getChildren()[1]; assertTrue(startsWith.getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) startsWith.getExpression1()).getFieldName()); assertTrue(startsWith.getExpression2() instanceof TextLiteral); assertEquals("aBc", ((TextLiteral) startsWith.getExpression2()).getValue()); // Check geom constraints final Index spatialIndex = 
SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); AdapterToIndexMapping mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), spatialIndex); FilterConstraints constraints = f.getConstraints( Double.class, null, adapter, mapping, spatialIndex, Sets.newHashSet("geom")); IndexFieldConstraints fieldConstraints = constraints.getFieldConstraints("geom"); assertNotNull(fieldConstraints); DimensionConstraints dimRanges = fieldConstraints.getDimensionRanges(0); assertNotNull(dimRanges); assertEquals(1, dimRanges.getRanges().size()); assertEquals(5L, ((Double) dimRanges.getRanges().get(0).getStart()).longValue()); assertTrue(dimRanges.getRanges().get(0).isStartInclusive()); assertEquals(8L, ((Double) dimRanges.getRanges().get(0).getEnd()).longValue()); assertTrue(dimRanges.getRanges().get(0).isEndInclusive()); assertFalse(dimRanges.getRanges().get(0).isExact()); dimRanges = fieldConstraints.getDimensionRanges(1); assertNotNull(dimRanges); assertEquals(1, dimRanges.getRanges().size()); assertEquals(20L, ((Double) dimRanges.getRanges().get(0).getStart()).longValue()); assertTrue(dimRanges.getRanges().get(0).isStartInclusive()); assertEquals(30L, ((Double) dimRanges.getRanges().get(0).getEnd()).longValue()); assertTrue(dimRanges.getRanges().get(0).isEndInclusive()); assertFalse(dimRanges.getRanges().get(0).isExact()); // Check A constraints final Index aIndex = AttributeDimensionalityTypeProvider.createIndexForDescriptor( adapter, adapter.getFieldDescriptor("A"), "aIndex"); mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), aIndex); constraints = f.getConstraints(Double.class, null, adapter, mapping, aIndex, Sets.newHashSet("A")); fieldConstraints = constraints.getFieldConstraints("A"); assertNotNull(fieldConstraints); dimRanges = fieldConstraints.getDimensionRanges(0); assertNotNull(dimRanges); assertEquals(2, dimRanges.getRanges().size()); assertEquals(5L, ((Double) 
dimRanges.getRanges().get(0).getStart()).longValue()); assertTrue(dimRanges.getRanges().get(0).isStartInclusive()); assertEquals(10L, ((Double) dimRanges.getRanges().get(0).getEnd()).longValue()); assertTrue(dimRanges.getRanges().get(0).isEndInclusive()); assertTrue(dimRanges.getRanges().get(0).isExact()); assertEquals(15L, ((Double) dimRanges.getRanges().get(1).getStart()).longValue()); assertTrue(dimRanges.getRanges().get(1).isStartInclusive()); assertEquals(20L, ((Double) dimRanges.getRanges().get(1).getEnd()).longValue()); assertTrue(dimRanges.getRanges().get(1).isEndInclusive()); assertTrue(dimRanges.getRanges().get(1).isExact()); // Check B constraints final Index bIndex = AttributeDimensionalityTypeProvider.createIndexForDescriptor( adapter, adapter.getFieldDescriptor("B"), "bIndex"); mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), bIndex); constraints = f.getConstraints(Double.class, null, adapter, mapping, bIndex, Sets.newHashSet("B")); fieldConstraints = constraints.getFieldConstraints("B"); assertNotNull(fieldConstraints); dimRanges = fieldConstraints.getDimensionRanges(0); assertNotNull(dimRanges); assertEquals(1, dimRanges.getRanges().size()); assertNull(dimRanges.getRanges().get(0).getStart()); assertTrue(dimRanges.getRanges().get(0).isStartInclusive()); assertNull(dimRanges.getRanges().get(0).getEnd()); assertTrue(dimRanges.getRanges().get(0).isEndInclusive()); assertTrue(dimRanges.getRanges().get(0).isExact()); // Check name constraints final Index nameIndex = AttributeDimensionalityTypeProvider.createIndexForDescriptor( adapter, adapter.getFieldDescriptor("name"), "nameIndex"); mapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) 0), nameIndex); FilterConstraints textConstraints = f.getConstraints(String.class, null, adapter, mapping, nameIndex, Sets.newHashSet("name")); fieldConstraints = textConstraints.getFieldConstraints("name"); assertNotNull(fieldConstraints); dimRanges = 
fieldConstraints.getDimensionRanges(0); assertNotNull(dimRanges); assertEquals(1, dimRanges.getRanges().size()); assertEquals("aBc", dimRanges.getRanges().get(0).getStart()); assertTrue(dimRanges.getRanges().get(0).isStartInclusive()); assertEquals("aBc", dimRanges.getRanges().get(0).getEnd()); assertTrue(dimRanges.getRanges().get(0).isEndInclusive()); assertTrue(dimRanges.getRanges().get(0).isExact()); } private Filter fromCQL(final String cqlStr) throws CQLException { final org.opengis.filter.Filter cqlFilter = ECQL.toFilter(cqlStr); return (Filter) cqlFilter.accept(new CQLToGeoWaveFilterVisitor(adapter), null); } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/filter/expression/SpatialTemporalFilterExpressionTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.filter.expression; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.time.Instant; import java.util.Calendar; import java.util.Date; import java.util.TimeZone; import org.geotools.geometry.jts.ReferencedEnvelope; import org.junit.Test; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder; import org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.PreparedFilterGeometry; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral; import 
org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialNotEqualTo; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.UnpreparedFilterGeometry; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.threeten.extra.Interval; public class SpatialTemporalFilterExpressionTest { @Test public void testSpatialExpressions() { final DataTypeAdapter adapter = new 
TestTypeBasicDataAdapter(); final TestType entry = new TestType( GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(20, 20)), new Date(), "test"); final TestType entryNulls = new TestType(null, null, null); final SpatialLiteral bboxLit = SpatialLiteral.of(new Envelope(0, 5, 0, 5)); final SpatialLiteral preparedBboxLit = SpatialLiteral.of(new Envelope(0, 5, 0, 5)); preparedBboxLit.prepare(GeometryUtils.getDefaultCRS()); final SpatialLiteral polygonLit = SpatialLiteral.of( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(0, 5), new Coordinate(0, 10), new Coordinate(5, 10), new Coordinate(5, 5), new Coordinate(0, 5)})); final SpatialLiteral preparedPolygonLit = SpatialLiteral.of( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(0, 5), new Coordinate(0, 10), new Coordinate(5, 10), new Coordinate(5, 5), new Coordinate(0, 5)})); preparedPolygonLit.prepare(GeometryUtils.getDefaultCRS()); final SpatialLiteral referencedBboxLit = SpatialLiteral.of(new ReferencedEnvelope(0, 25, 0, 25, GeometryUtils.getDefaultCRS())); final SpatialLiteral referencedBboxLit2 = SpatialLiteral.of(new ReferencedEnvelope(4, 25, 4, 25, GeometryUtils.getDefaultCRS())); final SpatialFieldValue spatialField = SpatialFieldValue.of("geom"); // Test comparisons assertTrue( bboxLit.isEqualTo( GeometryUtils.GEOMETRY_FACTORY.createPolygon( new Coordinate[] { new Coordinate(0, 0), new Coordinate(0, 5), new Coordinate(5, 5), new Coordinate(5, 0), new Coordinate(0, 0)})).evaluate(adapter, entry)); assertTrue(bboxLit.isEqualTo(preparedBboxLit).evaluate(adapter, entry)); assertTrue(preparedBboxLit.isEqualTo(bboxLit).evaluate(adapter, entry)); assertTrue(bboxLit.isEqualTo(new Envelope(0, 5, 0, 5)).evaluate(adapter, entry)); assertFalse(spatialField.isEqualTo(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.isEqualTo(null).evaluate(adapter, entry)); assertTrue(spatialField.isEqualTo(null).evaluate(adapter, entryNulls)); 
assertFalse(spatialField.isEqualTo(bboxLit).evaluate(adapter, entryNulls)); assertFalse(spatialField.isNull().evaluate(adapter, entry)); assertTrue(spatialField.isNull().evaluate(adapter, entryNulls)); assertFalse( bboxLit.isNotEqualTo( new ReferencedEnvelope(0, 5, 0, 5, GeometryUtils.getDefaultCRS())).evaluate( adapter, entry)); assertFalse(bboxLit.isNotEqualTo(preparedBboxLit).evaluate(adapter, entry)); assertTrue(bboxLit.isNotEqualTo(polygonLit).evaluate(adapter, entry)); assertFalse(polygonLit.isNotEqualTo(polygonLit).evaluate(adapter, entry)); assertTrue(spatialField.isNotEqualTo(bboxLit).evaluate(adapter, entryNulls)); assertFalse(spatialField.isNotEqualTo(null).evaluate(adapter, entryNulls)); assertTrue(spatialField.isNotEqualTo(null).evaluate(adapter, entry)); assertTrue(SpatialLiteral.of(null).isNull().evaluate(adapter, entry)); // Preparing null or already prepared geometries should not fail preparedBboxLit.prepare(GeometryUtils.getDefaultCRS()); SpatialLiteral.of(null).prepare(GeometryUtils.getDefaultCRS()); try { SpatialLiteral.of("invalid"); fail(); } catch (RuntimeException e) { // expected } // Test functions assertTrue(spatialField.bbox(19, 19, 21, 21).evaluate(adapter, entry)); assertFalse(spatialField.bbox(0, 0, 5, 5).evaluate(adapter, entry)); assertTrue(bboxLit.touches(polygonLit).evaluate(adapter, entry)); assertTrue(preparedBboxLit.touches(polygonLit).evaluate(adapter, entry)); assertTrue(preparedBboxLit.touches(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(bboxLit.touches(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(spatialField.touches(polygonLit).evaluate(adapter, entry)); assertFalse(spatialField.touches(polygonLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.touches(spatialField).evaluate(adapter, entryNulls)); assertFalse(spatialField.touches(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(bboxLit.intersects(referencedBboxLit).evaluate(adapter, entry)); 
assertTrue(preparedBboxLit.intersects(polygonLit).evaluate(adapter, entry)); assertTrue(preparedBboxLit.intersects(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(bboxLit.intersects(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(spatialField.intersects(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.intersects(referencedBboxLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.intersects(spatialField).evaluate(adapter, entryNulls)); assertFalse(spatialField.intersects(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(bboxLit.disjoint(referencedBboxLit).evaluate(adapter, entry)); assertFalse(preparedBboxLit.disjoint(polygonLit).evaluate(adapter, entry)); assertFalse(preparedBboxLit.disjoint(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(bboxLit.disjoint(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(spatialField.disjoint(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.disjoint(referencedBboxLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.disjoint(spatialField).evaluate(adapter, entryNulls)); assertTrue(spatialField.disjoint(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(bboxLit.disjoint(spatialField).evaluate(adapter, entry)); assertFalse(bboxLit.contains(referencedBboxLit).evaluate(adapter, entry)); assertTrue(referencedBboxLit.contains(bboxLit).evaluate(adapter, entry)); assertFalse(preparedBboxLit.contains(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(bboxLit.contains(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(spatialField.contains(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.contains(referencedBboxLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.contains(spatialField).evaluate(adapter, entryNulls)); assertFalse(spatialField.contains(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(referencedBboxLit.contains(spatialField).evaluate(adapter, entry)); 
assertFalse(bboxLit.crosses(referencedBboxLit).evaluate(adapter, entry)); assertFalse(referencedBboxLit.crosses(bboxLit).evaluate(adapter, entry)); assertFalse(preparedBboxLit.crosses(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(bboxLit.crosses(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(spatialField.crosses(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.crosses(referencedBboxLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.crosses(spatialField).evaluate(adapter, entryNulls)); assertFalse(spatialField.crosses(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(referencedBboxLit.crosses(spatialField).evaluate(adapter, entry)); assertTrue( SpatialLiteral.of( GeometryUtils.GEOMETRY_FACTORY.createLineString( new Coordinate[] {new Coordinate(0, 0), new Coordinate(5, 5)})).crosses( SpatialLiteral.of( GeometryUtils.GEOMETRY_FACTORY.createLineString( new Coordinate[] { new Coordinate(5, 0), new Coordinate(0, 5)}))).evaluate(adapter, entry)); assertTrue(bboxLit.overlaps(referencedBboxLit2).evaluate(adapter, entry)); assertTrue(referencedBboxLit2.overlaps(bboxLit).evaluate(adapter, entry)); assertFalse(preparedBboxLit.overlaps(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(bboxLit.overlaps(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(spatialField.overlaps(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.overlaps(referencedBboxLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.overlaps(spatialField).evaluate(adapter, entryNulls)); assertFalse(spatialField.overlaps(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(referencedBboxLit.overlaps(spatialField).evaluate(adapter, entry)); assertTrue(bboxLit.within(referencedBboxLit).evaluate(adapter, entry)); assertFalse(referencedBboxLit.within(bboxLit).evaluate(adapter, entry)); assertFalse(preparedBboxLit.within(preparedPolygonLit).evaluate(adapter, entry)); 
assertFalse(bboxLit.within(preparedPolygonLit).evaluate(adapter, entry)); assertTrue(spatialField.within(referencedBboxLit).evaluate(adapter, entry)); assertFalse(spatialField.within(referencedBboxLit).evaluate(adapter, entryNulls)); assertFalse(polygonLit.within(spatialField).evaluate(adapter, entryNulls)); assertFalse(spatialField.within(preparedPolygonLit).evaluate(adapter, entry)); assertFalse(referencedBboxLit.within(spatialField).evaluate(adapter, entry)); // Test CRS transforms // This looks like it should be true, but spatial expressions need to be prepared for the query, // the spatial field could be any CRS because it would be determined by the index and not the // field descriptor assertFalse( spatialField.bbox( 2115070, 2154935, 2337709, 2391878, GeometryUtils.decodeCRS("EPSG:3857")).evaluate(adapter, entry)); // This looks like it should be false, but the expression hasn't been prepared for the query. assertTrue( spatialField.bbox(0, 0, 556597, 557305, GeometryUtils.decodeCRS("EPSG:3857")).evaluate( adapter, entry)); // TODO: add tests for prepared queries where this passes try { bboxLit.isEqualTo(5).evaluate(adapter, entry); fail(); } catch (RuntimeException e) { // expected } try { bboxLit.isNotEqualTo(5).evaluate(adapter, entry); fail(); } catch (RuntimeException e) { // expected } // Test serialization byte[] bytes = PersistenceUtils.toBinary(spatialField.bbox(-5, -8, 5, 8)); final BBox bbox = (BBox) PersistenceUtils.fromBinary(bytes); assertTrue(bbox.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) bbox.getExpression1()).getFieldName()); assertTrue(bbox.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) bbox.getExpression2()).getValue() instanceof UnpreparedFilterGeometry); assertTrue( ((UnpreparedFilterGeometry) ((SpatialLiteral) bbox.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(-5, 5, -8, 8)))); bytes = 
PersistenceUtils.toBinary(spatialField.crosses(bboxLit)); final Crosses crosses = (Crosses) PersistenceUtils.fromBinary(bytes); assertTrue(crosses.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) crosses.getExpression1()).getFieldName()); assertTrue(crosses.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) crosses.getExpression2()).getValue() instanceof UnpreparedFilterGeometry); assertTrue( ((UnpreparedFilterGeometry) ((SpatialLiteral) crosses.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = PersistenceUtils.toBinary(spatialField.disjoint(preparedBboxLit)); final Disjoint disjoint = (Disjoint) PersistenceUtils.fromBinary(bytes); assertTrue(disjoint.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) disjoint.getExpression1()).getFieldName()); assertTrue(disjoint.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) disjoint.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) disjoint.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = PersistenceUtils.toBinary(spatialField.intersects(preparedBboxLit)); final Intersects intersects = (Intersects) PersistenceUtils.fromBinary(bytes); assertTrue(intersects.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) intersects.getExpression1()).getFieldName()); assertTrue(intersects.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) intersects.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) intersects.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = 
PersistenceUtils.toBinary(spatialField.overlaps(preparedBboxLit)); final Overlaps overlaps = (Overlaps) PersistenceUtils.fromBinary(bytes); assertTrue(overlaps.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) overlaps.getExpression1()).getFieldName()); assertTrue(overlaps.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) overlaps.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) overlaps.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = PersistenceUtils.toBinary(spatialField.contains(SpatialLiteral.of(null))); final SpatialContains contains = (SpatialContains) PersistenceUtils.fromBinary(bytes); assertTrue(contains.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) contains.getExpression1()).getFieldName()); assertTrue(contains.getExpression2() instanceof SpatialLiteral); assertNull(((SpatialLiteral) contains.getExpression2()).getValue()); bytes = PersistenceUtils.toBinary(spatialField.touches(preparedBboxLit)); final Touches touches = (Touches) PersistenceUtils.fromBinary(bytes); assertTrue(touches.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) touches.getExpression1()).getFieldName()); assertTrue(touches.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) touches.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) touches.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = PersistenceUtils.toBinary(spatialField.within(preparedBboxLit)); final Within within = (Within) PersistenceUtils.fromBinary(bytes); assertTrue(within.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", 
((SpatialFieldValue) within.getExpression1()).getFieldName()); assertTrue(within.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) within.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) within.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = PersistenceUtils.toBinary(spatialField.isEqualTo(preparedBboxLit)); final SpatialEqualTo equalTo = (SpatialEqualTo) PersistenceUtils.fromBinary(bytes); assertTrue(equalTo.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) equalTo.getExpression1()).getFieldName()); assertTrue(equalTo.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) equalTo.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) equalTo.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); bytes = PersistenceUtils.toBinary(spatialField.isNotEqualTo(preparedBboxLit)); final SpatialNotEqualTo notEqualTo = (SpatialNotEqualTo) PersistenceUtils.fromBinary(bytes); assertTrue(notEqualTo.getExpression1() instanceof SpatialFieldValue); assertEquals("geom", ((SpatialFieldValue) notEqualTo.getExpression1()).getFieldName()); assertTrue(notEqualTo.getExpression2() instanceof SpatialLiteral); assertTrue( ((SpatialLiteral) notEqualTo.getExpression2()).getValue() instanceof PreparedFilterGeometry); assertTrue( ((PreparedFilterGeometry) ((SpatialLiteral) notEqualTo.getExpression2()).getValue()).getGeometry().equalsTopo( GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(0, 5, 0, 5)))); } @Test public void testTemporalExpressions() { final DataTypeAdapter adapter = new TestTypeBasicDataAdapter(); final TestType entry = new TestType( GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(20, 
20)), new Date(500), "test"); final TestType entryNulls = new TestType(null, null, null); final TemporalFieldValue dateField = TemporalFieldValue.of("date"); final TemporalLiteral dateLit = TemporalLiteral.of(new Date(300)); final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); calendar.setTime(new Date(400)); final TemporalLiteral calendarLit = TemporalLiteral.of(calendar); final TemporalLiteral longLit = TemporalLiteral.of(600); final TemporalLiteral instantLit = TemporalLiteral.of(Instant.ofEpochMilli(700)); final TemporalLiteral intervalLit = TemporalLiteral.of(Interval.of(Instant.ofEpochMilli(450), Instant.ofEpochMilli(650))); // Test comparisons assertTrue(calendarLit.isEqualTo(new Date(400)).evaluate(adapter, entry)); assertFalse(calendarLit.isEqualTo(dateLit).evaluate(adapter, entry)); assertTrue(dateField.isEqualTo(new Date(500)).evaluate(adapter, entry)); assertFalse(dateField.isEqualTo(longLit).evaluate(adapter, entry)); assertTrue(dateField.isEqualTo(null).evaluate(adapter, entryNulls)); assertFalse(dateField.isEqualTo(null).evaluate(adapter, entry)); assertFalse(calendarLit.isNotEqualTo(new Date(400)).evaluate(adapter, entry)); assertTrue(calendarLit.isNotEqualTo(dateLit).evaluate(adapter, entry)); assertFalse(dateField.isNotEqualTo(new Date(500)).evaluate(adapter, entry)); assertTrue(dateField.isNotEqualTo(longLit).evaluate(adapter, entry)); assertFalse(dateField.isNotEqualTo(null).evaluate(adapter, entryNulls)); assertTrue(dateField.isNotEqualTo(null).evaluate(adapter, entry)); assertFalse(dateField.isNull().evaluate(adapter, entry)); assertTrue(dateField.isNull().evaluate(adapter, entryNulls)); assertFalse(instantLit.isNull().evaluate(adapter, entry)); assertFalse(intervalLit.isNull().evaluate(adapter, entry)); assertTrue(TemporalLiteral.of(null).isNull().evaluate(adapter, entry)); assertTrue(dateField.isNotNull().evaluate(adapter, entry)); assertFalse(dateField.isNotNull().evaluate(adapter, entryNulls)); 
assertTrue(instantLit.isNotNull().evaluate(adapter, entry)); assertTrue(intervalLit.isNotNull().evaluate(adapter, entry)); assertFalse(TemporalLiteral.of(null).isNotNull().evaluate(adapter, entry)); assertTrue(dateField.isLessThan(longLit).evaluate(adapter, entry)); assertFalse(dateField.isLessThan(calendarLit).evaluate(adapter, entry)); assertTrue(calendarLit.isLessThan(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isLessThan(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.isLessThan(instantLit).evaluate(adapter, entry)); assertFalse(dateField.isLessThan(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isLessThan(dateField).evaluate(adapter, entryNulls)); assertTrue(dateField.isLessThanOrEqualTo(longLit).evaluate(adapter, entry)); assertFalse(dateField.isLessThanOrEqualTo(calendarLit).evaluate(adapter, entry)); assertTrue(calendarLit.isLessThanOrEqualTo(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isLessThanOrEqualTo(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.isLessThanOrEqualTo(instantLit).evaluate(adapter, entry)); assertFalse(dateField.isLessThanOrEqualTo(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isLessThanOrEqualTo(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.isGreaterThan(longLit).evaluate(adapter, entry)); assertTrue(dateField.isGreaterThan(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.isGreaterThan(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isGreaterThan(dateLit).evaluate(adapter, entry)); assertFalse(intervalLit.isGreaterThan(instantLit).evaluate(adapter, entry)); assertTrue(instantLit.isGreaterThan(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isGreaterThan(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isGreaterThan(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.isGreaterThanOrEqualTo(longLit).evaluate(adapter, entry)); 
assertTrue(dateField.isGreaterThanOrEqualTo(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.isGreaterThanOrEqualTo(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isGreaterThanOrEqualTo(dateLit).evaluate(adapter, entry)); assertFalse(intervalLit.isGreaterThanOrEqualTo(instantLit).evaluate(adapter, entry)); assertTrue(instantLit.isGreaterThanOrEqualTo(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isGreaterThanOrEqualTo(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isGreaterThanOrEqualTo(dateField).evaluate(adapter, entryNulls)); assertTrue(calendarLit.isBetween(dateLit, longLit).evaluate(adapter, entry)); assertFalse(dateLit.isBetween(calendarLit, longLit).evaluate(adapter, entry)); assertFalse(longLit.isBetween(dateLit, calendarLit).evaluate(adapter, entry)); assertFalse(dateField.isBetween(dateLit, longLit).evaluate(adapter, entryNulls)); assertFalse(dateLit.isBetween(dateField, longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isBetween(dateLit, dateField).evaluate(adapter, entryNulls)); TemporalBetween between = (TemporalBetween) calendarLit.isBetween(dateField, calendarLit); assertTrue(between.getValue() instanceof TemporalLiteral); assertTrue(between.getLowerBound() instanceof TemporalFieldValue); assertTrue(between.getUpperBound() instanceof TemporalLiteral); try { dateField.isLessThan("invalid"); fail(); } catch (RuntimeException e) { // expected } try { dateField.isLessThanOrEqualTo("invalid"); fail(); } catch (RuntimeException e) { // expected } try { dateField.isGreaterThan("invalid"); fail(); } catch (RuntimeException e) { // expected } try { dateField.isGreaterThanOrEqualTo("invalid"); fail(); } catch (RuntimeException e) { // expected } try { dateField.isBetween("invalid", longLit); fail(); } catch (RuntimeException e) { // expected } try { dateField.isBetween(longLit, "invalid"); fail(); } catch (RuntimeException e) { // expected } try { TemporalLiteral.of("invalid"); fail(); } 
catch (RuntimeException e) { // expected } // Test functions assertTrue(dateField.isBefore(longLit).evaluate(adapter, entry)); assertFalse(dateField.isBefore(calendarLit).evaluate(adapter, entry)); assertTrue(calendarLit.isBefore(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isBefore(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.isBefore(instantLit).evaluate(adapter, entry)); assertFalse(dateField.isBefore(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isBefore(dateField).evaluate(adapter, entryNulls)); assertTrue(dateField.isBeforeOrDuring(longLit).evaluate(adapter, entry)); assertFalse(dateField.isBeforeOrDuring(calendarLit).evaluate(adapter, entry)); assertTrue(calendarLit.isBeforeOrDuring(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isBeforeOrDuring(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.isBeforeOrDuring(instantLit).evaluate(adapter, entry)); assertFalse(dateField.isBeforeOrDuring(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isBeforeOrDuring(dateField).evaluate(adapter, entryNulls)); assertTrue(dateField.isBefore(longLit).evaluate(adapter, entry)); assertFalse(dateField.isBefore(calendarLit).evaluate(adapter, entry)); assertTrue(calendarLit.isBefore(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isBefore(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.isBefore(instantLit).evaluate(adapter, entry)); assertFalse(dateField.isBefore(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isBefore(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.isAfter(longLit).evaluate(adapter, entry)); assertTrue(dateField.isAfter(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.isAfter(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isAfter(dateLit).evaluate(adapter, entry)); assertFalse(intervalLit.isAfter(instantLit).evaluate(adapter, entry)); 
assertTrue(instantLit.isAfter(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isAfter(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isAfter(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.isDuringOrAfter(longLit).evaluate(adapter, entry)); assertTrue(dateField.isDuringOrAfter(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.isDuringOrAfter(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isDuringOrAfter(dateLit).evaluate(adapter, entry)); assertFalse(intervalLit.isDuringOrAfter(instantLit).evaluate(adapter, entry)); assertTrue(instantLit.isDuringOrAfter(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isDuringOrAfter(longLit).evaluate(adapter, entryNulls)); assertFalse(longLit.isDuringOrAfter(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.isDuring(longLit).evaluate(adapter, entry)); assertFalse(dateField.isDuring(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.isDuring(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.isDuring(intervalLit).evaluate(adapter, entry)); assertTrue(longLit.isDuring(intervalLit).evaluate(adapter, entry)); assertFalse(intervalLit.isDuring(dateField).evaluate(adapter, entry)); assertFalse(instantLit.isDuring(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.isDuring(intervalLit).evaluate(adapter, entryNulls)); assertFalse(intervalLit.isDuring(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.contains(longLit).evaluate(adapter, entry)); assertFalse(dateField.contains(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.contains(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.contains(dateField).evaluate(adapter, entry)); assertTrue(intervalLit.contains(longLit).evaluate(adapter, entry)); assertFalse(instantLit.contains(intervalLit).evaluate(adapter, entry)); assertFalse(dateField.contains(intervalLit).evaluate(adapter, entryNulls)); 
assertFalse(intervalLit.contains(dateField).evaluate(adapter, entryNulls)); assertFalse(dateField.overlaps(longLit).evaluate(adapter, entry)); assertFalse(dateField.overlaps(calendarLit).evaluate(adapter, entry)); assertFalse(calendarLit.overlaps(intervalLit).evaluate(adapter, entry)); assertTrue(dateField.overlaps(intervalLit).evaluate(adapter, entry)); assertTrue(longLit.overlaps(intervalLit).evaluate(adapter, entry)); assertTrue(intervalLit.overlaps(dateField).evaluate(adapter, entry)); assertFalse(instantLit.overlaps(intervalLit).evaluate(adapter, entry)); assertTrue( TemporalLiteral.of( Interval.of(Instant.ofEpochMilli(200), Instant.ofEpochMilli(500))).overlaps( intervalLit).evaluate(adapter, entry)); assertFalse( TemporalLiteral.of( Interval.of(Instant.ofEpochMilli(100), Instant.ofEpochMilli(300))).overlaps( intervalLit).evaluate(adapter, entry)); assertFalse(dateField.overlaps(intervalLit).evaluate(adapter, entryNulls)); assertFalse(intervalLit.overlaps(dateField).evaluate(adapter, entryNulls)); // Test serialization byte[] bytes = PersistenceUtils.toBinary(dateField.isAfter(longLit)); final After after = (After) PersistenceUtils.fromBinary(bytes); assertTrue(after.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) after.getExpression1()).getFieldName()); assertTrue(after.getExpression2() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) after.getExpression2()).getValue() instanceof Interval); assertEquals( 600, ((Interval) ((TemporalLiteral) after.getExpression2()).getValue()).getStart().toEpochMilli()); assertEquals( 600, ((Interval) ((TemporalLiteral) after.getExpression2()).getValue()).getEnd().toEpochMilli()); bytes = PersistenceUtils.toBinary(dateField.isDuringOrAfter(intervalLit)); final DuringOrAfter duringOrAfter = (DuringOrAfter) PersistenceUtils.fromBinary(bytes); assertTrue(duringOrAfter.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) 
duringOrAfter.getExpression1()).getFieldName()); assertTrue(duringOrAfter.getExpression2() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) duringOrAfter.getExpression2()).getValue() instanceof Interval); assertEquals( 450, ((Interval) ((TemporalLiteral) duringOrAfter.getExpression2()).getValue()).getStart().toEpochMilli()); assertEquals( 650, ((Interval) ((TemporalLiteral) duringOrAfter.getExpression2()).getValue()).getEnd().toEpochMilli()); bytes = PersistenceUtils.toBinary(dateField.isBefore(longLit)); final Before before = (Before) PersistenceUtils.fromBinary(bytes); assertTrue(before.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) before.getExpression1()).getFieldName()); assertTrue(before.getExpression2() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) before.getExpression2()).getValue() instanceof Interval); assertEquals( 600, ((Interval) ((TemporalLiteral) before.getExpression2()).getValue()).getStart().toEpochMilli()); assertEquals( 600, ((Interval) ((TemporalLiteral) before.getExpression2()).getValue()).getEnd().toEpochMilli()); bytes = PersistenceUtils.toBinary(dateField.isBeforeOrDuring(intervalLit)); final BeforeOrDuring beforeOrDuring = (BeforeOrDuring) PersistenceUtils.fromBinary(bytes); assertTrue(beforeOrDuring.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) beforeOrDuring.getExpression1()).getFieldName()); assertTrue(beforeOrDuring.getExpression2() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) beforeOrDuring.getExpression2()).getValue() instanceof Interval); assertEquals( 450, ((Interval) ((TemporalLiteral) beforeOrDuring.getExpression2()).getValue()).getStart().toEpochMilli()); assertEquals( 650, ((Interval) ((TemporalLiteral) beforeOrDuring.getExpression2()).getValue()).getEnd().toEpochMilli()); bytes = PersistenceUtils.toBinary(dateField.isDuring(TemporalLiteral.of(null))); final During during = (During) 
PersistenceUtils.fromBinary(bytes); assertTrue(during.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) during.getExpression1()).getFieldName()); assertTrue(during.getExpression2() instanceof TemporalLiteral); assertNull(((TemporalLiteral) during.getExpression2()).getValue()); bytes = PersistenceUtils.toBinary(dateField.isBetween(longLit, intervalLit)); between = (TemporalBetween) PersistenceUtils.fromBinary(bytes); assertTrue(between.getValue() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) between.getValue()).getFieldName()); assertTrue(between.getLowerBound() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) between.getLowerBound()).getValue() instanceof Interval); assertEquals( 600, ((Interval) ((TemporalLiteral) between.getLowerBound()).getValue()).getStart().toEpochMilli()); assertEquals( 600, ((Interval) ((TemporalLiteral) between.getLowerBound()).getValue()).getEnd().toEpochMilli()); assertTrue(between.getUpperBound() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) between.getUpperBound()).getValue() instanceof Interval); assertEquals( 450, ((Interval) ((TemporalLiteral) between.getUpperBound()).getValue()).getStart().toEpochMilli()); assertEquals( 650, ((Interval) ((TemporalLiteral) between.getUpperBound()).getValue()).getEnd().toEpochMilli()); bytes = PersistenceUtils.toBinary(dateField.overlaps(intervalLit)); final TimeOverlaps overlaps = (TimeOverlaps) PersistenceUtils.fromBinary(bytes); assertTrue(overlaps.getExpression1() instanceof TemporalFieldValue); assertEquals("date", ((TemporalFieldValue) overlaps.getExpression1()).getFieldName()); assertTrue(overlaps.getExpression2() instanceof TemporalLiteral); assertTrue(((TemporalLiteral) overlaps.getExpression2()).getValue() instanceof Interval); assertEquals( 450, ((Interval) ((TemporalLiteral) overlaps.getExpression2()).getValue()).getStart().toEpochMilli()); assertEquals( 650, ((Interval) ((TemporalLiteral) 
overlaps.getExpression2()).getValue()).getEnd().toEpochMilli()); } public static class TestType { public Geometry geom; public Date date; public String name; public TestType(final Geometry geom, final Date date, final String name) { this.geom = geom; this.date = date; this.name = name; } } public static class TestTypeBasicDataAdapter extends AbstractDataTypeAdapter { static final FieldDescriptor[] fields = new FieldDescriptor[] { new SpatialFieldDescriptorBuilder<>(Geometry.class).fieldName("geom").build(), new TemporalFieldDescriptorBuilder<>(Date.class).fieldName("date").build(), new FieldDescriptorBuilder<>(String.class).fieldName("name").build(), new FieldDescriptorBuilder<>(Long.class).fieldName("EMPLOYED").build(), new FieldDescriptorBuilder<>(Long.class).fieldName("UNEMPLOY").build(), new FieldDescriptorBuilder<>(Boolean.class).fieldName("bool").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("A").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("B").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("C").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("D").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("E").build()}; public TestTypeBasicDataAdapter() {} public TestTypeBasicDataAdapter(final String typeName) { super(typeName, fields, fields[2]); } @Override public Object getFieldValue(TestType entry, String fieldName) { switch (fieldName) { case "geom": return entry.geom; case "date": return entry.date; case "name": return entry.name; } return null; } @Override public TestType buildObject(final Object dataId, Object[] fieldValues) { return new TestType( (Geometry) fieldValues[0], (Date) fieldValues[1], (String) fieldValues[2]); } } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/query/gwql/GWQLParserTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the 
Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.store.query.gwql; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.Date; import org.geotools.referencing.CRS; import org.junit.Test; import org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField; import org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BBox; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.BinarySpatialPredicate; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Crosses; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Disjoint; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Intersects; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Overlaps; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialContains; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialEqualTo; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.SpatialLiteral; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Touches; import org.locationtech.geowave.core.geotime.store.query.filter.expression.spatial.Within; 
import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.After; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.Before; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BeforeOrDuring; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.BinaryTemporalPredicate; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.During; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.DuringOrAfter; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalBetween; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalExpression; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalFieldValue; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TemporalLiteral; import org.locationtech.geowave.core.geotime.store.query.filter.expression.temporal.TimeOverlaps; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.gwql.AbstractGWQLTest; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; import org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.referencing.FactoryException; import 
org.opengis.referencing.NoSuchAuthorityCodeException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * Tests for the GWQL (GeoWave Query Language) parser covering temporal operators, spatial and
 * temporal predicate functions, and explicit type casting. Each test parses a SELECT statement
 * and verifies the resulting filter expression tree (filter class, operand types, field names,
 * literal values, and CRS).
 */
public class GWQLParserTest extends AbstractGWQLTest {

  @Override
  protected DataTypeAdapter createDefaultAdapter() {
    // Adapter for a test type named "type" with one spatial and two temporal fields, keyed on "pid"
    return BasicDataTypeAdapter.newAdapter("type", SpatialTemporalType.class, "pid");
  }

  /**
   * Test data type extending the base GWQL test type with a spatial field ({@code geometry}) and
   * two temporal fields ({@code start}, {@code end}).
   */
  @GeoWaveDataType
  protected static class SpatialTemporalType extends DefaultGWQLTestType {
    @GeoWaveSpatialField
    private Geometry geometry;
    @GeoWaveTemporalField
    private Date start;
    @GeoWaveTemporalField
    private Date end;

    // No-args constructor required by the adapter framework
    public SpatialTemporalType() {}

    public SpatialTemporalType(
        final String pid,
        final Long pop,
        final String comment,
        final Geometry geometry,
        final Date start,
        final Date end) {
      super(pid, pop, comment);
      this.geometry = geometry;
      this.start = start;
      this.end = end;
    }
  }

  /**
   * Verifies that the infix temporal operators (AFTER, DURING_OR_AFTER, DURING, BEFORE_OR_DURING,
   * BEFORE) each parse to the corresponding temporal predicate filter with the field value on the
   * left and the temporal literal on the right.
   */
  @Test
  public void testTemporalOperatorFunctions() {
    final DataStore dataStore = createDataStore();
    // AFTER with a single-instant literal
    String statement = "SELECT * FROM type WHERE start AFTER '2020-01-01'";
    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    SelectStatement selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    Filter filter = selectStatement.getFilter();
    assertTrue(filter instanceof After);
    BinaryTemporalPredicate predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);
    // A single-date literal yields an interval whose start and end are both that instant
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());
    // DURING_OR_AFTER with an interval literal ('start/end' syntax)
    statement = "SELECT * FROM type WHERE start DURING_OR_AFTER '2020-01-01/2020-01-05'";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof DuringOrAfter);
    predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-05").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());
    // DURING with an interval literal
    statement = "SELECT * FROM type WHERE start DURING '2020-01-01/2020-01-05'";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof During);
    predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-05").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());
    // BEFORE_OR_DURING with an interval literal
    statement = "SELECT * FROM type WHERE start BEFORE_OR_DURING '2020-01-01/2020-01-05'";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof BeforeOrDuring);
    predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-05").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());
    // BEFORE with a single-instant literal
    statement = "SELECT * FROM type WHERE start BEFORE '2020-01-05'";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Before);
    predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);
    assertEquals(
        TemporalExpression.stringToDate("2020-01-05").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-05").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());
  }

  /**
   * Verifies the spatial predicate functions (intersects, bbox, disjoint, crosses, touches,
   * overlaps, contains, within) and their "Loose" variants, including the optional CRS argument
   * on bbox. Checks filter class, looseness flag, operand types/field names, literal geometry,
   * and the literal's CRS.
   */
  @Test
  public void testSpatialPredicateFunctions()
      throws NoSuchAuthorityCodeException, FactoryException {
    final Geometry point = GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(1, 1));
    // Unit square used as the expected geometry for the bbox predicates
    final Geometry bbox =
        GeometryUtils.GEOMETRY_FACTORY.createPolygon(
            new Coordinate[] {
                new Coordinate(0, 0),
                new Coordinate(0, 1),
                new Coordinate(1, 1),
                new Coordinate(1, 0),
                new Coordinate(0, 0)});
    final CoordinateReferenceSystem altCRS = CRS.decode("EPSG:3857");
    final DataStore dataStore = createDataStore();
    // intersects -> Intersects (strict)
    String statement = "SELECT * FROM type WHERE intersects(geometry, 'POINT(1 1)')";
    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    SelectStatement selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    Filter filter = selectStatement.getFilter();
    assertTrue(filter instanceof Intersects);
    assertFalse(((Intersects) filter).isLoose());
    BinarySpatialPredicate predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    // With no CRS argument the literal uses the default CRS
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // intersectsLoose -> Intersects with the loose flag set
    statement = "SELECT * FROM type WHERE intersectsLoose(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Intersects);
    assertTrue(((Intersects) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // bbox -> BBox (strict), envelope converted to the expected polygon
    statement = "SELECT * FROM type WHERE bbox(geometry, 0, 0, 1, 1)";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof BBox);
    assertFalse(((BBox) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // bboxLoose -> BBox with the loose flag set
    statement = "SELECT * FROM type WHERE bboxLoose(geometry, 0, 0, 1, 1)";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof BBox);
    assertTrue(((BBox) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // bbox with an explicit CRS argument -> literal carries that CRS
    statement = "SELECT * FROM type WHERE bbox(geometry, 0, 0, 1, 1, 'EPSG:3857')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof BBox);
    assertFalse(((BBox) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(altCRS, predicate.getExpression2().getCRS(null));
    // bboxLoose with an explicit CRS argument
    statement = "SELECT * FROM type WHERE bboxLoose(geometry, 0, 0, 1, 1, 'EPSG:3857')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof BBox);
    assertTrue(((BBox) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        bbox.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(altCRS, predicate.getExpression2().getCRS(null));
    // disjoint -> Disjoint (strict)
    statement = "SELECT * FROM type WHERE disjoint(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Disjoint);
    assertFalse(((Disjoint) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // disjointLoose -> Disjoint with the loose flag set
    statement = "SELECT * FROM type WHERE disjointLoose(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Disjoint);
    assertTrue(((Disjoint) filter).isLoose());
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // crosses -> Crosses (no loose variant)
    statement = "SELECT * FROM type WHERE crosses(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Crosses);
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // touches -> Touches
    statement = "SELECT * FROM type WHERE touches(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Touches);
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // overlaps -> Overlaps
    statement = "SELECT * FROM type WHERE overlaps(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Overlaps);
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // contains -> SpatialContains
    statement = "SELECT * FROM type WHERE contains(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof SpatialContains);
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
    // within -> Within
    statement = "SELECT * FROM type WHERE within(geometry, 'POINT(1 1)')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof Within);
    predicate = (BinarySpatialPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof SpatialLiteral);
    assertTrue(
        point.equalsExact(((SpatialLiteral) predicate.getExpression2()).getValue().getGeometry()));
    assertEquals(GeometryUtils.getDefaultCRS(), predicate.getExpression2().getCRS(null));
  }

  /**
   * Verifies the temporal predicate functions: tcontains (which parses to the inverse During
   * predicate with flipped operands) and toverlaps (which parses to TimeOverlaps).
   */
  @Test
  public void testTemporalPredicateFunctions() {
    final DataStore dataStore = createDataStore();
    String statement = "SELECT * FROM type WHERE tcontains(start, '2020-01-01')";
    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    SelectStatement selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    Filter filter = selectStatement.getFilter();
    // During is the inverse of contains, so the operands should be flipped
    assertTrue(filter instanceof During);
    BinaryTemporalPredicate predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalLiteral);
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression1()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression1()).getValue().getEnd().toEpochMilli());
    assertTrue(predicate.getExpression2() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression2()).getFieldName());
    // toverlaps keeps the natural operand order
    statement = "SELECT * FROM type WHERE toverlaps(start, '2020-01-01')";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof TimeOverlaps);
    predicate = (BinaryTemporalPredicate) filter;
    assertTrue(predicate.getExpression1() instanceof TemporalFieldValue);
    assertEquals("start", ((TemporalFieldValue) predicate.getExpression1()).getFieldName());
    assertTrue(predicate.getExpression2() instanceof TemporalLiteral);
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getStart().toEpochMilli());
    assertEquals(
        TemporalExpression.stringToDate("2020-01-01").getTime(),
        ((TemporalLiteral) predicate.getExpression2()).getValue().getEnd().toEpochMilli());
  }

  /**
   * Verifies the {@code ::} cast syntax: casting a numeric field to {@code date} enables temporal
   * predicates on it, and casting a WKT string literal to {@code geometry} enables spatial
   * equality.
   */
  @Test
  public void testCasting() {
    final DataStore dataStore = createDataStore();
    // Field cast: pop::date used with a temporal BETWEEN
    String statement = "SELECT * FROM type WHERE pop::date BETWEEN '2020-01-01' AND '2020-01-02'";
    Statement gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    SelectStatement selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    Filter filter = selectStatement.getFilter();
    assertTrue(filter instanceof TemporalBetween);
    final TemporalBetween between = (TemporalBetween) filter;
    assertTrue(between.getValue() instanceof TemporalFieldValue);
    assertEquals("pop", ((TemporalFieldValue) between.getValue()).getFieldName());
    assertTrue(between.getLowerBound() instanceof TemporalLiteral);
    assertTrue(between.getUpperBound() instanceof TemporalLiteral);
    // Literal cast: WKT string cast to geometry used with spatial equality
    statement = "SELECT * FROM type WHERE geometry = 'POINT(1 1)'::geometry";
    gwStatement = GWQLParser.parseStatement(dataStore, statement);
    assertTrue(gwStatement instanceof SelectStatement);
    selectStatement = (SelectStatement) gwStatement;
    assertNotNull(selectStatement.getFilter());
    filter = selectStatement.getFilter();
    assertTrue(filter instanceof SpatialEqualTo);
    final SpatialEqualTo equals = (SpatialEqualTo) filter;
    assertTrue(equals.getExpression1() instanceof SpatialFieldValue);
    assertEquals("geometry", ((SpatialFieldValue) equals.getExpression1()).getFieldName());
    assertTrue(equals.getExpression2() instanceof SpatialLiteral);
  }
}

================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/store/statistics/BoundingBoxStatisticTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.store.statistics;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.geotools.referencing.CRS;
import org.junit.Test;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.NoSuchAuthorityCodeException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.MathTransform;

/** Serialization round-trip tests for {@link BoundingBoxStatistic}. */
public class BoundingBoxStatisticTest {

  /**
   * Round-trips a bounding-box statistic through {@link PersistenceUtils} binary serialization,
   * first without a CRS transform (transform and binning strategy must stay null) and then with a
   * source/destination CRS pair (the reprojection transform must survive the round trip).
   */
  @Test
  public void testBoundingBoxStatisticSerialization()
      throws NoSuchAuthorityCodeException, FactoryException {
    // Round trip without any CRS transform configured
    BoundingBoxStatistic expected = new BoundingBoxStatistic("testType", "testField");
    byte[] statBytes = PersistenceUtils.toBinary(expected);
    BoundingBoxStatistic actual = (BoundingBoxStatistic) PersistenceUtils.fromBinary(statBytes);
    assertEquals(expected.getTypeName(), actual.getTypeName());
    assertEquals(expected.getFieldName(), actual.getFieldName());
    assertNull(actual.getTransform());
    assertNull(actual.getBinningStrategy());
    // Round trip with a source/destination CRS pair; the derived transform must match the one
    // computed directly from the two CRSs
    CoordinateReferenceSystem sourceCrs = CRS.decode("EPSG:4326");
    CoordinateReferenceSystem destinationCrs = CRS.decode("EPSG:3857");
    MathTransform expectedTransform = CRS.findMathTransform(sourceCrs, destinationCrs);
    expected = new BoundingBoxStatistic("testType", "testField", sourceCrs, destinationCrs);
    statBytes = PersistenceUtils.toBinary(expected);
    actual = (BoundingBoxStatistic) PersistenceUtils.fromBinary(statBytes);
    assertEquals(expected.getTypeName(), actual.getTypeName());
    assertEquals(expected.getFieldName(), actual.getFieldName());
    assertEquals(expected.getSourceCrs(), actual.getSourceCrs());
    assertEquals(expected.getDestinationCrs(), actual.getDestinationCrs());
    assertEquals(expected.getTransform(), actual.getTransform());
    assertEquals(expectedTransform, actual.getTransform());
    assertNull(actual.getBinningStrategy());
  }
}

================================================
FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/util/GeometryUtilsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.geotime.util; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.geowave.core.store.query.constraints.Constraints; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; public class GeometryUtilsTest { private final float DELTA = 0; private Point point3D; private Point point2D; @Before public void createGeometry() { final GeometryFactory gf = new GeometryFactory(); point2D = gf.createPoint(new Coordinate(1, 2)); point3D = gf.createPoint(new Coordinate(1, 2, 
3)); } @Test public void test2DGeometryBinaryConversion() { // convert 2D point to binary representation final byte[] bytes = GeometryUtils.geometryToBinary(point2D, GeometryUtils.MAX_GEOMETRY_PRECISION); // load the converted 2D geometry final Geometry convGeo = GeometryUtils.geometryFromBinary(bytes, GeometryUtils.MAX_GEOMETRY_PRECISION); // get the coordinates for each version final Coordinate origCoords = point2D.getCoordinates()[0]; final Coordinate convCoords = convGeo.getCoordinates()[0]; Assert.assertEquals(origCoords.x, convCoords.x, DELTA); Assert.assertEquals(origCoords.y, convCoords.y, DELTA); Assert.assertTrue(Double.isNaN(convCoords.getZ())); } @Test public void test3DGeometryBinaryConversion() { // convert 3D point to binary representation final byte[] bytes = GeometryUtils.geometryToBinary(point3D, GeometryUtils.MAX_GEOMETRY_PRECISION); // load the converted 3D geometry final Geometry convGeo = GeometryUtils.geometryFromBinary(bytes, GeometryUtils.MAX_GEOMETRY_PRECISION); // get the coordinates for each version final Coordinate origCoords = point3D.getCoordinates()[0]; final Coordinate convCoords = convGeo.getCoordinates()[0]; Assert.assertEquals(origCoords.x, convCoords.x, DELTA); Assert.assertEquals(origCoords.y, convCoords.y, DELTA); Assert.assertEquals(origCoords.z, convCoords.z, DELTA); } @Test public void testConstraintGeneration() { final GeometryFactory gf = new GeometryFactory(); final Geometry multiPolygon = gf.createMultiPolygon( new Polygon[] { gf.createPolygon( new Coordinate[] { new Coordinate(20.0, 30), new Coordinate(20, 40), new Coordinate(10, 40), new Coordinate(10, 30), new Coordinate(20, 30)}), gf.createPolygon( new Coordinate[] { new Coordinate(-9, -2), new Coordinate(-9, -1), new Coordinate(-8, -1), new Coordinate(-8, -2), new Coordinate(-9, -2)})}); final Constraints constraints = GeometryUtils.basicConstraintsFromGeometry(multiPolygon); final List results = constraints.getIndexConstraints(new IndexImpl(new 
ExampleNumericIndexStrategy(), null)); assertEquals(2, results.size()); assertTrue(Arrays.equals(new Double[] {10d, 30d}, results.get(0).getMinValuesPerDimension())); assertTrue(Arrays.equals(new Double[] {20d, 40d}, results.get(0).getMaxValuesPerDimension())); assertTrue(Arrays.equals(new Double[] {-9d, -2d}, results.get(1).getMinValuesPerDimension())); assertTrue(Arrays.equals(new Double[] {-8d, -1d}, results.get(1).getMaxValuesPerDimension())); } GeometryFactory factory = new GeometryFactory(); @Test public void testSplit() { final Geometry multiPolygon = factory.createMultiPolygon( new Polygon[] { factory.createPolygon( new Coordinate[] { new Coordinate(179.0, -89), new Coordinate(179.0, -92), new Coordinate(182.0, -92), new Coordinate(192.0, -89), new Coordinate(179.0, -89)})}); final Geometry result = GeometryUtils.adjustGeo(GeometryUtils.getDefaultCRS(), multiPolygon); assertTrue(result.intersects(multiPolygon)); assertTrue(result.getNumGeometries() == 2); } @Test public void testSimple() { final Geometry singlePoly = factory.createMultiPolygon( new Polygon[] { factory.createPolygon( new Coordinate[] { new Coordinate(169.0, 20), new Coordinate(169.0, 21), new Coordinate(172.0, 21), new Coordinate(172.0, 20), new Coordinate(169.0, 20)})}); final Geometry result = GeometryUtils.adjustGeo(GeometryUtils.getDefaultCRS(), singlePoly); assertTrue(result.intersects(singlePoly)); assertTrue(singlePoly.isValid()); assertTrue(singlePoly.getNumGeometries() == 1); } public static class ExampleNumericIndexStrategy implements NumericIndexStrategy { @Override public byte[] toBinary() { return null; } @Override public void fromBinary(final byte[] bytes) {} @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return new NumericDimensionDefinition[] {new LongitudeDefinition(), new LatitudeDefinition()}; } @Override public String getId() { return "test-gt"; } @Override public double[] getHighestPrecisionIdRangePerDimension() { return null; } 
@Override public List createMetaData() { return Collections.emptyList(); } @Override public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... hints) { return null; } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... hints) { return null; } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxEstimatedRangeDecomposition, final IndexMetaData... hints) { return null; } @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { return null; } @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxEstimatedDuplicateIds) { return null; } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { return null; } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return null; } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { return null; } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { // TODO Auto-generated method stub return null; } @Override public int getPartitionKeyLength() { return 0; } } } ================================================ FILE: core/geotime/src/test/java/org/locationtech/geowave/core/geotime/util/TWKBTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.util;

import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryCollection;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.MultiPoint;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.io.ParseException;

/**
 * Round-trip tests for the TWKB (Tiny Well-Known Binary) reader/writer. Each test encodes a JTS
 * geometry at four precision settings -- default (full precision), 3 decimal digits, 0 decimal
 * digits, and -3 (coordinates rounded to multiples of 1000, as the expected values below show) --
 * then decodes the bytes and compares against the geometry expected at that precision. Each test
 * also round-trips the corresponding empty geometry.
 */
public class TWKBTest {
  private static GeometryFactory factory = null;
  private static TWKBWriter writerFullPrecision = null;
  private static TWKBWriter writer3Precision = null;
  private static TWKBWriter writer0Precision = null;
  private static TWKBWriter writerNegativePrecision = null;
  private static TWKBReader reader = null;

  /** Builds the shared geometry factory and one writer per precision setting. */
  @BeforeClass
  public static void init() {
    factory = new GeometryFactory();
    writerFullPrecision = new TWKBWriter();
    writer3Precision = new TWKBWriter(3);
    writer0Precision = new TWKBWriter(0);
    writerNegativePrecision = new TWKBWriter(-3);
    reader = new TWKBReader();
  }

  /** Round-trips a Point at each precision, plus an empty Point. */
  @Test
  public void testReadWritePoint() throws ParseException {
    final Point point = factory.createPoint(new Coordinate(12.13281248321, -1518.375));
    // full precision: maximum precision is 7 decimal digits
    Point expected = factory.createPoint(new Coordinate(12.1328125, -1518.375));
    byte[] encoded = writerFullPrecision.write(point);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createPoint(new Coordinate(12.133, -1518.375));
    encoded = writer3Precision.write(point);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createPoint(new Coordinate(12, -1518));
    encoded = writer0Precision.write(point);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3: rounded to multiples of 1000
    expected = factory.createPoint(new Coordinate(0, -2000));
    encoded = writerNegativePrecision.write(point);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    expected = factory.createPoint();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }

  /** Round-trips a LineString at each precision, plus an empty LineString. */
  @Test
  public void testReadWriteLine() throws ParseException {
    final LineString line = factory.createLineString(new Coordinate[] {
        new Coordinate(12.13281248321, -1518.375),
        new Coordinate(15.875, -1495.38281248325),
        new Coordinate(17.2635, -1384.75)});
    // full precision
    LineString expected = factory.createLineString(new Coordinate[] {
        new Coordinate(12.1328125, -1518.375),
        new Coordinate(15.875, -1495.3828125),
        new Coordinate(17.2635, -1384.75)});
    byte[] encoded = writerFullPrecision.write(line);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createLineString(new Coordinate[] {
        new Coordinate(12.133, -1518.375),
        new Coordinate(15.875, -1495.383),
        new Coordinate(17.264, -1384.75)});
    encoded = writer3Precision.write(line);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createLineString(new Coordinate[] {
        new Coordinate(12, -1518),
        new Coordinate(16, -1495),
        new Coordinate(17, -1385)});
    encoded = writer0Precision.write(line);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3
    expected = factory.createLineString(new Coordinate[] {
        new Coordinate(0, -2000),
        new Coordinate(0, -1000),
        new Coordinate(0, -1000)});
    encoded = writerNegativePrecision.write(line);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    expected = factory.createLineString();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }

  /** Round-trips a Polygon with two holes at each precision, plus an empty Polygon. */
  @Test
  public void testReadWritePolygon() throws ParseException {
    final Polygon poly = factory.createPolygon(
        factory.createLinearRing(new Coordinate[] {
            new Coordinate(12.13281248321, -1518.375),
            new Coordinate(24.875, -1518.38281248325),
            new Coordinate(24.2635, -1284.75),
            new Coordinate(12.325, -1282.125),
            new Coordinate(12.13281248321, -1518.375)}),
        new LinearRing[] {
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(13.5, -1500.1),
                new Coordinate(20.27335, -1495.3424),
                new Coordinate(20.1275, -1350.25),
                new Coordinate(13.875, -1348.75),
                new Coordinate(13.5, -1500.1)}),
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(13.5, -1325.195),
                new Coordinate(20.27335, -1349.51),
                new Coordinate(20.1275, -1450.325),
                new Coordinate(13.5, -1325.195)})});
    // full precision
    Polygon expected = factory.createPolygon(
        factory.createLinearRing(new Coordinate[] {
            new Coordinate(12.1328125, -1518.375),
            new Coordinate(24.875, -1518.3828125),
            new Coordinate(24.2635, -1284.75),
            new Coordinate(12.325, -1282.125),
            new Coordinate(12.1328125, -1518.375)}),
        new LinearRing[] {
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(13.5, -1500.1),
                new Coordinate(20.27335, -1495.3424),
                new Coordinate(20.1275, -1350.25),
                new Coordinate(13.875, -1348.75),
                new Coordinate(13.5, -1500.1)}),
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(13.5, -1325.195),
                new Coordinate(20.27335, -1349.51),
                new Coordinate(20.1275, -1450.325),
                new Coordinate(13.5, -1325.195)})});
    byte[] encoded = writerFullPrecision.write(poly);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createPolygon(
        factory.createLinearRing(new Coordinate[] {
            new Coordinate(12.133, -1518.375),
            new Coordinate(24.875, -1518.383),
            new Coordinate(24.264, -1284.75),
            new Coordinate(12.325, -1282.125),
            new Coordinate(12.133, -1518.375)}),
        new LinearRing[] {
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(13.5, -1500.1),
                new Coordinate(20.273, -1495.342),
                new Coordinate(20.128, -1350.25),
                new Coordinate(13.875, -1348.75),
                new Coordinate(13.5, -1500.1)}),
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(13.5, -1325.195),
                new Coordinate(20.273, -1349.51),
                new Coordinate(20.128, -1450.325),
                new Coordinate(13.5, -1325.195)})});
    encoded = writer3Precision.write(poly);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createPolygon(
        factory.createLinearRing(new Coordinate[] {
            new Coordinate(12, -1518),
            new Coordinate(25, -1518),
            new Coordinate(24, -1285),
            new Coordinate(12, -1282),
            new Coordinate(12, -1518)}),
        new LinearRing[] {
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(14, -1500),
                new Coordinate(20, -1495),
                new Coordinate(20, -1350),
                new Coordinate(14, -1349),
                new Coordinate(14, -1500)}),
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(14, -1325),
                new Coordinate(20, -1350),
                new Coordinate(20, -1450),
                new Coordinate(14, -1325)})});
    encoded = writer0Precision.write(poly);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3
    expected = factory.createPolygon(
        factory.createLinearRing(new Coordinate[] {
            new Coordinate(0, -2000),
            new Coordinate(0, -2000),
            new Coordinate(0, -1000),
            new Coordinate(0, -1000),
            new Coordinate(0, -2000)}),
        new LinearRing[] {
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(0, -2000),
                new Coordinate(0, -1000),
                new Coordinate(0, -1000),
                new Coordinate(0, -1000),
                new Coordinate(0, -2000)}),
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(0, -1000),
                new Coordinate(0, -1000),
                new Coordinate(0, -1000),
                new Coordinate(0, -1000)})});
    encoded = writerNegativePrecision.write(poly);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    expected = factory.createPolygon();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }

  /** Round-trips a MultiPoint at each precision, plus an empty MultiPoint. */
  @Test
  public void testReadWriteMultiPoint() throws ParseException {
    final MultiPoint points = factory.createMultiPoint(new Point[] {
        factory.createPoint(new Coordinate(12.13281248321, -1518.375)),
        factory.createPoint(new Coordinate(15.875, -1495.38281248325)),
        factory.createPoint(new Coordinate(17.2635, -1384.75))});
    // full precision
    MultiPoint expected = factory.createMultiPoint(new Point[] {
        factory.createPoint(new Coordinate(12.1328125, -1518.375)),
        factory.createPoint(new Coordinate(15.875, -1495.3828125)),
        factory.createPoint(new Coordinate(17.2635, -1384.75))});
    byte[] encoded = writerFullPrecision.write(points);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createMultiPoint(new Point[] {
        factory.createPoint(new Coordinate(12.133, -1518.375)),
        factory.createPoint(new Coordinate(15.875, -1495.383)),
        factory.createPoint(new Coordinate(17.264, -1384.75))});
    encoded = writer3Precision.write(points);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createMultiPoint(new Point[] {
        factory.createPoint(new Coordinate(12, -1518)),
        factory.createPoint(new Coordinate(16, -1495)),
        factory.createPoint(new Coordinate(17, -1385))});
    encoded = writer0Precision.write(points);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3
    expected = factory.createMultiPoint(new Point[] {
        factory.createPoint(new Coordinate(0, -2000)),
        factory.createPoint(new Coordinate(0, -1000)),
        factory.createPoint(new Coordinate(0, -1000))});
    encoded = writerNegativePrecision.write(points);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    expected = factory.createMultiPoint();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }

  /** Round-trips a three-part MultiLineString at each precision, plus an empty MultiLineString. */
  @Test
  public void testReadWriteMultiLineString() throws ParseException {
    final MultiLineString line = factory.createMultiLineString(new LineString[] {
        factory.createLineString(new Coordinate[] {
            new Coordinate(13.5, -1500.1),
            new Coordinate(20.273, -1495.342)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(12.13281248321, -1518.375),
            new Coordinate(15.875, -1495.38281248325),
            new Coordinate(17.2635, -1384.75)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(13.5, -1325.195),
            new Coordinate(20.27335, -1349.51),
            new Coordinate(20.1275, -1450.325)})});
    // full precision
    MultiLineString expected = factory.createMultiLineString(new LineString[] {
        factory.createLineString(new Coordinate[] {
            new Coordinate(13.5, -1500.1),
            new Coordinate(20.273, -1495.342)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(12.1328125, -1518.375),
            new Coordinate(15.875, -1495.3828125),
            new Coordinate(17.2635, -1384.75)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(13.5, -1325.195),
            new Coordinate(20.27335, -1349.51),
            new Coordinate(20.1275, -1450.325)})});
    byte[] encoded = writerFullPrecision.write(line);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createMultiLineString(new LineString[] {
        factory.createLineString(new Coordinate[] {
            new Coordinate(13.5, -1500.1),
            new Coordinate(20.273, -1495.342)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(12.133, -1518.375),
            new Coordinate(15.875, -1495.383),
            new Coordinate(17.264, -1384.75)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(13.5, -1325.195),
            new Coordinate(20.273, -1349.51),
            new Coordinate(20.128, -1450.325)})});
    encoded = writer3Precision.write(line);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createMultiLineString(new LineString[] {
        factory.createLineString(new Coordinate[] {
            new Coordinate(14, -1500),
            new Coordinate(20, -1495)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(12, -1518),
            new Coordinate(16, -1495),
            new Coordinate(17, -1385)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(14, -1325),
            new Coordinate(20, -1350),
            new Coordinate(20, -1450)})});
    encoded = writer0Precision.write(line);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3
    expected = factory.createMultiLineString(new LineString[] {
        factory.createLineString(new Coordinate[] {
            new Coordinate(0, -2000),
            new Coordinate(0, -1000)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(0, -2000),
            new Coordinate(0, -1000),
            new Coordinate(0, -1000)}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(0, -1000),
            new Coordinate(0, -1000),
            new Coordinate(0, -1000)})});
    encoded = writerNegativePrecision.write(line);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    expected = factory.createMultiLineString();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }

  /**
   * Round-trips a MultiPolygon (polygon with holes, simple polygon, and an empty polygon
   * component) at each precision, plus an empty MultiPolygon.
   */
  @Test
  public void testReadWriteMultiPolygon() throws ParseException {
    final MultiPolygon multiPoly = factory.createMultiPolygon(new Polygon[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12.13281248321, -1518.375),
                new Coordinate(24.875, -1518.38281248325),
                new Coordinate(24.2635, -1284.75),
                new Coordinate(12.325, -1282.125),
                new Coordinate(12.13281248321, -1518.375)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1500.1),
                    new Coordinate(20.27335, -1495.3424),
                    new Coordinate(20.1275, -1350.25),
                    new Coordinate(13.875, -1348.75),
                    new Coordinate(13.5, -1500.1)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1325.195),
                    new Coordinate(20.27335, -1349.51),
                    new Coordinate(20.1275, -1450.325),
                    new Coordinate(13.5, -1325.195)})}),
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(1513.5, -0.1),
                new Coordinate(1520.27335, -95.3424),
                new Coordinate(1520.1275, -50.25),
                new Coordinate(1513.875, -48.75),
                new Coordinate(1513.5, -0.1)})),
        factory.createPolygon()});
    // full precision
    MultiPolygon expected = factory.createMultiPolygon(new Polygon[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12.1328125, -1518.375),
                new Coordinate(24.875, -1518.3828125),
                new Coordinate(24.2635, -1284.75),
                new Coordinate(12.325, -1282.125),
                new Coordinate(12.1328125, -1518.375)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1500.1),
                    new Coordinate(20.27335, -1495.3424),
                    new Coordinate(20.1275, -1350.25),
                    new Coordinate(13.875, -1348.75),
                    new Coordinate(13.5, -1500.1)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1325.195),
                    new Coordinate(20.27335, -1349.51),
                    new Coordinate(20.1275, -1450.325),
                    new Coordinate(13.5, -1325.195)})}),
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(1513.5, -0.1),
                new Coordinate(1520.27335, -95.3424),
                new Coordinate(1520.1275, -50.25),
                new Coordinate(1513.875, -48.75),
                new Coordinate(1513.5, -0.1)})),
        factory.createPolygon()});
    byte[] encoded = writerFullPrecision.write(multiPoly);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createMultiPolygon(new Polygon[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12.133, -1518.375),
                new Coordinate(24.875, -1518.383),
                new Coordinate(24.264, -1284.75),
                new Coordinate(12.325, -1282.125),
                new Coordinate(12.133, -1518.375)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1500.1),
                    new Coordinate(20.273, -1495.342),
                    new Coordinate(20.128, -1350.25),
                    new Coordinate(13.875, -1348.75),
                    new Coordinate(13.5, -1500.1)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1325.195),
                    new Coordinate(20.273, -1349.51),
                    new Coordinate(20.128, -1450.325),
                    new Coordinate(13.5, -1325.195)})}),
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(1513.5, -0.1),
                new Coordinate(1520.273, -95.342),
                new Coordinate(1520.128, -50.25),
                new Coordinate(1513.875, -48.75),
                new Coordinate(1513.5, -0.1)})),
        factory.createPolygon()});
    encoded = writer3Precision.write(multiPoly);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createMultiPolygon(new Polygon[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12, -1518),
                new Coordinate(25, -1518),
                new Coordinate(24, -1285),
                new Coordinate(12, -1282),
                new Coordinate(12, -1518)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(14, -1500),
                    new Coordinate(20, -1495),
                    new Coordinate(20, -1350),
                    new Coordinate(14, -1349),
                    new Coordinate(14, -1500)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(14, -1325),
                    new Coordinate(20, -1350),
                    new Coordinate(20, -1450),
                    new Coordinate(14, -1325)})}),
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(1514, 0),
                new Coordinate(1520, -95),
                new Coordinate(1520, -50),
                new Coordinate(1514, -49),
                new Coordinate(1514, 0)})),
        factory.createPolygon()});
    encoded = writer0Precision.write(multiPoly);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3
    expected = factory.createMultiPolygon(new Polygon[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(0, -2000),
                new Coordinate(0, -2000),
                new Coordinate(0, -1000),
                new Coordinate(0, -1000),
                new Coordinate(0, -2000)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(0, -2000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -2000)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000)})}),
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(2000, 0),
                new Coordinate(2000, 0),
                new Coordinate(2000, 0),
                new Coordinate(2000, 0),
                new Coordinate(2000, 0)})),
        factory.createPolygon()});
    encoded = writerNegativePrecision.write(multiPoly);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    expected = factory.createMultiPolygon();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }

  /**
   * Round-trips a heterogeneous GeometryCollection (polygon with holes, line string, point, and
   * empty point/line/polygon components) at each precision.
   */
  @Test
  public void testReadWriteGeometryCollection() throws ParseException {
    final GeometryCollection geoms = factory.createGeometryCollection(new Geometry[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12.13281248321, -1518.375),
                new Coordinate(24.875, -1518.38281248325),
                new Coordinate(24.2635, -1284.75),
                new Coordinate(12.325, -1282.125),
                new Coordinate(12.13281248321, -1518.375)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1500.1),
                    new Coordinate(20.27335, -1495.3424),
                    new Coordinate(20.1275, -1350.25),
                    new Coordinate(13.875, -1348.75),
                    new Coordinate(13.5, -1500.1)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1325.195),
                    new Coordinate(20.27335, -1349.51),
                    new Coordinate(20.1275, -1450.325),
                    new Coordinate(13.5, -1325.195)})}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(1513.5, -0.1),
            new Coordinate(1520.27335, -95.3424),
            new Coordinate(1520.1275, -50.25),
            new Coordinate(1513.875, -48.75),
            new Coordinate(1513.5, -0.1)}),
        factory.createPoint(new Coordinate(12.34, 18.1)),
        factory.createPoint(),
        factory.createLineString(),
        factory.createPolygon()});
    // full precision
    GeometryCollection expected = factory.createGeometryCollection(new Geometry[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12.1328125, -1518.375),
                new Coordinate(24.875, -1518.3828125),
                new Coordinate(24.2635, -1284.75),
                new Coordinate(12.325, -1282.125),
                new Coordinate(12.1328125, -1518.375)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1500.1),
                    new Coordinate(20.27335, -1495.3424),
                    new Coordinate(20.1275, -1350.25),
                    new Coordinate(13.875, -1348.75),
                    new Coordinate(13.5, -1500.1)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1325.195),
                    new Coordinate(20.27335, -1349.51),
                    new Coordinate(20.1275, -1450.325),
                    new Coordinate(13.5, -1325.195)})}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(1513.5, -0.1),
            new Coordinate(1520.27335, -95.3424),
            new Coordinate(1520.1275, -50.25),
            new Coordinate(1513.875, -48.75),
            new Coordinate(1513.5, -0.1)}),
        factory.createPoint(new Coordinate(12.34, 18.1)),
        factory.createPoint(),
        factory.createLineString(),
        factory.createPolygon()});
    byte[] encoded = writerFullPrecision.write(geoms);
    Geometry decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 3 decimal digits
    expected = factory.createGeometryCollection(new Geometry[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12.133, -1518.375),
                new Coordinate(24.875, -1518.383),
                new Coordinate(24.264, -1284.75),
                new Coordinate(12.325, -1282.125),
                new Coordinate(12.133, -1518.375)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1500.1),
                    new Coordinate(20.273, -1495.342),
                    new Coordinate(20.128, -1350.25),
                    new Coordinate(13.875, -1348.75),
                    new Coordinate(13.5, -1500.1)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(13.5, -1325.195),
                    new Coordinate(20.273, -1349.51),
                    new Coordinate(20.128, -1450.325),
                    new Coordinate(13.5, -1325.195)})}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(1513.5, -0.1),
            new Coordinate(1520.273, -95.342),
            new Coordinate(1520.128, -50.25),
            new Coordinate(1513.875, -48.75),
            new Coordinate(1513.5, -0.1)}),
        factory.createPoint(new Coordinate(12.34, 18.1)),
        factory.createPoint(),
        factory.createLineString(),
        factory.createPolygon()});
    encoded = writer3Precision.write(geoms);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // 0 decimal digits
    expected = factory.createGeometryCollection(new Geometry[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(12, -1518),
                new Coordinate(25, -1518),
                new Coordinate(24, -1285),
                new Coordinate(12, -1282),
                new Coordinate(12, -1518)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(14, -1500),
                    new Coordinate(20, -1495),
                    new Coordinate(20, -1350),
                    new Coordinate(14, -1349),
                    new Coordinate(14, -1500)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(14, -1325),
                    new Coordinate(20, -1350),
                    new Coordinate(20, -1450),
                    new Coordinate(14, -1325)})}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(1514, 0),
            new Coordinate(1520, -95),
            new Coordinate(1520, -50),
            new Coordinate(1514, -49),
            new Coordinate(1514, 0)}),
        factory.createPoint(new Coordinate(12, 18)),
        factory.createPoint(),
        factory.createLineString(),
        factory.createPolygon()});
    encoded = writer0Precision.write(geoms);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // precision -3
    expected = factory.createGeometryCollection(new Geometry[] {
        factory.createPolygon(
            factory.createLinearRing(new Coordinate[] {
                new Coordinate(0, -2000),
                new Coordinate(0, -2000),
                new Coordinate(0, -1000),
                new Coordinate(0, -1000),
                new Coordinate(0, -2000)}),
            new LinearRing[] {
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(0, -2000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -2000)}),
                factory.createLinearRing(new Coordinate[] {
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000),
                    new Coordinate(0, -1000)})}),
        factory.createLineString(new Coordinate[] {
            new Coordinate(2000, 0),
            new Coordinate(2000, 0),
            new Coordinate(2000, 0),
            new Coordinate(2000, 0),
            new Coordinate(2000, 0)}),
        factory.createPoint(new Coordinate(0, 0)),
        factory.createPoint(),
        factory.createLineString(),
        factory.createPolygon()});
    encoded = writerNegativePrecision.write(geoms);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
    // test empty
    // NOTE(review): round-trips an empty MultiPolygon rather than an empty GeometryCollection --
    // presumably carried over from the multi-polygon test; confirm this is intentional.
    expected = factory.createMultiPolygon();
    encoded = writerFullPrecision.write(expected);
    decoded = reader.read(encoded);
    Assert.assertEquals(expected, decoded);
  }
}
================================================ FILE: core/geotime/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.core.geotime.TestGeoTimePersistableRegistry ================================================ FILE: core/index/pom.xml ================================================ 4.0.0 geowave-core-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-core-index GeoWave Index com.clearspring.analytics stream com.google.uzaygezen uzaygezen-core com.github.spotbugs spotbugs-annotations net.sf.json-lib json-lib jdk15 com.github.ben-manes.caffeine caffeine ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/ByteArray.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.util.Arrays; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * This class is a wrapper around a byte array to ensure equals and hashcode operations use the * values of the bytes rather than explicit object identity */ public class ByteArray implements java.io.Serializable, Comparable { private static final long serialVersionUID = 1L; public static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; protected byte[] bytes; @SuppressFBWarnings("SE_TRANSIENT_FIELD_NOT_RESTORED") protected transient String string; public ByteArray() { this(EMPTY_BYTE_ARRAY); } public ByteArray(final byte[] bytes) { this.bytes = bytes; } public ByteArray(final String string) { bytes = StringUtils.stringToBinary(string); this.string = string; } public byte[] getBytes() { return bytes; } public byte[] getNextPrefix() { return ByteArrayUtils.getNextPrefix(bytes); } public String getString() { if (string == null) { string = StringUtils.stringFromBinary(bytes); } return string; } public String getHexString() { return ByteArrayUtils.getHexString(bytes); } @Override public String toString() { return "ByteArray[" + bytes.length + "]=\"" + getString() + "\""; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(bytes); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final ByteArray other = (ByteArray) obj; return Arrays.equals(bytes, other.bytes); } public 
static byte[] toBytes(final ByteArray[] ids) { int len = VarintUtils.unsignedIntByteLength(ids.length); for (final ByteArray id : ids) { len += (id.bytes.length + VarintUtils.unsignedIntByteLength(id.bytes.length)); } final ByteBuffer buffer = ByteBuffer.allocate(len); VarintUtils.writeUnsignedInt(ids.length, buffer); for (final ByteArray id : ids) { VarintUtils.writeUnsignedInt(id.bytes.length, buffer); buffer.put(id.bytes); } return buffer.array(); } public static ByteArray[] fromBytes(final byte[] idData) { final ByteBuffer buffer = ByteBuffer.wrap(idData); final int len = VarintUtils.readUnsignedInt(buffer); ByteArrayUtils.verifyBufferSize(buffer, len); final ByteArray[] result = new ByteArray[len]; for (int i = 0; i < len; i++) { final int idSize = VarintUtils.readUnsignedInt(buffer); final byte[] id = ByteArrayUtils.safeRead(buffer, idSize); result[i] = new ByteArray(id); } return result; } @Override public int compareTo(final ByteArray o) { return ByteArrayUtils.compare(bytes, o.bytes); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/ByteArrayRange.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; /** * Defines a unit interval on a number line */ public class ByteArrayRange implements Comparable { protected byte[] start; protected byte[] end; protected boolean singleValue; /** * * * * @param start start of unit interval * @param end end of unit interval */ public ByteArrayRange(final byte[] start, final byte[] end) { this(start, end, false); } /** * * * * @param start start of unit interval * @param end end of unit interval */ public ByteArrayRange(final byte[] start, final byte[] end, final boolean singleValue) { this.start = start; this.end = end; this.singleValue = singleValue; } public byte[] getStart() { return start; } public byte[] getEnd() { return end; } public byte[] getStartAsPreviousPrefix() { if (start == null) { return null; } return ByteArrayUtils.getPreviousPrefix(start); } public byte[] getEndAsNextPrefix() { if (end == null) { return null; } return ByteArrayUtils.getNextPrefix(end); } public boolean isSingleValue() { return singleValue; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((end == null) ? 0 : Arrays.hashCode(end)); result = (prime * result) + (singleValue ? 1231 : 1237); result = (prime * result) + ((start == null) ? 
0 : Arrays.hashCode(start)); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final ByteArrayRange other = (ByteArrayRange) obj; if (end == null) { if (other.end != null) { return false; } } else if (!Arrays.equals(end, other.end)) { return false; } if (singleValue != other.singleValue) { return false; } if (start == null) { if (other.start != null) { return false; } } else if (!Arrays.equals(start, other.start)) { return false; } return true; } public boolean intersects(final ByteArrayRange other) { if (isSingleValue()) { if (other.isSingleValue()) { return Arrays.equals(getStart(), other.getStart()); } return false; } return ((ByteArrayUtils.compare(getStart(), other.getEndAsNextPrefix()) < 0) && (ByteArrayUtils.compare(getEndAsNextPrefix(), other.getStart()) > 0)); } public ByteArrayRange intersection(final ByteArrayRange other) { return new ByteArrayRange( ByteArrayUtils.compare(start, other.start) <= 0 ? other.start : start, ByteArrayUtils.compare(getEndAsNextPrefix(), other.getEndAsNextPrefix()) >= 0 ? other.end : end); } public ByteArrayRange union(final ByteArrayRange other) { return new ByteArrayRange( ByteArrayUtils.compare(start, other.start) <= 0 ? start : other.start, ByteArrayUtils.compare(getEndAsNextPrefix(), other.getEndAsNextPrefix()) >= 0 ? end : other.end); } @Override public int compareTo(final ByteArrayRange other) { final int diff = ByteArrayUtils.compare(getStart(), other.getStart()); return diff != 0 ? 
diff : ByteArrayUtils.compare(getEndAsNextPrefix(), other.getEndAsNextPrefix()); } public static enum MergeOperation { UNION, INTERSECTION } public static final Collection mergeIntersections( final Collection ranges, final MergeOperation op) { final List rangeList = new ArrayList<>(ranges); // sort order so the first range can consume following ranges Collections.sort(rangeList); final List result = new ArrayList<>(); for (int i = 0; i < rangeList.size();) { ByteArrayRange r1 = rangeList.get(i); int j = i + 1; for (; j < rangeList.size(); j++) { final ByteArrayRange r2 = rangeList.get(j); if (r1.intersects(r2)) { if (op.equals(MergeOperation.UNION)) { r1 = r1.union(r2); } else { r1 = r1.intersection(r2); } } else { break; } } i = j; result.add(r1); } return result; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/ByteArrayUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Base64; import java.util.Base64.Encoder; import java.util.List; import java.util.UUID; import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; /** * Convenience methods for converting binary data to and from strings. The encoding and decoding is * done in base-64. These methods should be used for converting data that is binary in nature to a * String representation for transport. Use StringUtils for serializing and deserializing text-based * data. * *

Additionally, this class has methods for manipulating byte arrays, such as combining or * incrementing them. */ public class ByteArrayUtils { private static final Logger LOGGER = LoggerFactory.getLogger(ByteArrayUtils.class); private static Encoder ENCODER = Base64.getUrlEncoder().withoutPadding(); private static byte[] internalCombineArrays(final byte[] beginning, final byte[] end) { final byte[] combined = new byte[beginning.length + end.length]; System.arraycopy(beginning, 0, combined, 0, beginning.length); System.arraycopy(end, 0, combined, beginning.length, end.length); return combined; } /** * Convert binary data to a string for transport * * @param byteArray the binary data * @return the base64url encoded string */ public static String byteArrayToString(final byte[] byteArray) { return new String(ENCODER.encode(byteArray), StringUtils.getGeoWaveCharset()); } /** * Convert a string representation of binary data back to a String * * @param str the string representation of binary data * @return the base64url decoded binary data */ public static byte[] byteArrayFromString(final String str) { return Base64.getUrlDecoder().decode(str); } /** * Throw an exception if the requested length is longer than the remaining buffer size. * * @param buffer the byte buffer * @param length the number of bytes to read */ public static void verifyBufferSize(final ByteBuffer buffer, final int length) { if (length > buffer.remaining()) { throw new GeoWaveSerializationException( "Tried to read more data than was available in buffer."); } } /** * Read bytes from the buffer, but only if the buffer's remaining length supports it. 
* * @param buffer the byte buffer * @param length the number of bytes to read * @return the bytes that were read */ public static byte[] safeRead(final ByteBuffer buffer, final int length) { verifyBufferSize(buffer, length); final byte[] readBytes = new byte[length]; if (length > 0) { buffer.get(readBytes); } return readBytes; } /** * Combine 2 arrays into one large array. If both are not null it will append id2 to id1 and the * result will be of length id1.length + id2.length * * @param id1 the first byte array to use (the start of the result) * @param id2 the second byte array to combine (appended to id1) * @return the concatenated byte array */ public static byte[] combineArrays(final byte[] id1, final byte[] id2) { byte[] combinedId; if ((id1 == null) || (id1.length == 0)) { combinedId = id2; } else if ((id2 == null) || (id2.length == 0)) { combinedId = id1; } else { // concatenate bin ID 2 to the end of bin ID 1 combinedId = ByteArrayUtils.internalCombineArrays(id1, id2); } return combinedId; } public static byte[] replace(final byte[] arr, final byte[] find, final byte[] replace) { if ((find == null) || (find.length == 0) || (find.length > arr.length) || (replace == null)) { return arr; } int match = 0; int matchCount = 0; for (int i = 0; i < arr.length; i++) { if (arr[i] == find[match]) { match++; if (match == find.length) { matchCount++; match = 0; } } else if ((match > 0) && (arr[i] == find[0])) { match = 1; } else { match = 0; } } if (matchCount == 0) { return arr; } final byte[] newBytes = new byte[(arr.length - (find.length * matchCount)) + (replace.length * matchCount)]; match = 0; int copyIdx = 0; for (int i = 0; i < arr.length; i++) { if (arr[i] == find[match]) { match++; if (match == find.length) { for (int j = 0; j < replace.length; j++) { newBytes[copyIdx++] = replace[j]; } match = 0; } continue; } else if (match > 0) { for (int j = i - match; j < i; j++) { newBytes[copyIdx++] = arr[j]; } if (arr[i] == find[0]) { copyIdx--; match = 1; } else { 
match = 0; } } if (match == 0) { newBytes[copyIdx++] = arr[i]; } } return newBytes; } /** * add 1 to the least significant bit in this byte array (the last byte in the array) * * @param value the array to increment * @return will return true as long as the value did not overflow */ public static boolean increment(final byte[] value) { for (int i = value.length - 1; i >= 0; i--) { value[i]++; if (value[i] != 0) { return true; } } return value[0] != 0; } /** * Converts a UUID to a byte array * * @param uuid the uuid * @return the byte array representing that UUID */ public static byte[] uuidToByteArray(final UUID uuid) { final ByteBuffer bb = ByteBuffer.wrap(new byte[16]); bb.putLong(uuid.getMostSignificantBits()); bb.putLong(uuid.getLeastSignificantBits()); return bb.array(); } /** * Converts a long to a byte array * * @param l the long * @return the byte array representing that long */ public static byte[] longToByteArray(final long l) { final ByteBuffer bb = ByteBuffer.allocate(Long.BYTES); bb.putLong(l); return bb.array(); } /** * Converts a byte array to a long * * @param bytes the byte array the long * @return the long represented by the byte array */ public static long byteArrayToLong(final byte[] bytes) { final ByteBuffer bb = ByteBuffer.allocate(Long.BYTES); bb.put(bytes); bb.flip(); return bb.getLong(); } public static byte[] longToBytes(long val) { final int radix = 1 << 8; final int mask = radix - 1; // we want to eliminate trailing 0's (ie. 
truncate the byte array by // trailing 0's) int trailingZeros = 0; while ((((int) val) & mask) == 0) { val >>>= 8; trailingZeros++; if (trailingZeros == 8) { return new byte[0]; } } final byte[] array = new byte[8 - trailingZeros]; int pos = array.length; do { array[--pos] = (byte) (((int) val) & mask); val >>>= 8; } while ((val != 0) && (pos > 0)); return array; } public static long bytesToLong(final byte[] bytes) { long value = 0; for (int i = 0; i < 8; i++) { value = (value << 8); if (i < bytes.length) { value += (bytes[i] & 0xff); } } return value; } /** * Combines two variable length byte arrays into one large byte array and appends the length of * each individual byte array in sequential order at the end of the combined byte array. * *

Given byte_array_1 of length 8 + byte_array_2 of length 16, the result will be byte_array1 * + byte_array_2 + 8 + 16. * *

Lengths are put after the individual arrays so they don't impact sorting when used within * the key of a sorted key-value data store. * * @param array1 the first byte array * @param array2 the second byte array * @return the combined byte array including the individual byte array lengths */ public static byte[] combineVariableLengthArrays(final byte[] array1, final byte[] array2) { Preconditions.checkNotNull(array1, "First byte array cannot be null"); Preconditions.checkNotNull(array2, "Second byte array cannot be null"); Preconditions.checkArgument(array1.length > 1, "First byte array cannot have length 0"); Preconditions.checkArgument(array2.length > 1, "Second byte array cannot have length 0"); final byte[] combinedWithoutLengths = ByteArrayUtils.internalCombineArrays(array1, array2); final ByteBuffer combinedWithLengthsAppended = ByteBuffer.allocate(combinedWithoutLengths.length + 8); // 8 // for // two // integer // lengths combinedWithLengthsAppended.put(combinedWithoutLengths); combinedWithLengthsAppended.putInt(array1.length); combinedWithLengthsAppended.putInt(array2.length); return combinedWithLengthsAppended.array(); } public static Pair splitVariableLengthArrays(final byte[] combinedArray) { final ByteBuffer combined = ByteBuffer.wrap(combinedArray); final byte[] combinedArrays = new byte[combinedArray.length - 8]; combined.get(combinedArrays); final ByteBuffer bb = ByteBuffer.wrap(combinedArrays); final int len1 = combined.getInt(); final int len2 = combined.getInt(); final byte[] part1 = new byte[len1]; final byte[] part2 = new byte[len2]; bb.get(part1); bb.get(part2); return Pair.of(part1, part2); } public static String shortToString(final short input) { return byteArrayToString(shortToByteArray(input)); } public static short shortFromString(final String input) { return byteArrayToShort(byteArrayFromString(input)); } public static byte[] shortToByteArray(final short input) { return new byte[] {(byte) (input & 0xFF), (byte) ((input >> 8) & 0xFF)}; } 
public static short byteArrayToShort(final byte[] bytes) { int r = bytes[1] & 0xFF; r = (r << 8) | (bytes[0] & 0xFF); return (short) r; } public static byte[] variableLengthEncode(long n) { final int numRelevantBits = 64 - Long.numberOfLeadingZeros(n); int numBytes = (numRelevantBits + 6) / 7; if (numBytes == 0) { numBytes = 1; } final byte[] output = new byte[numBytes]; for (int i = numBytes - 1; i >= 0; i--) { int curByte = (int) (n & 0x7F); if (i != (numBytes - 1)) { curByte |= 0x80; } output[i] = (byte) curByte; n >>>= 7; } return output; } public static long variableLengthDecode(final byte[] b) { long n = 0; for (int i = 0; i < b.length; i++) { final int curByte = b[i] & 0xFF; n = (n << 7) | (curByte & 0x7F); if ((curByte & 0x80) == 0) { break; } } return n; } public static byte[] getNextPrefix(final byte[] rowKeyPrefix) { int offset = rowKeyPrefix.length; while (offset > 0) { if (rowKeyPrefix[offset - 1] != (byte) 0xFF) { break; } offset--; } if (offset == 0) { return getNextInclusive(rowKeyPrefix); } final byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset); // And increment the last one newStopRow[newStopRow.length - 1]++; return newStopRow; } public static byte[] getNextInclusive(final byte[] rowKeyPrefix) { return ByteArrayUtils.combineArrays( rowKeyPrefix, new byte[] { (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF}); } public static byte[] getPreviousPrefix(final byte[] rowKeyPrefix) { int offset = rowKeyPrefix.length; while (offset > 0) { if (rowKeyPrefix[offset - 1] != (byte) 0x00) { break; } offset--; } if (offset == 0) { return new byte[0]; } final byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset); // And decrement the last one newStopRow[newStopRow.length - 1]--; return newStopRow; } public static int compare(final byte[] array1, final byte[] array2) { if (array2 == null) { if (array1 == null) { return 0; } return -1; } if (array1 == null) { return 1; } for (int i = 0, j = 0; (i < 
array1.length) && (j < array2.length); i++, j++) { final int a = (array1[i] & 0xff); final int b = (array2[j] & 0xff); if (a != b) { return a - b; } } return array1.length - array2.length; } public static int compareToPrefix(final byte[] array, final byte[] prefix) { if (prefix == null) { if (array == null) { return 0; } return -1; } if (array == null) { return 1; } for (int i = 0, j = 0; (i < array.length) && (j < prefix.length); i++, j++) { final int a = (array[i] & 0xff); final int b = (prefix[j] & 0xff); if (a != b) { return a - b; } } if (prefix.length <= array.length) { return 0; } for (int i = array.length; i < prefix.length; i++) { final int a = (prefix[i] & 0xff); if (a != 0) { return -1; } } return 0; } public static boolean startsWith(final byte[] bytes, final byte[] prefix) { if ((bytes == null) || (prefix == null) || (prefix.length > bytes.length)) { return false; } for (int i = 0; i < prefix.length; i++) { if (bytes[i] != prefix[i]) { return false; } } return true; } public static boolean endsWith(final byte[] bytes, final byte[] suffix) { if ((bytes == null) || (suffix == null) || (suffix.length > bytes.length)) { return false; } final int suffixEnd = suffix.length - 1; final int bytesEnd = bytes.length - 1; for (int i = 0; i < suffix.length; i++) { if (bytes[bytesEnd - i] != suffix[suffixEnd - i]) { return false; } } return true; } public static boolean matchesPrefixRanges(final byte[] bytes, final List ranges) { return ranges.stream().anyMatch(range -> { return (ByteArrayUtils.compareToPrefix(bytes, range.getStart()) >= 0) && (ByteArrayUtils.compareToPrefix(bytes, range.getEnd()) <= 0); }); } public static String getHexString(final byte[] bytes) { final StringBuffer str = new StringBuffer(); for (final byte b : bytes) { str.append(String.format("%02X ", b)); } return str.toString(); } public static ByteArrayRange getSingleRange(final List ranges) { byte[] start = null; byte[] end = null; if (ranges == null) { return null; } for (final 
ByteArrayRange range : ranges) { if ((start == null) || (ByteArrayUtils.compare(range.getStart(), start) < 0)) { start = range.getStart(); } if ((end == null) || (ByteArrayUtils.compare(range.getEnd(), end) > 0)) { end = range.getEnd(); } } return new ByteArrayRange(start, end); } public static void addAllIntermediaryByteArrays( final List retVal, final ByteArrayRange range) { byte[] start; byte[] end; // they had better not both be null or this method would quickly eat up memory if (range.getStart() == null) { start = new byte[0]; } else { start = range.getStart(); } if (range.getEnd() == null) { // this isn't precisely the end because the actual end is infinite, it'd be far better to set // the start and end but this at least covers the edge case if they're not given end = new byte[] { (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF}; } else { end = range.getEnd(); } byte[] currentRowId = Arrays.copyOf(start, start.length); retVal.add(currentRowId); while (!Arrays.equals(currentRowId, end)) { currentRowId = Arrays.copyOf(currentRowId, currentRowId.length); // increment until we reach the end row ID boolean overflow = !ByteArrayUtils.increment(currentRowId); if (!overflow) { retVal.add(currentRowId); } else { // the increment caused an overflow which shouldn't // ever happen assuming the start row ID is less // than the end row ID LOGGER.warn( "Row IDs overflowed when ingesting data; start of range decomposition must be less than or equal to end of range. This may be because the start of the decomposed range is higher than the end of the range."); overflow = true; break; } } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/CompoundIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import com.google.common.collect.Collections2; import net.sf.json.JSONException; import net.sf.json.JSONObject; /** * Class that implements a compound index strategy. It combines a PartitionIndexStrategy with a * NumericIndexStrategy to enable the addition of a partitioning strategy to any numeric index * strategy. 
*/ public class CompoundIndexStrategy implements NumericIndexStrategy { private PartitionIndexStrategy subStrategy1; private NumericIndexStrategy subStrategy2; private int defaultMaxDuplication; private int metaDataSplit = -1; public CompoundIndexStrategy( final PartitionIndexStrategy subStrategy1, final NumericIndexStrategy subStrategy2) { this.subStrategy1 = subStrategy1; this.subStrategy2 = subStrategy2; defaultMaxDuplication = (int) Math.ceil(Math.pow(2, getNumberOfDimensions())); } protected CompoundIndexStrategy() {} public PartitionIndexStrategy getPrimarySubStrategy() { return subStrategy1; } public NumericIndexStrategy getSecondarySubStrategy() { return subStrategy2; } @Override public byte[] toBinary() { final byte[] delegateBinary1 = PersistenceUtils.toBinary(subStrategy1); final byte[] delegateBinary2 = PersistenceUtils.toBinary(subStrategy2); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(delegateBinary1.length) + delegateBinary1.length + delegateBinary2.length); VarintUtils.writeUnsignedInt(delegateBinary1.length, buf); buf.put(delegateBinary1); buf.put(delegateBinary2); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int delegateBinary1Length = VarintUtils.readUnsignedInt(buf); final byte[] delegateBinary1 = ByteArrayUtils.safeRead(buf, delegateBinary1Length); final byte[] delegateBinary2 = new byte[buf.remaining()]; buf.get(delegateBinary2); subStrategy1 = (PartitionIndexStrategy) PersistenceUtils.fromBinary(delegateBinary1); subStrategy2 = (NumericIndexStrategy) PersistenceUtils.fromBinary(delegateBinary2); defaultMaxDuplication = (int) Math.ceil(Math.pow(2, getNumberOfDimensions())); } /** * Get the total number of dimensions from all sub-strategies * * @return the number of dimensions */ public int getNumberOfDimensions() { final NumericDimensionDefinition[] dimensions = subStrategy2.getOrderedDimensionDefinitions(); if (dimensions == 
null) { return 0; } return dimensions.length; } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... hints) { return getQueryRanges(indexedRange, -1, hints); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxEstimatedRangeDecomposition, final IndexMetaData... hints) { final byte[][] partitionIds = subStrategy1.getQueryPartitionKeys(indexedRange, extractHints(hints, 0)); final QueryRanges queryRanges = subStrategy2.getQueryRanges( indexedRange, maxEstimatedRangeDecomposition, extractHints(hints, 1)); return new QueryRanges(partitionIds, queryRanges); } @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { return getInsertionIds(indexedData, defaultMaxDuplication); } @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxEstimatedDuplicateIds) { final byte[][] partitionKeys = subStrategy1.getInsertionPartitionKeys(indexedData); final InsertionIds insertionIds = subStrategy2.getInsertionIds(indexedData, maxEstimatedDuplicateIds); final boolean partitionKeysEmpty = (partitionKeys == null) || (partitionKeys.length == 0); if ((insertionIds == null) || (insertionIds.getPartitionKeys() == null) || insertionIds.getPartitionKeys().isEmpty()) { if (partitionKeysEmpty) { return new InsertionIds(); } else { return new InsertionIds( Arrays.stream(partitionKeys).map( input -> new SinglePartitionInsertionIds(input)).collect(Collectors.toList())); } } else if (partitionKeysEmpty) { return insertionIds; } else { final List permutations = new ArrayList<>(insertionIds.getPartitionKeys().size() * partitionKeys.length); for (final byte[] partitionKey : partitionKeys) { permutations.addAll(Collections2.transform(insertionIds.getPartitionKeys(), input -> { if (input.getPartitionKey() != null) { return new SinglePartitionInsertionIds( ByteArrayUtils.combineArrays(partitionKey, 
input.getPartitionKey()), input.getSortKeys()); } else { return new SinglePartitionInsertionIds(partitionKey, input.getSortKeys()); } })); } return new InsertionIds(permutations); } } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { return subStrategy2.getRangeForId(trimPartitionIdForSortStrategy(partitionKey), sortKey); } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { return subStrategy2.getCoordinatesPerDimension( trimPartitionIdForSortStrategy(partitionKey), sortKey); } private byte[] trimPartitionIdForSortStrategy(final byte[] partitionKey) { final byte[] trimmedKey = trimPartitionForSubstrategy(subStrategy1.getPartitionKeyLength(), false, partitionKey); return trimmedKey == null ? partitionKey : trimmedKey; } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return subStrategy2.getOrderedDimensionDefinitions(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((subStrategy1 == null) ? 0 : subStrategy1.hashCode()); result = (prime * result) + ((subStrategy2 == null) ? 
0 : subStrategy2.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CompoundIndexStrategy other = (CompoundIndexStrategy) obj; if (subStrategy1 == null) { if (other.subStrategy1 != null) { return false; } } else if (!subStrategy1.equals(other.subStrategy1)) { return false; } if (subStrategy2 == null) { if (other.subStrategy2 != null) { return false; } } else if (!subStrategy2.equals(other.subStrategy2)) { return false; } return true; } @Override public String getId() { return StringUtils.intToString(hashCode()); } @Override public double[] getHighestPrecisionIdRangePerDimension() { return subStrategy2.getHighestPrecisionIdRangePerDimension(); } @Override public int getPartitionKeyLength() { return subStrategy1.getPartitionKeyLength() + subStrategy2.getPartitionKeyLength(); } @Override public List createMetaData() { final List result = new ArrayList<>(); for (final IndexMetaData metaData : (List) subStrategy1.createMetaData()) { result.add( new CompoundIndexMetaDataWrapper( metaData, subStrategy1.getPartitionKeyLength(), (byte) 0)); } metaDataSplit = result.size(); for (final IndexMetaData metaData : subStrategy2.createMetaData()) { result.add( new CompoundIndexMetaDataWrapper( metaData, subStrategy1.getPartitionKeyLength(), (byte) 1)); } return result; } private int getMetaDataSplit() { if (metaDataSplit == -1) { metaDataSplit = subStrategy1.createMetaData().size(); } return metaDataSplit; } private IndexMetaData[] extractHints(final IndexMetaData[] hints, final int indexNo) { if ((hints == null) || (hints.length == 0)) { return hints; } final int splitPoint = getMetaDataSplit(); final int start = (indexNo == 0) ? 0 : splitPoint; final int stop = (indexNo == 0) ? 
splitPoint : hints.length; final IndexMetaData[] result = new IndexMetaData[stop - start]; int p = 0; for (int i = start; i < stop; i++) { result[p++] = ((CompoundIndexMetaDataWrapper) hints[i]).metaData; } return result; } /** * Delegate Metadata item for an underlying index. For CompoundIndexStrategy, this delegate wraps * the meta data for one of the two indices. The primary function of this class is to extract out * the parts of the ByteArrayId that are specific to each index during an 'update' operation. */ protected static class CompoundIndexMetaDataWrapper implements IndexMetaData { private IndexMetaData metaData; private int partition1Length; private byte index; protected CompoundIndexMetaDataWrapper() {} public CompoundIndexMetaDataWrapper( final IndexMetaData metaData, final int partition1Length, final byte index) { super(); this.partition1Length = partition1Length; this.metaData = metaData; this.index = index; } @Override public byte[] toBinary() { final byte[] metaBytes = PersistenceUtils.toBinary(metaData); final int length = metaBytes.length + VarintUtils.unsignedIntByteLength(metaBytes.length) + 1 + VarintUtils.unsignedIntByteLength(partition1Length); final ByteBuffer buf = ByteBuffer.allocate(length); VarintUtils.writeUnsignedInt(metaBytes.length, buf); buf.put(metaBytes); buf.put(index); VarintUtils.writeUnsignedInt(partition1Length, buf); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int metaBytesLength = VarintUtils.readUnsignedInt(buf); final byte[] metaBytes = new byte[metaBytesLength]; buf.get(metaBytes); metaData = (IndexMetaData) PersistenceUtils.fromBinary(metaBytes); index = buf.get(); partition1Length = VarintUtils.readUnsignedInt(buf); } @Override public void merge(final Mergeable merge) { if (merge instanceof CompoundIndexMetaDataWrapper) { final CompoundIndexMetaDataWrapper compound = (CompoundIndexMetaDataWrapper) merge; 
metaData.merge(compound.metaData); } } @Override public void insertionIdsAdded(final InsertionIds insertionIds) { metaData.insertionIdsAdded(trimPartitionForSubstrategy(insertionIds)); } private InsertionIds trimPartitionForSubstrategy(final InsertionIds insertionIds) { final List retVal = new ArrayList<>(); for (final SinglePartitionInsertionIds partitionIds : insertionIds.getPartitionKeys()) { final byte[] trimmedPartitionId = CompoundIndexStrategy.trimPartitionForSubstrategy( partition1Length, index == 0, partitionIds.getPartitionKey()); if (trimmedPartitionId == null) { return insertionIds; } else { retVal.add( new SinglePartitionInsertionIds(trimmedPartitionId, partitionIds.getSortKeys())); } } return new InsertionIds(retVal); } @Override public void insertionIdsRemoved(final InsertionIds insertionIds) { metaData.insertionIdsRemoved(trimPartitionForSubstrategy(insertionIds)); } /** Convert Tiered Index Metadata statistics to a JSON object */ @Override public JSONObject toJSONObject() throws JSONException { final JSONObject jo = new JSONObject(); jo.put("type", "CompoundIndexMetaDataWrapper"); jo.put("index", index); return jo; } } /** * @param partition1Length the length of the partition key contributed by the first substrategy * @param isFirstSubstrategy if the trimming is for the first substrategy * @param compoundPartitionId the compound partition id * @return if the partition id requires trimming, the new trimmed key will be returned, otherwise * if trimming isn't necessary it returns null */ private static byte[] trimPartitionForSubstrategy( final int partition1Length, final boolean isFirstSubstrategy, final byte[] compoundPartitionId) { if ((partition1Length > 0) && ((compoundPartitionId.length - partition1Length) > 0)) { if (isFirstSubstrategy) { return Arrays.copyOfRange(compoundPartitionId, 0, partition1Length); } else { return Arrays.copyOfRange( compoundPartitionId, partition1Length, compoundPartitionId.length); } } return null; } @Override public 
MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... hints) { return subStrategy2.getCoordinateRangesPerDimension(dataRange, hints); } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { final byte[][] partitionKeys1 = subStrategy1.getInsertionPartitionKeys(insertionData); final byte[][] partitionKeys2 = subStrategy2.getInsertionPartitionKeys(insertionData); if ((partitionKeys1 == null) || (partitionKeys1.length == 0)) { return partitionKeys2; } if ((partitionKeys2 == null) || (partitionKeys2.length == 0)) { return partitionKeys1; } // return permutations final byte[][] partitionKeys = new byte[partitionKeys1.length * partitionKeys2.length][]; int i = 0; for (final byte[] partitionKey1 : partitionKeys1) { for (final byte[] partitionKey2 : partitionKeys2) { partitionKeys[i++] = ByteArrayUtils.combineArrays(partitionKey1, partitionKey2); } } return partitionKeys; } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... 
hints) { final byte[][] partitionKeys1 = subStrategy1.getQueryPartitionKeys(queryData, hints); final byte[][] partitionKeys2 = subStrategy2.getQueryPartitionKeys(queryData, hints); if ((partitionKeys1 == null) || (partitionKeys1.length == 0)) { return partitionKeys2; } if ((partitionKeys2 == null) || (partitionKeys2.length == 0)) { return partitionKeys1; } // return all permutations of partitionKeys final byte[][] partitionKeys = new byte[partitionKeys1.length * partitionKeys2.length][]; int i = 0; for (final byte[] partitionKey1 : partitionKeys1) { for (final byte[] partitionKey2 : partitionKeys2) { partitionKeys[i++] = ByteArrayUtils.combineArrays(partitionKey1, partitionKey2); } } return partitionKeys; } @Override public byte[][] getPredefinedSplits() { return subStrategy1.getPredefinedSplits(); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/Coordinate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.Arrays; public class Coordinate { private long coordinate; private byte[] binId; protected Coordinate() {} public Coordinate(final long coordinate, final byte[] binId) { this.coordinate = coordinate; this.binId = binId; } public long getCoordinate() { return coordinate; } public void setCoordinate(final long coordinate) { this.coordinate = coordinate; } public byte[] getBinId() { return binId; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(binId); result = (prime * result) + (int) (coordinate ^ (coordinate >>> 32)); result = (prime * result) + Arrays.hashCode(binId); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Coordinate other = (Coordinate) obj; if (!Arrays.equals(binId, other.binId)) { return false; } if (coordinate != other.coordinate) { return false; } return true; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/CoordinateRange.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.nio.ByteBuffer;
import java.util.Arrays;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * A contiguous range of coordinate values [min, max] in one dimension of an index, qualified by an
 * optional bin id for binned dimensions.
 */
public class CoordinateRange implements Persistable {
  private long minCoordinate;
  private long maxCoordinate;
  // optional; may be null when the dimension is not binned
  private byte[] binId;

  /** No-arg constructor required for Persistable deserialization. */
  protected CoordinateRange() {}

  public CoordinateRange(final long minCoordinate, final long maxCoordinate, final byte[] binId) {
    this.minCoordinate = minCoordinate;
    this.maxCoordinate = maxCoordinate;
    this.binId = binId;
  }

  public long getMinCoordinate() {
    return minCoordinate;
  }

  public long getMaxCoordinate() {
    return maxCoordinate;
  }

  public byte[] getBinId() {
    return binId;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + Arrays.hashCode(binId);
    result = (prime * result) + (int) (maxCoordinate ^ (maxCoordinate >>> 32));
    result = (prime * result) + (int) (minCoordinate ^ (minCoordinate >>> 32));
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final CoordinateRange other = (CoordinateRange) obj;
    if (!Arrays.equals(binId, other.binId)) {
      return false;
    }
    if (maxCoordinate != other.maxCoordinate) {
      return false;
    }
    if (minCoordinate != other.minCoordinate) {
      return false;
    }
    return true;
  }

  /**
   * Serialized layout: varint min, varint max, then the raw binId bytes. The binId is written
   * WITHOUT a length prefix — {@link #fromBinary(byte[])} consumes all remaining bytes as the bin
   * id, so nothing may ever be appended after it.
   */
  @Override
  public byte[] toBinary() {
    final ByteBuffer buf =
        ByteBuffer.allocate(
            VarintUtils.unsignedLongByteLength(minCoordinate)
                + VarintUtils.unsignedLongByteLength(maxCoordinate)
                + (binId == null ? 0 : binId.length));
    VarintUtils.writeUnsignedLong(minCoordinate, buf);
    VarintUtils.writeUnsignedLong(maxCoordinate, buf);
    if (binId != null) {
      buf.put(binId);
    }
    return buf.array();
  }

  /** Restores the fields written by {@link #toBinary()}; an empty remainder means a null binId. */
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    minCoordinate = VarintUtils.readUnsignedLong(buf);
    maxCoordinate = VarintUtils.readUnsignedLong(buf);
    if (buf.remaining() > 0) {
      binId = new byte[buf.remaining()];
      buf.get(binId);
    } else {
      binId = null;
    }
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/CustomIndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.util.function.BiPredicate;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * This interface is the most straightforward mechanism to add custom indexing of any arbitrary
 * logic to a GeoWave data store. This can simply be two functions that tell GeoWave how to index an
 * entry on ingest and how to query the index based on a custom constraints type.
 *
 * @param <E> The entry type (such as SimpleFeature, GridCoverage, or whatever type the adapter
 *        uses)
 * @param <C> The custom constraints type, can be any arbitrary type, although should be persistable
 *        so that it can work outside of just client code (such as server-side filtering,
 *        map-reduce, or spark)
 */
// NOTE(review): the generic type parameter declarations appear to have been stripped from this
// file during extraction — E and C are referenced below but never declared (likely
// "CustomIndexStrategy<E, C>"), and PersistableBiPredicate/BiPredicate below are raw. Confirm
// against the upstream source before compiling.
public interface CustomIndexStrategy extends Persistable {

  /**
   * This is the function that is called on ingest to tell GeoWave how to index the entry within
   * this custom index - the insertion IDs are a set of partition and sort keys, either of which
   * could be empty or null as needed (with the understanding that each partition key represents a
   * unique partition in the backend datastore)
   *
   * @param entry the entry to be indexed on ingest
   * @return the insertion IDs representing how to index the entry
   */
  InsertionIds getInsertionIds(E entry);

  /**
   * This is the function that is called on query, when given a query with the constraints type. The
   * constraints type can be any arbitrary type although should be persistable so that it can work
   * outside of just client code (such as server-side filtering, map-reduce, or spark).
   *
   * The query ranges are a set of partition keys and ranges of sort keys that fully include all
   * rows that may match the constraints.
   *
   * @param constraints the query constraints
   * @return query ranges that represent valid partition and ranges of sort keys that fully include
   *         all rows that may match the constraints
   */
  QueryRanges getQueryRanges(C constraints);

  /** @return the class of the constraints type (used to deserialize/validate constraints) */
  Class getConstraintsClass();

  /**
   * Optionally a custom index strategy can enable additional filtering beyond just the query ranges
   * (termed "fine-grained" filtering in documentation). This requires reading rows from disk and
   * evaluating a predicate so it is inherently slower than using query ranges but it is flexible
   * enough to handle any additional evaluation criteria required.
   *
   * @return A predicate that should be used for "fine-grained" filter evaluation, or null (the
   *         default) when no extra filtering is needed
   */
  default PersistableBiPredicate getFilter(final C constraints) {
    return null;
  }

  /** A persistable two-argument predicate used for fine-grained row filtering. */
  public static interface PersistableBiPredicate extends BiPredicate, Persistable {
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/FloatCompareUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; /** Convenience methods for comparing floating point values. */ public class FloatCompareUtils { public static final double COMP_EPSILON = 2.22E-16; /** * The == operator is not reliable for doubles, so we are using this method to check if two * doubles are equal * * @param x * @param y * @return true if the double are equal, false if they are not */ public static boolean checkDoublesEqual(final double x, final double y) { return checkDoublesEqual(x, y, COMP_EPSILON); } /** * The == operator is not reliable for doubles, so we are using this method to check if two * doubles are equal * * @param x * @param y * @param epsilon * @return true if the double are equal, false if they are not */ public static boolean checkDoublesEqual(final double x, final double y, final double epsilon) { boolean xNeg = false; boolean yNeg = false; final double diff = (Math.abs(x) - Math.abs(y)); if (x < 0.0) { xNeg = true; } if (y < 0.0) { yNeg = true; } return ((diff <= epsilon) && (diff >= -epsilon) && (xNeg == yNeg)); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/GeoWaveSerializationException.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; public class GeoWaveSerializationException extends RuntimeException { private static final long serialVersionUID = 7302723488358974170L; public GeoWaveSerializationException(final String message) { super(message); } public GeoWaveSerializationException(final Throwable cause) { super(cause); } public GeoWaveSerializationException(final String message, final Throwable cause) { super(message, cause); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/HierarchicalNumericIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.Arrays; /** * This interface defines a multi-tiered approach to indexing, in which a single strategy is reliant * on a set of sub-strategies */ public interface HierarchicalNumericIndexStrategy extends NumericIndexStrategy { public SubStrategy[] getSubStrategies(); public static class SubStrategy { private final NumericIndexStrategy indexStrategy; private final byte[] prefix; public SubStrategy(final NumericIndexStrategy indexStrategy, final byte[] prefix) { this.indexStrategy = indexStrategy; this.prefix = prefix; } public NumericIndexStrategy getIndexStrategy() { return indexStrategy; } public byte[] getPrefix() { return prefix; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((indexStrategy == null) ? 0 : indexStrategy.hashCode()); result = (prime * result) + Arrays.hashCode(prefix); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SubStrategy other = (SubStrategy) obj; if (indexStrategy == null) { if (other.indexStrategy != null) { return false; } } else if (!indexStrategy.equals(other.indexStrategy)) { return false; } if (!Arrays.equals(prefix, other.prefix)) { return false; } return true; } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; public interface IndexConstraints { public int getDimensionCount(); /** * Unconstrained? * * @return return if unconstrained on a dimension */ public boolean isEmpty(); } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import org.locationtech.geowave.core.index.persist.Persistable; /** * Represents a set of index data. */ public interface IndexData extends java.io.Serializable, Persistable { public T getMin(); public T getMax(); public boolean isMinInclusive(); public boolean isMaxInclusive(); public T getCentroid(); public boolean isRange(); } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexDimensionHint.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; /** * Provides a hint on an adapter field to tell GeoWave that the field should be used for a * particular type of index field. */ public class IndexDimensionHint { private final String hint; public IndexDimensionHint(final String hint) { this.hint = hint; } public String getHintString() { return hint; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof IndexDimensionHint)) { return false; } return hint.equals(((IndexDimensionHint) obj).hint); } @Override public int hashCode() { return hint.hashCode(); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexMetaData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import net.sf.json.JSONException;
import net.sf.json.JSONObject;

/**
 * Mergeable statistics maintained alongside an index, updated as insertion ids are added to or
 * removed from that index.
 */
public interface IndexMetaData extends Mergeable {
  /**
   * Update the aggregation result using the new entry provided
   *
   * @param insertionIds the new indices to compute an updated aggregation result on
   */
  public void insertionIdsAdded(InsertionIds insertionIds);

  /**
   * Update the aggregation result by removing the entries provided
   *
   * @param insertionIds the new indices to compute an updated aggregation result on
   */
  public void insertionIdsRemoved(InsertionIds insertionIds);

  /**
   * Create a JSON object that shows all the metadata handled by this object
   *
   * @throws JSONException if the JSON object cannot be built
   */
  public JSONObject toJSONObject() throws JSONException;
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexPersistableRegistry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import org.locationtech.geowave.core.index.CompoundIndexStrategy.CompoundIndexMetaDataWrapper;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.UnboundedDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BasicBinningStrategy;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.numeric.NumericValue;
import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;
import org.locationtech.geowave.core.index.persist.PersistableList;
import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;
import org.locationtech.geowave.core.index.sfc.BasicSFCIndexStrategy;
import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;
import org.locationtech.geowave.core.index.sfc.hilbert.HilbertSFC;
import org.locationtech.geowave.core.index.sfc.tiered.SingleTierSubStrategy;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy.TierIndexMetaData;
import org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexStrategy;
import org.locationtech.geowave.core.index.sfc.xz.XZHierarchicalIndexStrategy.XZHierarchicalIndexMetaData;
import org.locationtech.geowave.core.index.sfc.xz.XZOrderSFC;
import org.locationtech.geowave.core.index.sfc.zorder.ZOrderSFC;
import org.locationtech.geowave.core.index.simple.HashKeyIndexStrategy;
import org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleByteIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleDoubleIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleFloatIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleIntegerIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleLongIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleShortIndexStrategy;
import org.locationtech.geowave.core.index.text.BasicTextDataset;
import org.locationtech.geowave.core.index.text.EnumIndexStrategy;
import org.locationtech.geowave.core.index.text.EnumSearch;
import org.locationtech.geowave.core.index.text.ExplicitTextSearch;
import org.locationtech.geowave.core.index.text.TextIndexStrategy;
import org.locationtech.geowave.core.index.text.TextRange;
import org.locationtech.geowave.core.index.text.TextSearch;
import org.locationtech.geowave.core.index.text.TextSearchPredicate;
import org.locationtech.geowave.core.index.text.TextValue;

/**
 * Registry mapping a stable short id to the no-arg constructor of every Persistable type in the
 * core index module.
 *
 * NOTE(review): these short ids identify serialized classes — presumably written alongside stored
 * data, so renumbering or reusing an id would break deserialization of existing data; confirm
 * before changing any entry.
 */
public class IndexPersistableRegistry implements
    PersistableRegistrySpi,
    InternalPersistableRegistry {

  @Override
  public PersistableIdAndConstructor[] getSupportedPersistables() {
    return new PersistableIdAndConstructor[] {
        new PersistableIdAndConstructor((short) 100, CompoundIndexMetaDataWrapper::new),
        new PersistableIdAndConstructor((short) 101, TierIndexMetaData::new),
        new PersistableIdAndConstructor((short) 102, CompoundIndexStrategy::new),
        new PersistableIdAndConstructor((short) 103, CoordinateRange::new),
        new PersistableIdAndConstructor((short) 104, MultiDimensionalCoordinateRanges::new),
        new PersistableIdAndConstructor((short) 105, ArrayOfArrays::new),
        new PersistableIdAndConstructor((short) 106, MultiDimensionalCoordinateRangesArray::new),
        new PersistableIdAndConstructor((short) 107, NullNumericIndexStrategy::new),
        new PersistableIdAndConstructor((short) 108, NumericIndexStrategyWrapper::new),
        new PersistableIdAndConstructor((short) 109, BasicDimensionDefinition::new),
        new PersistableIdAndConstructor((short) 110, UnboundedDimensionDefinition::new),
        new PersistableIdAndConstructor((short) 111, SFCDimensionDefinition::new),
        new PersistableIdAndConstructor((short) 112, BasicNumericDataset::new),
        new PersistableIdAndConstructor((short) 113, BinnedNumericDataset::new),
        new PersistableIdAndConstructor((short) 114, NumericRange::new),
        new PersistableIdAndConstructor((short) 115, NumericValue::new),
        new PersistableIdAndConstructor((short) 116, HilbertSFC::new),
        new PersistableIdAndConstructor((short) 117, SingleTierSubStrategy::new),
        new PersistableIdAndConstructor((short) 118, TieredSFCIndexStrategy::new),
        new PersistableIdAndConstructor((short) 119, XZHierarchicalIndexStrategy::new),
        new PersistableIdAndConstructor((short) 120, XZOrderSFC::new),
        new PersistableIdAndConstructor((short) 121, ZOrderSFC::new),
        new PersistableIdAndConstructor((short) 122, HashKeyIndexStrategy::new),
        new PersistableIdAndConstructor((short) 123, RoundRobinKeyIndexStrategy::new),
        new PersistableIdAndConstructor((short) 124, SimpleIntegerIndexStrategy::new),
        new PersistableIdAndConstructor((short) 125, SimpleLongIndexStrategy::new),
        new PersistableIdAndConstructor((short) 126, SimpleShortIndexStrategy::new),
        new PersistableIdAndConstructor((short) 127, XZHierarchicalIndexMetaData::new),
        new PersistableIdAndConstructor((short) 128, InsertionIds::new),
        new PersistableIdAndConstructor((short) 129, PartitionIndexStrategyWrapper::new),
        new PersistableIdAndConstructor((short) 130, SinglePartitionInsertionIds::new),
        new PersistableIdAndConstructor((short) 131, SimpleFloatIndexStrategy::new),
        new PersistableIdAndConstructor((short) 132, SimpleDoubleIndexStrategy::new),
        new PersistableIdAndConstructor((short) 133, SimpleByteIndexStrategy::new),
        new PersistableIdAndConstructor((short) 134, BasicSFCIndexStrategy::new),
        new PersistableIdAndConstructor((short) 135, TextSearch::new),
        new PersistableIdAndConstructor((short) 136, TextSearchPredicate::new),
        new PersistableIdAndConstructor((short) 137, TextIndexStrategy::new),
        new PersistableIdAndConstructor((short) 138, EnumIndexStrategy::new),
        new PersistableIdAndConstructor((short) 139, EnumSearch::new),
        new PersistableIdAndConstructor((short) 140, BasicBinningStrategy::new),
        new PersistableIdAndConstructor((short) 141, BasicTextDataset::new),
        new PersistableIdAndConstructor((short) 142, TextRange::new),
        new PersistableIdAndConstructor((short) 143, TextValue::new),
        new PersistableIdAndConstructor((short) 144, ExplicitTextSearch::new),
        new PersistableIdAndConstructor((short) 145, PersistableList::new)};
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.List; import org.locationtech.geowave.core.index.persist.Persistable; /** Interface which defines an index strategy. */ public interface IndexStrategy extends Persistable { public List createMetaData(); /** @return a unique ID associated with the index strategy */ public String getId(); } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/IndexUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.util.Arrays;
import java.util.List;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.lexicoder.NumberLexicoder;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.numeric.NumericValue;
import org.locationtech.geowave.core.index.simple.SimpleNumericIndexStrategy;

/** Utility methods for index bounds, bit-precision estimates, and sort-key bit arithmetic. */
public class IndexUtils {

  /** @return the full numeric bounds covered by the strategy's ordered dimensions */
  public static MultiDimensionalNumericData getFullBounds(
      final NumericIndexStrategy indexStrategy) {
    return getFullBounds(indexStrategy.getOrderedDimensionDefinitions());
  }

  /**
   * Clamps each dimension of the given data to the bounds of the corresponding dimension of the
   * index strategy. Range values have min and max clamped independently; point values are clamped
   * into the dimension's bounds. Returns the original object untouched when no dimension needed
   * clamping.
   */
  public static MultiDimensionalNumericData clampAtIndexBounds(
      final MultiDimensionalNumericData data,
      final NumericIndexStrategy indexStrategy) {
    final NumericDimensionDefinition[] dimensions = indexStrategy.getOrderedDimensionDefinitions();
    final NumericData[] dataPerDimension = data.getDataPerDimension();
    boolean clamped = false;
    for (int d = 0; d < dimensions.length; d++) {
      final NumericRange dimensionBounds = dimensions[d].getBounds();
      if (dataPerDimension[d].isRange()) {
        boolean dimensionClamped = false;
        double min, max;
        if (dataPerDimension[d].getMin() < dimensionBounds.getMin()) {
          min = dimensionBounds.getMin();
          dimensionClamped = true;
        } else {
          min = dataPerDimension[d].getMin();
        }
        if (dataPerDimension[d].getMax() > dimensionBounds.getMax()) {
          max = dimensionBounds.getMax();
          dimensionClamped = true;
        } else {
          max = dataPerDimension[d].getMax();
        }
        if (dimensionClamped) {
          dataPerDimension[d] = new NumericRange(min, max);
          clamped = true;
        }
      } else if ((dataPerDimension[d].getMin() < dimensionBounds.getMin())
          || (dataPerDimension[d].getMin() > dimensionBounds.getMax())) {
        // point value outside the bounds: clamp it into [min, max]
        dataPerDimension[d] =
            new NumericValue(
                Math.max(
                    Math.min(dataPerDimension[d].getMin(), dimensionBounds.getMax()),
                    dimensionBounds.getMin()));
        clamped = true;
      }
    }
    if (clamped) {
      return new BasicNumericDataset(dataPerDimension);
    }
    return data;
  }

  /**
   * Constraints that are empty indicate full table scan. A full table scan occurs if ANY one
   * dimension is unbounded.
   *
   * @param constraints
   * @return true if any one dimension is unbounded
   */
  // NOTE(review): as written, this returns false as soon as any constraint is empty and returns
  // true only when the constraint list itself is empty — which reads as the opposite of the
  // javadoc above. Also, the raw List appears to have lost its <MultiDimensionalNumericData>
  // parameter during extraction. Confirm intended semantics against the upstream source.
  public static final boolean isFullTableScan(final List constraints) {
    for (final MultiDimensionalNumericData constraint : constraints) {
      if (constraint.isEmpty()) {
        return false;
      }
    }
    return constraints.isEmpty();
  }

  /** @return the full numeric bounds (one range per dimension) for the given definitions */
  public static MultiDimensionalNumericData getFullBounds(
      final NumericDimensionDefinition[] dimensionDefinitions) {
    final NumericRange[] boundsPerDimension = new NumericRange[dimensionDefinitions.length];
    for (int d = 0; d < dimensionDefinitions.length; d++) {
      boundsPerDimension[d] = dimensionDefinitions[d].getBounds();
    }
    return new BasicNumericDataset(boundsPerDimension);
  }

  /**
   * Estimates how many leading sort-key bits a query over the given per-dimension data ranges
   * would constrain for this strategy: the minimum constrained-bit count over all dimensions and
   * bins, scaled by the dimension count. Returns 0 for an empty range array or when any
   * dimension's usage computes negative.
   */
  public static final double getDimensionalBitsUsed(
      final NumericIndexStrategy indexStrategy,
      final double[] dataRangePerDimension) {
    double result = Long.MAX_VALUE;
    if (dataRangePerDimension.length == 0) {
      return 0;
    }
    final double cellRangePerDimension[] = indexStrategy.getHighestPrecisionIdRangePerDimension();
    final double inflatedRangePerDimension[] =
        inflateRange(cellRangePerDimension, dataRangePerDimension);
    final double bitsPerDimension[] = getBitsPerDimension(indexStrategy, cellRangePerDimension);
    final BinRange[][] binsPerDimension =
        getBinsPerDimension(indexStrategy, inflatedRangePerDimension);
    final double[][] bitsFromTheRightPerDimension =
        getBitsFromTheRightPerDimension(binsPerDimension, cellRangePerDimension);
    // This ALWAYS chooses the index who dimension
    // cells cover the widest range thus fewest cells. In temporal, YEAR is
    // always chosen.
    // However, this is not necessarily bad. A smaller bin size may result
    // in more bins searched.
    // When searching across multiple bins associated with a dimension, The
    // first and last bin are
    // partial searches. The inner bins are 'full' scans over the bin.
    // Thus, smaller bin sizes could result more in more rows scanned.
    // On the flip, fewer larger less-granular bins can also have the same
    // result.
    // Bottom line: this is not straight forward
    // Example: YEAR
    // d[ 3600000.0]
    // cellRangePerDimension[30157.470702171326]
    // inflatedRangePerDimension[3618896.484260559]
    // bitsFromTheRightPerDimension[6.906890595608519]]
    // Example: DAY
    // cellRangePerDimension[ 2554.3212881088257]
    // inflatedRangePerDimension[ 3601593.016233444]
    // bitsFromTheRightPerDimension[ 10.461479447286157]]
    for (final double[] binnedBitsPerFromTheRightDimension : bitsFromTheRightPerDimension) {
      for (int d = 0; d < binnedBitsPerFromTheRightDimension.length; d++) {
        final double totalBitsUsed =
            (bitsPerDimension[d] - binnedBitsPerFromTheRightDimension[d]);
        if (totalBitsUsed < 0) {
          return 0;
        }
        result = Math.min(totalBitsUsed, result);
      }
    }
    // The least constraining dimension uses the least amount of bits of
    // fixed bits from the left.
    // For example, half of the world latitude is 1 bit, 1/4 of the world is
    // 2 bits etc.
    // Use the least constraining dimension, but multiply by the
    // # of dimensions.
    return Math.ceil(result + 1) * cellRangePerDimension.length;
  }

  /**
   * Rounds each data range up to a whole multiple of that dimension's cell range, so partial cells
   * count as fully covered.
   */
  public static double[] inflateRange(
      final double[] cellRangePerDimension,
      final double[] dataRangePerDimension) {
    final double[] result = new double[cellRangePerDimension.length];
    for (int d = 0; d < result.length; d++) {
      result[d] =
          Math.ceil(dataRangePerDimension[d] / cellRangePerDimension[d])
              * cellRangePerDimension[d];
    }
    return result;
  }

  /**
   * For every permutation of one bin per dimension, computes log2 of the number of cells that bin
   * spans in each dimension (0 for effectively-empty bins); i.e. the unconstrained low-order bits.
   */
  public static double[][] getBitsFromTheRightPerDimension(
      final BinRange[][] binsPerDimension,
      final double[] cellRangePerDimension) {
    int numBinnedQueries = 1;
    for (int d = 0; d < binsPerDimension.length; d++) {
      numBinnedQueries *= binsPerDimension[d].length;
    }
    // now we need to combine all permutations of bin ranges into
    // BinnedQuery objects
    final double[][] binnedQueries = new double[numBinnedQueries][];
    for (int d = 0; d < binsPerDimension.length; d++) {
      for (int b = 0; b < binsPerDimension[d].length; b++) {
        for (int i = b; i < numBinnedQueries; i += binsPerDimension[d].length) {
          if (binnedQueries[i] == null) {
            binnedQueries[i] = new double[binsPerDimension.length];
          }
          if ((binsPerDimension[d][b].getNormalizedMax()
              - binsPerDimension[d][b].getNormalizedMin()) <= 0.000000001) {
            binnedQueries[i][d] = 0;
          } else {
            binnedQueries[i][d] =
                log2(
                    Math.ceil(
                        (binsPerDimension[d][b].getNormalizedMax()
                            - binsPerDimension[d][b].getNormalizedMin())
                            / cellRangePerDimension[d]));
          }
        }
      }
    }
    return binnedQueries;
  }

  /**
   * Computes the bit position within the sort key (partition bytes excluded) at which subsampling
   * at the given resolution can stop distinguishing rows. Simple numeric strategies use their
   * lexicoder's value range directly; all other strategies fall back to
   * {@link #getDimensionalBitsUsed}.
   */
  public static int getBitPositionOnSortKeyFromSubsamplingArray(
      final NumericIndexStrategy indexStrategy,
      final double[] maxResolutionSubsamplingPerDimension) {
    if (indexStrategy instanceof SimpleNumericIndexStrategy) {
      final NumberLexicoder lexicoder =
          ((SimpleNumericIndexStrategy) indexStrategy).getLexicoder();
      // this may not work on floating point values
      // pre-scale to minimize floating point round-off errors
      final double minScaled =
          lexicoder.getMinimumValue().doubleValue() / maxResolutionSubsamplingPerDimension[0];
      final double maxScaled =
          lexicoder.getMaximumValue().doubleValue() / maxResolutionSubsamplingPerDimension[0];
      return (int) Math.round(Math.ceil(log2(maxScaled - minScaled)));
    }
    return (int) Math.ceil(
        getDimensionalBitsUsed(indexStrategy, maxResolutionSubsamplingPerDimension));
  }

  /**
   * Same as {@link #getBitPositionOnSortKeyFromSubsamplingArray} but offset by the partition key
   * length in bits, giving a position within the full row key.
   */
  public static int getBitPositionFromSubsamplingArray(
      final NumericIndexStrategy indexStrategy,
      final double[] maxResolutionSubsamplingPerDimension) {
    return getBitPositionOnSortKeyFromSubsamplingArray(
        indexStrategy,
        maxResolutionSubsamplingPerDimension) + (8 * indexStrategy.getPartitionKeyLength());
  }

  /**
   * Builds the smallest row key strictly greater than every key sharing the given row's first
   * (bitPosition + 1) bits: truncates to the affected bytes, sets all trailing bits, then
   * increments with carry. Returns an empty array for a non-positive bit position and null when
   * the increment overflows (no next row exists).
   */
  public static byte[] getNextRowForSkip(final byte[] row, final int bitPosition) {
    if (bitPosition <= 0) {
      return new byte[0];
    }
    // Calculate the number of full bytes affected by the bit position
    int numBytes = (bitPosition + 1) / 8;
    // Calculate the number of bits used in the last byte
    final int extraBits = (bitPosition + 1) % 8;
    // If there was a remainder, add 1 to the number of bytes
    final boolean isRemainder = extraBits > 0;
    if (isRemainder) {
      numBytes++;
    }
    // Copy affected bytes
    final byte[] rowCopy = Arrays.copyOf(row, numBytes);
    final int lastByte = rowCopy.length - 1;
    // Turn on all bits after the bit position
    if (isRemainder) {
      rowCopy[lastByte] |= 0xFF >> (extraBits);
    }
    // Increment the bit represented by the bit position
    for (int i = lastByte; i >= 0; i--) {
      rowCopy[i]++;
      if (rowCopy[i] != 0) {
        // Turn on all bits after the bit position
        if (isRemainder) {
          rowCopy[lastByte] |= 0xFF >> (extraBits);
        }
        return rowCopy;
      }
    }
    return null;
  }

  /** @return per-dimension log2 of how many cells of the given range fit in each dimension */
  private static final double[] getBitsPerDimension(
      final NumericIndexStrategy indexStrategy,
      final double[] rangePerDimension) {
    final NumericDimensionDefinition dim[] = indexStrategy.getOrderedDimensionDefinitions();
    final double result[] = new double[rangePerDimension.length];
    for (int d = 0; d < rangePerDimension.length; d++) {
      result[d] += Math.round(log2((dim[d].getRange() / rangePerDimension[d])));
    }
    return result;
  }

  private static final BinRange[][]
getBinsPerDimension( final NumericIndexStrategy indexStrategy, final double[] rangePerDimension) { final NumericDimensionDefinition dim[] = indexStrategy.getOrderedDimensionDefinitions(); final BinRange[][] result = new BinRange[rangePerDimension.length][]; for (int d = 0; d < rangePerDimension.length; d++) { final BinRange[] ranges = dim[d].getNormalizedRanges(new NumericRange(0, rangePerDimension[d])); result[d] = ranges; } return result; } private static double log2(final double v) { return Math.log(v) / Math.log(2); } public static byte[][] getQueryPartitionKeys( final NumericIndexStrategy strategy, final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { final QueryRanges queryRanges = strategy.getQueryRanges(queryData, hints); return queryRanges.getPartitionQueryRanges().stream().map( input -> input.getPartitionKey()).toArray(i -> new byte[i][]); } public static byte[][] getInsertionPartitionKeys( final NumericIndexStrategy strategy, final MultiDimensionalNumericData insertionData) { final InsertionIds insertionIds = strategy.getInsertionIds(insertionData); return insertionIds.getPartitionKeys().stream().map(input -> input.getPartitionKey()).toArray( i -> new byte[i][]); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/InsertionIds.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.persist.Persistable; import com.google.common.base.Function; import com.google.common.collect.Collections2; public class InsertionIds implements Persistable { private Collection partitionKeys; private List compositeInsertionIds; private int size = -1; public InsertionIds() { partitionKeys = new ArrayList<>(); } public InsertionIds(final List sortKeys) { this(new SinglePartitionInsertionIds(null, sortKeys)); } public InsertionIds(final byte[] partitionKey) { this(new SinglePartitionInsertionIds(partitionKey)); } public InsertionIds(final byte[] partitionKey, final List sortKeys) { this(new SinglePartitionInsertionIds(partitionKey, sortKeys)); } public InsertionIds(final SinglePartitionInsertionIds singePartitionKey) { this(Arrays.asList(singePartitionKey)); } public InsertionIds(final Collection partitionKeys) { this.partitionKeys = partitionKeys; } public Collection getPartitionKeys() { return partitionKeys; } public boolean isEmpty() { if (compositeInsertionIds != null) { return compositeInsertionIds.isEmpty(); } if ((partitionKeys == null) || partitionKeys.isEmpty()) { return true; } return false; } public boolean hasDuplicates() { if (compositeInsertionIds != null) { return compositeInsertionIds.size() >= 2; } if ((partitionKeys == null) || partitionKeys.isEmpty()) { return 
false; } if (partitionKeys.size() > 1) { return true; } final SinglePartitionInsertionIds partition = partitionKeys.iterator().next(); if ((partition.getSortKeys() == null) || (partition.getSortKeys().size() <= 1)) { return false; } return true; } public int getSize() { if (size >= 0) { return size; } if (compositeInsertionIds != null) { size = compositeInsertionIds.size(); return size; } if ((partitionKeys == null) || partitionKeys.isEmpty()) { size = 0; return size; } int internalSize = 0; for (final SinglePartitionInsertionIds k : partitionKeys) { final List i = k.getCompositeInsertionIds(); if ((i != null) && !i.isEmpty()) { internalSize += i.size(); } } size = internalSize; return size; } public QueryRanges asQueryRanges() { return new QueryRanges(Collections2.transform(partitionKeys, input -> { return new SinglePartitionQueryRanges( input.getPartitionKey(), Collections2.transform(input.getSortKeys(), new Function() { @Override public ByteArrayRange apply(final byte[] input) { return new ByteArrayRange(input, input, false); } })); })); } public List getCompositeInsertionIds() { if (compositeInsertionIds != null) { return compositeInsertionIds; } if ((partitionKeys == null) || partitionKeys.isEmpty()) { return Collections.EMPTY_LIST; } final List internalCompositeInsertionIds = new ArrayList<>(); for (final SinglePartitionInsertionIds k : partitionKeys) { final List i = k.getCompositeInsertionIds(); if ((i != null) && !i.isEmpty()) { internalCompositeInsertionIds.addAll(i); } } compositeInsertionIds = internalCompositeInsertionIds; return compositeInsertionIds; } public Pair getFirstPartitionAndSortKeyPair() { if (partitionKeys == null) { return null; } for (final SinglePartitionInsertionIds p : partitionKeys) { if ((p.getSortKeys() != null) && !p.getSortKeys().isEmpty()) { return new ImmutablePair<>(p.getPartitionKey(), p.getSortKeys().get(0)); } else if ((p.getPartitionKey() != null)) { return new ImmutablePair<>(p.getPartitionKey(), null); } } return null; } 
@Override public byte[] toBinary() { if ((partitionKeys != null) && !partitionKeys.isEmpty()) { final List partitionKeysBinary = new ArrayList<>(partitionKeys.size()); int totalSize = VarintUtils.unsignedIntByteLength(partitionKeys.size()); for (final SinglePartitionInsertionIds id : partitionKeys) { final byte[] binary = id.toBinary(); totalSize += (VarintUtils.unsignedIntByteLength(binary.length) + binary.length); partitionKeysBinary.add(binary); } final ByteBuffer buf = ByteBuffer.allocate(totalSize); VarintUtils.writeUnsignedInt(partitionKeys.size(), buf); for (final byte[] binary : partitionKeysBinary) { VarintUtils.writeUnsignedInt(binary.length, buf); buf.put(binary); } return buf.array(); } else { return ByteBuffer.allocate(4).putInt(0).array(); } } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int size = VarintUtils.readUnsignedInt(buf); if (size > 0) { partitionKeys = new ArrayList<>(size); for (int i = 0; i < size; i++) { final int length = VarintUtils.readUnsignedInt(buf); final byte[] pBytes = ByteArrayUtils.safeRead(buf, length); final SinglePartitionInsertionIds pId = new SinglePartitionInsertionIds(); pId.fromBinary(pBytes); partitionKeys.add(pId); } } else { partitionKeys = new ArrayList<>(); } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/Mergeable.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * A {@link Persistable} whose instances can be combined with one another (e.g. partial results
 * accumulated from multiple sources). The exact merge semantics are implementation-defined.
 */
public interface Mergeable extends Persistable {
  /**
   * Fold the state of the given object into this one. Implementations define what "merging"
   * means for their own state; callers should pass an instance of a compatible type.
   *
   * @param merge the object whose state is merged into this instance
   */
  public void merge(Mergeable merge);
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalCoordinateRanges.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.persist.Persistable; public class MultiDimensionalCoordinateRanges implements Persistable { // this is a generic placeholder for "tiers" private byte[] multiDimensionalId; private CoordinateRange[][] coordinateRangesPerDimension; public MultiDimensionalCoordinateRanges() { coordinateRangesPerDimension = new CoordinateRange[][] {}; } public MultiDimensionalCoordinateRanges( final byte[] multiDimensionalPrefix, final CoordinateRange[][] coordinateRangesPerDimension) { multiDimensionalId = multiDimensionalPrefix; this.coordinateRangesPerDimension = coordinateRangesPerDimension; } public byte[] getMultiDimensionalId() { return multiDimensionalId; } public int getNumDimensions() { return coordinateRangesPerDimension.length; } public CoordinateRange[] getRangeForDimension(final int dimension) { return coordinateRangesPerDimension[dimension]; } @Override public byte[] toBinary() { final List serializedRanges = new ArrayList<>(); final int idLength = (multiDimensionalId == null ? 
0 : multiDimensionalId.length); int byteLength = VarintUtils.unsignedIntByteLength(idLength) + idLength; byteLength += VarintUtils.unsignedIntByteLength(coordinateRangesPerDimension.length); final int[] numPerDimension = new int[getNumDimensions()]; for (final int num : numPerDimension) { byteLength += VarintUtils.unsignedIntByteLength(num); } int d = 0; for (final CoordinateRange[] dim : coordinateRangesPerDimension) { numPerDimension[d++] = dim.length; for (final CoordinateRange range : dim) { final byte[] serializedRange = range.toBinary(); byteLength += (serializedRange.length + VarintUtils.unsignedIntByteLength(serializedRange.length)); serializedRanges.add(serializedRange); } } final ByteBuffer buf = ByteBuffer.allocate(byteLength); VarintUtils.writeUnsignedInt(idLength, buf); if (idLength > 0) { buf.put(multiDimensionalId); } VarintUtils.writeUnsignedInt(coordinateRangesPerDimension.length, buf); for (final int num : numPerDimension) { VarintUtils.writeUnsignedInt(num, buf); } for (final byte[] serializedRange : serializedRanges) { VarintUtils.writeUnsignedInt(serializedRange.length, buf); buf.put(serializedRange); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int idLength = VarintUtils.readUnsignedInt(buf); if (idLength > 0) { multiDimensionalId = ByteArrayUtils.safeRead(buf, idLength); } else { multiDimensionalId = null; } coordinateRangesPerDimension = new CoordinateRange[VarintUtils.readUnsignedInt(buf)][]; for (int d = 0; d < coordinateRangesPerDimension.length; d++) { coordinateRangesPerDimension[d] = new CoordinateRange[VarintUtils.readUnsignedInt(buf)]; } for (int d = 0; d < coordinateRangesPerDimension.length; d++) { for (int i = 0; i < coordinateRangesPerDimension[d].length; i++) { final byte[] serializedRange = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); coordinateRangesPerDimension[d][i] = new CoordinateRange(); 
coordinateRangesPerDimension[d][i].fromBinary(serializedRange); } } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalCoordinateRangesArray.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.persist.Persistable; public class MultiDimensionalCoordinateRangesArray implements Persistable { private MultiDimensionalCoordinateRanges[] rangesArray; public MultiDimensionalCoordinateRangesArray() {} public MultiDimensionalCoordinateRangesArray( final MultiDimensionalCoordinateRanges[] rangesArray) { this.rangesArray = rangesArray; } public MultiDimensionalCoordinateRanges[] getRangesArray() { return rangesArray; } @Override public byte[] toBinary() { final byte[][] rangesBinaries = new byte[rangesArray.length][]; int binaryLength = VarintUtils.unsignedIntByteLength(rangesBinaries.length); for (int i = 0; i < rangesArray.length; i++) { rangesBinaries[i] = rangesArray[i].toBinary(); binaryLength += (VarintUtils.unsignedIntByteLength(rangesBinaries[i].length) + rangesBinaries[i].length); } final ByteBuffer buf = ByteBuffer.allocate(binaryLength); VarintUtils.writeUnsignedInt(rangesBinaries.length, buf); for (final byte[] rangesBinary : rangesBinaries) { VarintUtils.writeUnsignedInt(rangesBinary.length, buf); buf.put(rangesBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); rangesArray = new MultiDimensionalCoordinateRanges[VarintUtils.readUnsignedInt(buf)]; for (int i = 0; i < rangesArray.length; i++) { final byte[] rangesBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); rangesArray[i] = new MultiDimensionalCoordinateRanges(); rangesArray[i].fromBinary(rangesBinary); } } public static class 
ArrayOfArrays implements Persistable { private MultiDimensionalCoordinateRangesArray[] coordinateArrays; public ArrayOfArrays() {} public ArrayOfArrays(final MultiDimensionalCoordinateRangesArray[] coordinateArrays) { this.coordinateArrays = coordinateArrays; } public MultiDimensionalCoordinateRangesArray[] getCoordinateArrays() { return coordinateArrays; } @Override public byte[] toBinary() { final byte[][] rangesBinaries = new byte[coordinateArrays.length][]; int binaryLength = VarintUtils.unsignedIntByteLength(rangesBinaries.length); for (int i = 0; i < coordinateArrays.length; i++) { rangesBinaries[i] = coordinateArrays[i].toBinary(); binaryLength += (VarintUtils.unsignedIntByteLength(rangesBinaries[i].length) + rangesBinaries[i].length); } final ByteBuffer buf = ByteBuffer.allocate(binaryLength); VarintUtils.writeUnsignedInt(rangesBinaries.length, buf); for (final byte[] rangesBinary : rangesBinaries) { VarintUtils.writeUnsignedInt(rangesBinary.length, buf); buf.put(rangesBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int coordinateArrayLength = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, coordinateArrayLength); coordinateArrays = new MultiDimensionalCoordinateRangesArray[coordinateArrayLength]; for (int i = 0; i < coordinateArrayLength; i++) { final byte[] rangesBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); coordinateArrays[i] = new MultiDimensionalCoordinateRangesArray(); coordinateArrays[i].fromBinary(rangesBinary); } } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalCoordinates.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.util.Arrays;

/**
 * An immutable value object holding one integer {@code Coordinate} per dimension, qualified by a
 * multi-dimensional ID (a generic placeholder for tiers). Equality and hashing are defined over
 * both the ID and the per-dimension coordinates.
 */
public class MultiDimensionalCoordinates {
  // this is a generic placeholder for tiers
  private final byte[] multiDimensionalId;
  private final Coordinate[] coordinatePerDimension;

  /** Creates an empty (zero-dimension) instance. */
  public MultiDimensionalCoordinates() {
    this(new byte[] {}, new Coordinate[] {});
  }

  public MultiDimensionalCoordinates(
      final byte[] multiDimensionalId,
      final Coordinate[] coordinatePerDimension) {
    this.multiDimensionalId = multiDimensionalId;
    this.coordinatePerDimension = coordinatePerDimension;
  }

  public byte[] getMultiDimensionalId() {
    return multiDimensionalId;
  }

  /** @param dimension the zero-based dimension index */
  public Coordinate getCoordinate(final int dimension) {
    return coordinatePerDimension[dimension];
  }

  public int getNumDimensions() {
    return coordinatePerDimension.length;
  }

  @Override
  public int hashCode() {
    // same 31-based combination as the classic prime-accumulator loop
    int hash = 31 + Arrays.hashCode(coordinatePerDimension);
    return (31 * hash) + Arrays.hashCode(multiDimensionalId);
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if ((obj == null) || (getClass() != obj.getClass())) {
      return false;
    }
    final MultiDimensionalCoordinates that = (MultiDimensionalCoordinates) obj;
    return Arrays.equals(coordinatePerDimension, that.coordinatePerDimension)
        && Arrays.equals(multiDimensionalId, that.multiDimensionalId);
  }
}


================================================
FILE:
core/index/src/main/java/org/locationtech/geowave/core/index/MultiDimensionalIndexData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import org.locationtech.geowave.core.index.persist.Persistable; /** Interface which defines the methods associated with a multi-dimensional index data range. */ public interface MultiDimensionalIndexData extends IndexConstraints, Persistable { public IndexData[] getDataPerDimension(); public T[] getMaxValuesPerDimension(); public T[] getMinValuesPerDimension(); public T[] getCentroidPerDimension(); } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/NullNumericIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;

/**
 * This is a completely empty numeric index strategy representing no dimensions, and always
 * returning empty IDs and ranges. It can be used in cases when the data is "indexed" by another
 * means, and not using multi-dimensional numeric data.
 */
public class NullNumericIndexStrategy implements NumericIndexStrategy {
  // the strategy's only state: its identifier, also the entire serialized form
  private String id;

  // no-arg constructor required for Persistable-style deserialization
  protected NullNumericIndexStrategy() {
    super();
  }

  public NullNumericIndexStrategy(final String id) {
    this.id = id;
  }

  // serialized form is simply the binary encoding of the id string
  @Override
  public byte[] toBinary() {
    return StringUtils.stringToBinary(id);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    id = StringUtils.stringFromBinary(bytes);
  }

  // delegates to the decomposition overload; -1 signals an unconstrained decomposition
  // (hints are irrelevant for a dimensionless strategy)
  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final IndexMetaData... hints) {
    return getQueryRanges(indexedRange, -1);
  }

  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final int maxRangeDecomposition,
      final IndexMetaData... hints) {
    // a null return here should be interpreted as negative to positive
    // infinite
    return new QueryRanges(null, null);
  }

  // delegates with a single allowed duplicate insertion ID
  @Override
  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
    return getInsertionIds(indexedData, 1);
  }

  @Override
  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {
    // there are no dimensions so return an empty array
    return new NumericDimensionDefinition[] {};
  }

  @Override
  public String getId() {
    return id;
  }

  @Override
  public MultiDimensionalNumericData getRangeForId(
      final byte[] partitionKey,
      final byte[] sortKey) {
    // a null return here should be interpreted as negative to positive
    // infinite
    return null;
  }

  @Override
  public double[] getHighestPrecisionIdRangePerDimension() {
    // there are no dimensions so return an empty array
    return new double[] {};
  }

  @Override
  public MultiDimensionalCoordinates getCoordinatesPerDimension(
      final byte[] partitionKey,
      final byte[] sortKey) {
    // there are no dimensions so return an empty array
    return new MultiDimensionalCoordinates();
  }

  @Override
  public InsertionIds getInsertionIds(
      final MultiDimensionalNumericData indexedData,
      final int maxDuplicateInsertionIds) {
    // return a single empty sort key as the ID
    return new InsertionIds(null, Collections.singletonList(new byte[0]));
  }

  // no tier/bin prefix is ever written by this strategy
  @Override
  public int getPartitionKeyLength() {
    return 0;
  }

  // NOTE(review): the List type parameter appears stripped by extraction; upstream is likely
  // List<IndexMetaData> -- confirm against the repository
  @Override
  public List createMetaData() {
    return Collections.emptyList();
  }

  @Override
  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
      final MultiDimensionalNumericData dataRange,
      final IndexMetaData... hints) {
    // a single empty (zero-dimension) set of coordinate ranges
    return new MultiDimensionalCoordinateRanges[] {new MultiDimensionalCoordinateRanges()};
  }

  // null partition keys: this strategy defines no partitioning
  @Override
  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
    return null;
  }

  @Override
  public byte[][] getQueryPartitionKeys(
      final MultiDimensionalNumericData queryData,
      final IndexMetaData... hints) {
    return null;
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/NumericIndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;

/** Interface which defines a numeric index strategy. */
// NOTE(review): type parameters of SortedIndexStrategy/PartitionIndexStrategy appear to have
// been stripped by the extraction -- confirm against the repository
public interface NumericIndexStrategy extends SortedIndexStrategy, PartitionIndexStrategy {
  /**
   * Return an integer coordinate in each dimension for the given partition and sort key plus a bin
   * ID if that dimension is continuous.
   *
   * @param partitionKey the partition key to determine the coordinates for
   * @param sortKey the sort key to determine the coordinates for
   * @return the integer coordinate that the given insertion ID represents and associated bin ID if
   *         that dimension is continuous
   */
  public MultiDimensionalCoordinates getCoordinatesPerDimension(
      byte[] partitionKey,
      byte[] sortKey);

  /**
   * Return an integer coordinate range in each dimension for the given data range plus a bin ID if
   * that dimension is continuous
   *
   * @param dataRange the range to determine the coordinates for
   * @param hints index hints
   * @return the integer coordinate ranges that the given data ID represents and associated bin IDs
   *         if a dimension is continuous
   */
  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
      MultiDimensionalNumericData dataRange,
      IndexMetaData... hints);

  /**
   * Returns an array of dimension definitions that defines this index strategy, the array is in
   * the order that is expected within multidimensional numeric data that is passed to this index
   * strategy
   *
   * @return the ordered array of dimension definitions that represents this index strategy
   */
  public NumericDimensionDefinition[] getOrderedDimensionDefinitions();

  /**
   * Get the range/size of a single insertion ID for each dimension at the highest precision
   * supported by this index strategy
   *
   * @return the range of a single insertion ID for each dimension
   */
  public double[] getHighestPrecisionIdRangePerDimension();

  /**
   * Get the offset in bytes before the dimensional index. This can account for tier IDs and bin
   * IDs
   *
   * @return the byte offset prior to the dimensional index
   */
  @Override
  public int getPartitionKeyLength();
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/NumericIndexStrategyWrapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.nio.ByteBuffer;
import java.util.List;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;

/**
 * Wraps a delegate {@link NumericIndexStrategy}, overriding only its ID; every indexing operation
 * is forwarded unchanged to the delegate.
 */
public class NumericIndexStrategyWrapper implements NumericIndexStrategy {
  // the overriding identifier reported by getId()
  private String id;
  // the delegate that performs all actual indexing work
  private NumericIndexStrategy indexStrategy;

  // no-arg constructor required for deserialization via fromBinary
  protected NumericIndexStrategyWrapper() {}

  public NumericIndexStrategyWrapper(final String id, final NumericIndexStrategy indexStrategy) {
    this.id = id;
    this.indexStrategy = indexStrategy;
  }

  @Override
  public String getId() {
    return id;
  }

  /**
   * Binary layout: varint id length, id bytes, then the delegate's persistable binary occupying
   * the remainder of the buffer (no length prefix needed for the trailing field).
   */
  @Override
  public byte[] toBinary() {
    final byte[] idBinary = StringUtils.stringToBinary(id);
    final byte[] delegateBinary = PersistenceUtils.toBinary(indexStrategy);
    final ByteBuffer buf =
        ByteBuffer.allocate(
            VarintUtils.unsignedIntByteLength(idBinary.length)
                + idBinary.length
                + delegateBinary.length);
    VarintUtils.writeUnsignedInt(idBinary.length, buf);
    buf.put(idBinary);
    buf.put(delegateBinary);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int idBinaryLength = VarintUtils.readUnsignedInt(buf);
    final byte[] idBinary = ByteArrayUtils.safeRead(buf, idBinaryLength);
    // the delegate's binary is everything after the id field
    final byte[] delegateBinary = new byte[buf.remaining()];
    buf.get(delegateBinary);
    id = StringUtils.stringFromBinary(idBinary);
    indexStrategy = (NumericIndexStrategy) PersistenceUtils.fromBinary(delegateBinary);
  }

  // ---- pure delegation from here down ----

  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final IndexMetaData... hints) {
    return indexStrategy.getQueryRanges(indexedRange, hints);
  }

  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final int maxRangeDecomposition,
      final IndexMetaData... hints) {
    return indexStrategy.getQueryRanges(indexedRange, maxRangeDecomposition, hints);
  }

  @Override
  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
    return indexStrategy.getInsertionIds(indexedData);
  }

  @Override
  public MultiDimensionalNumericData getRangeForId(
      final byte[] partitionKey,
      final byte[] sortKey) {
    return indexStrategy.getRangeForId(partitionKey, sortKey);
  }

  @Override
  public MultiDimensionalCoordinates getCoordinatesPerDimension(
      final byte[] partitionKey,
      final byte[] sortKey) {
    return indexStrategy.getCoordinatesPerDimension(partitionKey, sortKey);
  }

  @Override
  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {
    return indexStrategy.getOrderedDimensionDefinitions();
  }

  @Override
  public double[] getHighestPrecisionIdRangePerDimension() {
    return indexStrategy.getHighestPrecisionIdRangePerDimension();
  }

  @Override
  public InsertionIds getInsertionIds(
      final MultiDimensionalNumericData indexedData,
      final int maxDuplicateInsertionIds) {
    return indexStrategy.getInsertionIds(indexedData, maxDuplicateInsertionIds);
  }

  @Override
  public int getPartitionKeyLength() {
    return indexStrategy.getPartitionKeyLength();
  }

  @Override
  public List createMetaData() {
    return indexStrategy.createMetaData();
  }

  @Override
  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
      final MultiDimensionalNumericData dataRange,
      final IndexMetaData... hints) {
    return indexStrategy.getCoordinateRangesPerDimension(dataRange, hints);
  }

  @Override
  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
    return indexStrategy.getInsertionPartitionKeys(insertionData);
  }

  @Override
  public byte[][] getQueryPartitionKeys(
      final MultiDimensionalNumericData queryData,
      final IndexMetaData... hints) {
    return indexStrategy.getQueryPartitionKeys(queryData, hints);
  }

  @Override
  public byte[][] getPredefinedSplits() {
    return indexStrategy.getPredefinedSplits();
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/PartitionIndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; public interface PartitionIndexStrategy extends IndexStrategy { byte[][] getInsertionPartitionKeys(EntryRangeType insertionData); byte[][] getQueryPartitionKeys(QueryRangeType queryData, IndexMetaData... hints); /** * * Get the offset in bytes before the dimensional index. This can accounts for tier IDs and bin * IDs * * @return the byte offset prior to the dimensional index */ int getPartitionKeyLength(); default byte[][] getPredefinedSplits() { return new byte[0][]; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/PartitionIndexStrategyWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.util.List;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;

/**
 * Adapts a {@link PartitionIndexStrategy} to the {@link NumericIndexStrategy} interface. Only the
 * partition-related operations are functional; all sort-key/range operations are unimplemented
 * stubs that return {@code null} (see the TODO markers below).
 */
public class PartitionIndexStrategyWrapper implements NumericIndexStrategy {
  // NOTE(review): raw type — generic parameters appear lost in extraction; confirm upstream
  private PartitionIndexStrategy partitionIndexStrategy;

  /** No-arg constructor required for deserialization via {@link #fromBinary(byte[])}. */
  public PartitionIndexStrategyWrapper() {}

  public PartitionIndexStrategyWrapper(
      final PartitionIndexStrategy partitionIndexStrategy) {
    this.partitionIndexStrategy = partitionIndexStrategy;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final IndexMetaData... hints) {
    // TODO Auto-generated method stub
    return null;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final int maxEstimatedRangeDecomposition,
      final IndexMetaData... hints) {
    // TODO Auto-generated method stub
    return null;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
    // TODO Auto-generated method stub
    return null;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public InsertionIds getInsertionIds(
      final MultiDimensionalNumericData indexedData,
      final int maxEstimatedDuplicateIds) {
    // TODO Auto-generated method stub
    return null;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public MultiDimensionalNumericData getRangeForId(
      final byte[] partitionKey,
      final byte[] sortKey) {
    // TODO Auto-generated method stub
    return null;
  }

  @Override
  public String getId() {
    return partitionIndexStrategy.getId();
  }

  // NOTE(review): raw List — generic parameter likely lost in extraction; confirm upstream
  @Override
  public List createMetaData() {
    return partitionIndexStrategy.createMetaData();
  }

  /** Serialization simply persists the wrapped partition strategy. */
  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(partitionIndexStrategy);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    partitionIndexStrategy = (PartitionIndexStrategy) PersistenceUtils.fromBinary(
        bytes);
  }

  /** No sort-key dimensions exist, so this returns an empty coordinate set. */
  @Override
  public MultiDimensionalCoordinates getCoordinatesPerDimension(
      final byte[] partitionKey,
      final byte[] sortKey) {
    return new MultiDimensionalCoordinates();
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
      final MultiDimensionalNumericData dataRange,
      final IndexMetaData... hints) {
    return null;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {
    return null;
  }

  /** Unsupported for a partition-only strategy: always returns {@code null}. */
  @Override
  public double[] getHighestPrecisionIdRangePerDimension() {
    return null;
  }

  @Override
  public int getPartitionKeyLength() {
    return partitionIndexStrategy.getPartitionKeyLength();
  }

  @Override
  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
    return partitionIndexStrategy.getInsertionPartitionKeys(insertionData);
  }

  @Override
  public byte[][] getQueryPartitionKeys(
      final MultiDimensionalNumericData queryData,
      final IndexMetaData... hints) {
    return partitionIndexStrategy.getQueryPartitionKeys(queryData, hints);
  }

  @Override
  public byte[][] getPredefinedSplits() {
    return partitionIndexStrategy.getPredefinedSplits();
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/QueryRanges.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.ByteArrayRange.MergeOperation; public class QueryRanges { private final Collection partitionRanges; private List compositeQueryRanges; public QueryRanges() { // this implies an infinite range partitionRanges = null; } public QueryRanges(final byte[][] partitionKeys, final QueryRanges queryRanges) { if ((queryRanges == null) || (queryRanges.partitionRanges == null) || queryRanges.partitionRanges.isEmpty()) { partitionRanges = fromPartitionKeys(partitionKeys); } else if ((partitionKeys == null) || (partitionKeys.length == 0)) { partitionRanges = queryRanges.partitionRanges; } else { partitionRanges = new ArrayList<>(partitionKeys.length * queryRanges.partitionRanges.size()); for (final byte[] partitionKey : partitionKeys) { for (final SinglePartitionQueryRanges sortKeyRange : queryRanges.partitionRanges) { byte[] newPartitionKey; if (partitionKey == null) { newPartitionKey = sortKeyRange.getPartitionKey(); } else if (sortKeyRange.getPartitionKey() == null) { newPartitionKey = partitionKey; } else { newPartitionKey = ByteArrayUtils.combineArrays(partitionKey, sortKeyRange.getPartitionKey()); } partitionRanges.add( new SinglePartitionQueryRanges(newPartitionKey, sortKeyRange.getSortKeyRanges())); } } } } public QueryRanges(final List queryRangesList) { // group by partition final Map> 
sortRangesPerPartition = new HashMap<>(); for (final QueryRanges qr : queryRangesList) { for (final SinglePartitionQueryRanges r : qr.getPartitionQueryRanges()) { final Collection ranges = sortRangesPerPartition.get(new ByteArray(r.getPartitionKey())); if (ranges == null) { sortRangesPerPartition.put( new ByteArray(r.getPartitionKey()), new ArrayList<>(r.getSortKeyRanges())); } else { ranges.addAll(r.getSortKeyRanges()); } } } partitionRanges = new ArrayList<>(sortRangesPerPartition.size()); for (final Entry> e : sortRangesPerPartition.entrySet()) { Collection mergedRanges; if (e.getValue() != null) { mergedRanges = ByteArrayRange.mergeIntersections(e.getValue(), MergeOperation.UNION); } else { mergedRanges = null; } partitionRanges.add(new SinglePartitionQueryRanges(e.getKey().getBytes(), mergedRanges)); } } public QueryRanges(final Collection partitionRanges) { this.partitionRanges = partitionRanges; } public QueryRanges(final ByteArrayRange singleSortKeyRange) { partitionRanges = Collections.singletonList(new SinglePartitionQueryRanges(singleSortKeyRange)); } public QueryRanges(final byte[][] partitionKeys) { partitionRanges = fromPartitionKeys(partitionKeys); } public boolean isEmpty() { return partitionRanges == null || partitionRanges.size() == 0; } private static Collection fromPartitionKeys( final byte[][] partitionKeys) { if (partitionKeys == null) { return null; } return Arrays.stream(partitionKeys).map(input -> new SinglePartitionQueryRanges(input)).collect( Collectors.toList()); } public Collection getPartitionQueryRanges() { return partitionRanges; } public List getCompositeQueryRanges() { if (partitionRanges == null) { return null; } if (compositeQueryRanges != null) { return compositeQueryRanges; } if (partitionRanges.isEmpty()) { compositeQueryRanges = new ArrayList<>(); return compositeQueryRanges; } final List internalQueryRanges = new ArrayList<>(); for (final SinglePartitionQueryRanges partition : partitionRanges) { if 
((partition.getSortKeyRanges() == null) || partition.getSortKeyRanges().isEmpty()) { internalQueryRanges.add( new ByteArrayRange(partition.getPartitionKey(), partition.getPartitionKey())); } else if (partition.getPartitionKey() == null) { internalQueryRanges.addAll(partition.getSortKeyRanges()); } else { for (final ByteArrayRange sortKeyRange : partition.getSortKeyRanges()) { internalQueryRanges.add( new ByteArrayRange( ByteArrayUtils.combineArrays( partition.getPartitionKey(), sortKeyRange.getStart()), ByteArrayUtils.combineArrays(partition.getPartitionKey(), sortKeyRange.getEnd()), sortKeyRange.singleValue)); } } } compositeQueryRanges = internalQueryRanges; return compositeQueryRanges; } public boolean isMultiRange() { if (compositeQueryRanges != null) { return compositeQueryRanges.size() >= 2; } if (partitionRanges.isEmpty()) { return false; } if (partitionRanges.size() > 1) { return true; } final SinglePartitionQueryRanges partition = partitionRanges.iterator().next(); if ((partition.getSortKeyRanges() != null) && (partition.getSortKeyRanges().size() <= 1)) { return false; } return true; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/SPIServiceRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; import javax.imageio.spi.ServiceRegistry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Compensate for VFSClassloader's failure to discovery SPI registered classes (used by JBOSS and * Accumulo). * *

To Use: * *

(1) Register class loaders: * *

(2) Look up SPI providers: * *

final Iterator serializationProviders = new * SPIServiceRegistry(FieldSerializationProviderSpi.class).load( * FieldSerializationProviderSpi.class); */ public class SPIServiceRegistry extends ServiceRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(SPIServiceRegistry.class); @SuppressWarnings("unchecked") public SPIServiceRegistry(final Class category) { super((Iterator) Arrays.asList(category).iterator()); } public SPIServiceRegistry(final Iterator> categories) { super(categories); } private static final Set ClassLoaders = Collections.synchronizedSet(new HashSet()); private final Set localClassLoaders = Collections.synchronizedSet(new HashSet()); public static void registerClassLoader(final ClassLoader loader) { ClassLoaders.add(loader); } public void registerLocalClassLoader(final ClassLoader loader) { localClassLoaders.add(loader); } public Iterator load(final Class service) { final Set checkset = new HashSet<>(); final Set clSet = getClassLoaders(); final Iterator loaderIt = clSet.iterator(); return new Iterator() { Iterator spiIT = null; @Override public boolean hasNext() { while (((spiIT == null) || !spiIT.hasNext()) && (loaderIt.hasNext())) { final ClassLoader l = loaderIt.next(); if (checkset.contains(l)) { continue; } checkset.add(l); spiIT = ServiceRegistry.lookupProviders(service, l); } return (spiIT != null) && spiIT.hasNext(); } @Override public T next() { return spiIT.next(); } @Override public void remove() {} }; } /** * Returns all class loaders to be used for scanning plugins. The following class loaders are * always included in the search: * *

* *

  • {@linkplain Class#getClassLoader This object class loader} *
  • {@linkplain Thread#getContextClassLoader The thread context class loader} *
  • {@linkplain ClassLoader#getSystemClassLoader The system class loader}
* * Both locally registered (this instance) and globally registered classloaders are included it * the search. * *

Redundancies and parent classloaders are removed where possible. Possible error conditions * include security exceptions. Security exceptions are not logger UNLESS the set of searchable * classloaders is empty. * * @return Classloaders to be used for scanning plugins. */ public final Set getClassLoaders() { final List exceptions = new LinkedList<>(); final Set loaders = new HashSet<>(); try { final ClassLoader cl = SPIServiceRegistry.class.getClassLoader(); if (cl != null) { loaders.add(cl); } } catch (final SecurityException ex) { LOGGER.warn("Unable to get the class loader", ex); exceptions.add("SPIServiceRegistry's class loader : " + ex.getLocalizedMessage()); } try { final ClassLoader cl = ClassLoader.getSystemClassLoader(); if (cl != null) { loaders.add(cl); } } catch (final SecurityException ex) { LOGGER.warn("Unable to get the system class loader", ex); exceptions.add("System class loader : " + ex.getLocalizedMessage()); } try { final ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl != null) { loaders.add(cl); } } catch (final SecurityException ex) { LOGGER.warn("Unable to get the context class loader", ex); exceptions.add("Thread's class loader : " + ex.getLocalizedMessage()); } loaders.addAll(ClassLoaders); loaders.addAll(localClassLoaders); /** Remove those loaders that are parents to other loaders. */ final ClassLoader[] loaderSet = loaders.toArray(new ClassLoader[loaders.size()]); for (int i = 0; i < loaderSet.length; i++) { ClassLoader parent = loaderSet[i].getParent(); try { while (parent != null) { loaders.remove(parent); parent = parent.getParent(); } } catch (final SecurityException ex) { LOGGER.warn("Unable to get the class loader", ex); exceptions.add( loaderSet[i].toString() + "'s parent class loader : " + ex.getLocalizedMessage()); } } if (loaders.isEmpty()) { LOGGER.warn("No class loaders available. 
Check security exceptions (logged next)."); for (final String exString : exceptions) { LOGGER.warn(exString); } } return loaders; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/SinglePartitionInsertionIds.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.locationtech.geowave.core.index.persist.Persistable; public class SinglePartitionInsertionIds implements Persistable { private List compositeInsertionIds; private byte[] partitionKey; private List sortKeys; public SinglePartitionInsertionIds() {} public SinglePartitionInsertionIds(final byte[] partitionKey) { this(partitionKey, (byte[]) null); } public SinglePartitionInsertionIds(final byte[] partitionKey, final byte[] sortKey) { this.partitionKey = partitionKey; sortKeys = sortKey == null ? 
null : new ArrayList<>(Collections.singletonList(sortKey)); } public SinglePartitionInsertionIds( final byte[] partitionKey, final SinglePartitionInsertionIds insertionId2) { this(new SinglePartitionInsertionIds(partitionKey, (List) null), insertionId2); } public SinglePartitionInsertionIds( final SinglePartitionInsertionIds insertionId1, final SinglePartitionInsertionIds insertionId2) { partitionKey = ByteArrayUtils.combineArrays(insertionId1.partitionKey, insertionId2.partitionKey); if ((insertionId1.sortKeys == null) || insertionId1.sortKeys.isEmpty()) { sortKeys = insertionId2.sortKeys; } else if ((insertionId2.sortKeys == null) || insertionId2.sortKeys.isEmpty()) { sortKeys = insertionId1.sortKeys; } else { // use all permutations of range keys sortKeys = new ArrayList<>(insertionId1.sortKeys.size() * insertionId2.sortKeys.size()); for (final byte[] sortKey1 : insertionId1.sortKeys) { for (final byte[] sortKey2 : insertionId2.sortKeys) { sortKeys.add(ByteArrayUtils.combineArrays(sortKey1, sortKey2)); } } } } public SinglePartitionInsertionIds(final byte[] partitionKey, final List sortKeys) { this.partitionKey = partitionKey; this.sortKeys = sortKeys; } public List getCompositeInsertionIds() { if (compositeInsertionIds != null) { return compositeInsertionIds; } if ((sortKeys == null) || sortKeys.isEmpty()) { compositeInsertionIds = Arrays.asList(partitionKey); return compositeInsertionIds; } if (partitionKey == null) { compositeInsertionIds = sortKeys; return compositeInsertionIds; } final List internalInsertionIds = new ArrayList<>(sortKeys.size()); for (final byte[] sortKey : sortKeys) { internalInsertionIds.add(ByteArrayUtils.combineArrays(partitionKey, sortKey)); } compositeInsertionIds = internalInsertionIds; return compositeInsertionIds; } public byte[] getPartitionKey() { return partitionKey; } public List getSortKeys() { return sortKeys; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((partitionKey 
== null) ? 0 : Arrays.hashCode(partitionKey)); if (sortKeys != null) { for (final byte[] sortKey : sortKeys) { result = (prime * result) + (sortKey == null ? 0 : Arrays.hashCode(sortKey)); } } return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SinglePartitionInsertionIds other = (SinglePartitionInsertionIds) obj; if (partitionKey == null) { if (other.partitionKey != null) { return false; } } else if (!Arrays.equals(partitionKey, other.partitionKey)) { return false; } if (sortKeys == null) { if (other.sortKeys != null) { return false; } } else if (sortKeys.size() != other.sortKeys.size()) { return false; } else { final Iterator it1 = sortKeys.iterator(); final Iterator it2 = other.sortKeys.iterator(); while (it1.hasNext() && it2.hasNext()) { if ((!Arrays.equals(it1.next(), it2.next()))) { return false; } } } return true; } @Override public byte[] toBinary() { int pLength; if (partitionKey == null) { pLength = 0; } else { pLength = partitionKey.length; } int sSize; int byteBufferSize = VarintUtils.unsignedIntByteLength(pLength) + pLength; if (sortKeys == null) { sSize = 0; } else { sSize = sortKeys.size(); for (final byte[] sKey : sortKeys) { byteBufferSize += VarintUtils.unsignedIntByteLength(sKey.length) + sKey.length; } } byteBufferSize += VarintUtils.unsignedIntByteLength(sSize); final ByteBuffer buf = ByteBuffer.allocate(byteBufferSize); VarintUtils.writeUnsignedInt(pLength, buf); if (pLength > 0) { buf.put(partitionKey); } VarintUtils.writeUnsignedInt(sSize, buf); if (sSize > 0) { for (final byte[] sKey : sortKeys) { VarintUtils.writeUnsignedInt(sKey.length, buf); buf.put(sKey); } } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int pLength = VarintUtils.readUnsignedInt(buf); if (pLength > 0) { final byte[] pBytes = 
ByteArrayUtils.safeRead(buf, pLength); partitionKey = pBytes; } else { partitionKey = null; } final int sSize = VarintUtils.readUnsignedInt(buf); if (sSize > 0) { sortKeys = new ArrayList<>(sSize); for (int i = 0; i < sSize; i++) { final int keyLength = VarintUtils.readUnsignedInt(buf); final byte[] sortKey = ByteArrayUtils.safeRead(buf, keyLength); sortKeys.add(sortKey); } } else { sortKeys = null; } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/SinglePartitionQueryRanges.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.Collection; import java.util.Collections; import java.util.List; public class SinglePartitionQueryRanges { private final byte[] partitionKey; private final Collection sortKeyRanges; public SinglePartitionQueryRanges( final byte[] partitionKey, final Collection sortKeyRanges) { this.partitionKey = partitionKey; this.sortKeyRanges = sortKeyRanges; } public SinglePartitionQueryRanges(final byte[] partitionKey) { this.partitionKey = partitionKey; sortKeyRanges = null; } public SinglePartitionQueryRanges(final List sortKeyRanges) { this.sortKeyRanges = sortKeyRanges; partitionKey = null; } public SinglePartitionQueryRanges(final ByteArrayRange singleSortKeyRange) { sortKeyRanges = Collections.singletonList(singleSortKeyRange); partitionKey = null; } public byte[] getPartitionKey() { return partitionKey; } public Collection getSortKeyRanges() { return sortKeyRanges; } public ByteArrayRange getSingleRange() { byte[] start = null; byte[] end = null; for (final ByteArrayRange range : sortKeyRanges) { if ((start == null) || (ByteArrayUtils.compare(range.getStart(), start) < 0)) { start = range.getStart(); } if ((end == null) || (ByteArrayUtils.compare(range.getEnd(), end) > 0)) { end = range.getEnd(); } } return new ByteArrayRange(start, end); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/SortedIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; /** Interface which defines an index strategy. */ public interface SortedIndexStrategy extends IndexStrategy { /** * Returns a list of query ranges for an specified numeric range. * * @param indexedRange defines the numeric range for the query * @return a List of query ranges */ public QueryRanges getQueryRanges(QueryRangeType indexedRange, IndexMetaData... hints); /** * Returns a list of query ranges for an specified numeric range. * * @param indexedRange defines the numeric range for the query * @param maxEstimatedRangeDecomposition the maximum number of ranges provided by a single query * decomposition, this is a best attempt and not a guarantee * @return a List of query ranges */ public QueryRanges getQueryRanges( QueryRangeType indexedRange, int maxEstimatedRangeDecomposition, IndexMetaData... hints); /** * Returns a list of id's for insertion. The index strategy will use a reasonable default for the * maximum duplication of insertion IDs * * @param indexedData defines the numeric data to be indexed * @return a List of insertion ID's */ public InsertionIds getInsertionIds(EntryRangeType indexedData); /** * Returns a list of id's for insertion. 
* * @param indexedData defines the numeric data to be indexed * @param maxEstimatedDuplicateIds the maximum number of insertion IDs that can be used, this is a * best attempt and not a guarantee * @return a List of insertion ID's */ public InsertionIds getInsertionIds(EntryRangeType indexedData, int maxEstimatedDuplicateIds); /** * Returns the range that the given ID represents * * @param partitionKey the partition key part of the insertion ID to determine a range for * @param sortKey the sort key part of the insertion ID to determine a range for * @return the range that the given insertion ID represents, inclusive on the start and exclusive * on the end for the range */ public EntryRangeType getRangeForId(byte[] partitionKey, byte[] sortKey); } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/StringUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Convenience methods for converting to and from strings. The encoding and decoding of strings uses * UTF-8, and these methods should be used for serializing and deserializing text-based data, not * for converting binary data to a String representation. Use ByteArrayUtils for converting data * that is binary in nature to a String for transport. */ public class StringUtils { private static final Logger LOGGER = LoggerFactory.getLogger(StringUtils.class); public static final Charset UTF8_CHARSET = Charset.forName("UTF-8"); private static final String DEFAULT_GEOWAVE_CHARSET = "ISO-8859-1"; public static final String GEOWAVE_CHARSET_PROPERTY_NAME = "geowave.charset"; private static Charset geowaveCharset = null; public static Charset getGeoWaveCharset() { if (geowaveCharset == null) { final String charset = System.getProperty(GEOWAVE_CHARSET_PROPERTY_NAME, DEFAULT_GEOWAVE_CHARSET); geowaveCharset = Charset.forName(charset); } return geowaveCharset; } /** * Utility to convert a String to bytes * * @param string incoming String to convert * @return a byte array */ public static byte[] stringToBinary(final String string) { return string.getBytes(getGeoWaveCharset()); } /** * Utility to convert a list of Strings to bytes * * @param strings incoming Strings to convert * @return a byte array */ public static byte[] stringsToBinary(final String strings[]) { int len = 
VarintUtils.unsignedIntByteLength(strings.length); final List strsBytes = new ArrayList<>(); for (final String str : strings) { final byte[] strByte = str.getBytes(getGeoWaveCharset()); strsBytes.add(strByte); len += (strByte.length + VarintUtils.unsignedIntByteLength(strByte.length)); } final ByteBuffer buf = ByteBuffer.allocate(len); VarintUtils.writeUnsignedInt(strings.length, buf); for (final byte[] str : strsBytes) { VarintUtils.writeUnsignedInt(str.length, buf); buf.put(str); } return buf.array(); } /** * Utility to convert bytes to a String * * @param binary a byte array to convert to a String * @return a String representation of the byte array */ public static String stringFromBinary(final byte[] binary) { return new String(binary, getGeoWaveCharset()); } /** * Utility to convert bytes to a String * * @param binary a byte array to convert to a String * @return a String representation of the byte array */ public static String[] stringsFromBinary(final byte[] binary) { final ByteBuffer buf = ByteBuffer.wrap(binary); final int count = VarintUtils.readUnsignedInt(buf); final String[] result = new String[count]; for (int i = 0; i < count; i++) { final int size = VarintUtils.readUnsignedInt(buf); final byte[] strBytes = ByteArrayUtils.safeRead(buf, size); result[i] = new String(strBytes, getGeoWaveCharset()); } return result; } /** * Convert a number to a string. 
In this case we ensure that it is safe for Accumulo table names * by replacing '-' with '_' * * @param number the number to convert * @return the safe string representing that number */ public static String intToString(final int number) { return org.apache.commons.lang3.StringUtils.replace(Integer.toString(number), "-", "_"); } public static Map parseParams(final String params) throws NullPointerException { final Map paramsMap = new HashMap<>(); final String[] paramsSplit = params.split(";"); for (final String param : paramsSplit) { final String[] keyValue = param.split("="); if (keyValue.length != 2) { LOGGER.warn("Unable to parse param '" + param + "'"); continue; } paramsMap.put(keyValue[0].trim(), keyValue[1].trim()); } return paramsMap; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/VarintUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import com.google.common.annotations.VisibleForTesting;

/**
 * Based on {@link com.clearspring.analytics.util.Varint}. Provides additional functionality to
 * encode varints directly to ByteBuffers.
 *
 * <p> Varints are encoded 7 bits per byte, least-significant group first, with the high bit of
 * each byte set on every byte except the last. Signed values are zig-zag encoded first so that
 * small negative numbers stay small when encoded.
 */
public class VarintUtils {
  // Origin subtracted from timestamps before encoding so values near the present encode in fewer
  // bytes than millis-since-1970 would.
  @VisibleForTesting
  static long TIME_EPOCH = 1546300800000L; // Jan 1, 2019 UTC

  /**
   * Convert an int to an int that uses zig-zag encoding to prevent negative numbers from using the
   * maximum number of bytes.
   *
   * @see com.clearspring.analytics.util.Varint
   */
  @VisibleForTesting
  static int signedToUnsignedInt(final int value) {
    // (value >> 31) is all ones for negatives, all zeros otherwise
    return (value << 1) ^ (value >> 31);
  }

  /**
   * Convert an int that has been zig-zag encoded back to normal.
   *
   * @see com.clearspring.analytics.util.Varint
   */
  @VisibleForTesting
  static int unsignedToSignedInt(final int value) {
    // ((value << 31) >> 31) sign-extends the low (zig-zag sign) bit to a full mask
    final int temp = (((value << 31) >> 31) ^ value) >> 1;
    // re-apply the original top bit
    return temp ^ (value & (1 << 31));
  }

  /** @return the number of bytes {@link #writeSignedInt} would use for {@code value} */
  public static int signedIntByteLength(final int value) {
    return unsignedIntByteLength(signedToUnsignedInt(value));
  }

  /** @return the number of bytes {@link #writeUnsignedInt} would use: 7 bits per byte, minimum 1 */
  public static int unsignedIntByteLength(final int value) {
    final int numRelevantBits = 32 - Integer.numberOfLeadingZeros(value);
    int numBytes = (numRelevantBits + 6) / 7;
    if (numBytes == 0) {
      // zero still takes one encoded byte
      numBytes = 1;
    }
    return numBytes;
  }

  /** @return the encoded length of the short, treated as an unsigned 16-bit value */
  public static int unsignedShortByteLength(final short value) {
    return unsignedIntByteLength(value & 0xFFFF);
  }

  /** Zig-zag encode {@code value} and write it to {@code buffer} as a varint. */
  public static void writeSignedInt(final int value, final ByteBuffer buffer) {
    writeUnsignedInt(signedToUnsignedInt(value), buffer);
  }

  /** Zig-zag encode {@code value} and return it as a varint byte array. */
  public static byte[] writeSignedInt(final int value) {
    return writeUnsignedInt(signedToUnsignedInt(value));
  }

  /** Write {@code value} to {@code buffer} as a varint, least-significant 7-bit group first. */
  public static void writeUnsignedInt(int value, final ByteBuffer buffer) {
    // while any bit above the low 7 remains, emit a continuation byte (high bit set)
    while ((value & 0xFFFFFF80) != 0) {
      buffer.put((byte) ((value & 0x7F) | 0x80));
      value >>>= 7;
    }
    buffer.put((byte) (value & 0x7F));
  }

  /** @return {@code value} encoded as a varint byte array */
  public static byte[] writeUnsignedInt(int value) {
    final byte[] retVal = new byte[unsignedIntByteLength(value)];
    int i = 0;
    while ((value & 0xFFFFFF80) != 0) {
      retVal[i++] = (byte) ((value & 0x7F) | 0x80);
      value >>>= 7;
    }
    retVal[i] = (byte) (value & 0x7F);
    return retVal;
  }

  /** Write the short to {@code buffer} as an unsigned varint. */
  public static void writeUnsignedShort(final short value, final ByteBuffer buffer) {
    writeUnsignedInt(value & 0xFFFF, buffer);
  }

  /** @return the short encoded as an unsigned varint byte array */
  public static byte[] writeUnsignedShort(final short value) {
    return writeUnsignedInt(value & 0xFFFF);
  }

  /**
   * Write {@code value} as a varint whose bytes are laid out in reverse order, so it can be
   * decoded by scanning backwards (see {@link #readUnsignedIntReversed}).
   */
  public static void writeUnsignedIntReversed(int value, final ByteBuffer buffer) {
    final int startPosition = buffer.position();
    final int byteLength = unsignedIntByteLength(value);
    // fill from the last byte of the encoding back toward startPosition
    int position = (startPosition + byteLength) - 1;
    while ((value & 0xFFFFFF80) != 0) {
      buffer.put(position, (byte) ((value & 0x7F) | 0x80));
      value >>>= 7;
      position--;
    }
    buffer.put(position, (byte) (value & 0x7F));
    // absolute puts don't advance the buffer; advance past the written bytes explicitly
    buffer.position(startPosition + byteLength);
  }

  /** @return {@code value} encoded as a reversed varint byte array */
  public static byte[] writeUnsignedIntReversed(int value) {
    final int byteLength = unsignedIntByteLength(value);
    final byte[] retVal = new byte[byteLength];
    int i = retVal.length - 1;
    while ((value & 0xFFFFFF80) != 0) {
      retVal[i--] = (byte) ((value & 0x7F) | 0x80);
      value >>>= 7;
    }
    // exactly byteLength - 1 continuation bytes were written above, so index 0 remains
    retVal[0] = (byte) (value & 0x7F);
    return retVal;
  }

  /** Read a varint from {@code buffer} and undo the zig-zag encoding. */
  public static int readSignedInt(final ByteBuffer buffer) {
    return unsignedToSignedInt(readUnsignedInt(buffer));
  }

  /** Read an unsigned varint from {@code buffer}, advancing its position. */
  public static int readUnsignedInt(final ByteBuffer buffer) {
    int value = 0;
    int i = 0;
    int currByte;
    // accumulate 7 bits per continuation byte
    while (((currByte = buffer.get()) & 0x80) != 0) {
      value |= (currByte & 0x7F) << i;
      i += 7;
    }
    return value | (currByte << i);
  }

  /** Read an unsigned varint and narrow it to a short (low 16 bits). */
  public static short readUnsignedShort(final ByteBuffer buffer) {
    final int value = readUnsignedInt(buffer);
    return (short) (value & 0xFFFF);
  }

  /**
   * Read a reversed varint ending at the buffer's current position, scanning backwards. The
   * buffer's position is moved to just before the decoded varint.
   */
  public static int readUnsignedIntReversed(final ByteBuffer buffer) {
    int value = 0;
    int i = 0;
    int currByte;
    int position = buffer.position();
    while (((currByte = buffer.get(position)) & 0x80) != 0) {
      value |= (currByte & 0x7F) << i;
      i += 7;
      position--;
    }
    // NOTE(review): when the varint ends at position 0 the buffer position is left unchanged —
    // presumably callers never read a reversed varint at offset 0; confirm against call sites.
    if (position > 0) {
      buffer.position(position - 1);
    }
    return value | (currByte << i);
  }

  /**
   * Convert a long to a long that uses zig-zag encoding to prevent negative numbers from using the
   * maximum number of bytes.
   *
   * @see com.clearspring.analytics.util.Varint
   */
  @VisibleForTesting
  static long signedToUnsignedLong(final long value) {
    return (value << 1) ^ (value >> 63);
  }

  /**
   * Convert a long that has been zig-zag encoded back to normal.
   *
   * @see com.clearspring.analytics.util.Varint
   */
  @VisibleForTesting
  static long unsignedToSignedLong(final long value) {
    final long temp = (((value << 63) >> 63) ^ value) >> 1;
    return temp ^ (value & (1L << 63));
  }

  /** @return the number of bytes {@link #writeSignedLong} would use for {@code value} */
  public static int signedLongByteLength(final long value) {
    return unsignedLongByteLength(signedToUnsignedLong(value));
  }

  /** @return the number of bytes {@link #writeUnsignedLong} would use: 7 bits per byte, minimum 1 */
  public static int unsignedLongByteLength(final long value) {
    final int numRelevantBits = 64 - Long.numberOfLeadingZeros(value);
    int numBytes = (numRelevantBits + 6) / 7;
    if (numBytes == 0) {
      numBytes = 1;
    }
    return numBytes;
  }

  /** Zig-zag encode {@code value} and write it to {@code buffer} as a varint. */
  public static void writeSignedLong(final long value, final ByteBuffer buffer) {
    writeUnsignedLong(signedToUnsignedLong(value), buffer);
  }

  /** Zig-zag encode {@code value} and return it as a varint byte array. */
  public static byte[] writeSignedLong(final long value) {
    return writeUnsignedLong(signedToUnsignedLong(value));
  }

  /** Write {@code value} to {@code buffer} as a varint, least-significant 7-bit group first. */
  public static void writeUnsignedLong(long value, final ByteBuffer buffer) {
    while ((value & 0xFFFFFFFFFFFFFF80L) != 0L) {
      buffer.put((byte) ((value & 0x7F) | 0x80));
      value >>>= 7;
    }
    buffer.put((byte) (value & 0x7F));
  }

  /** @return {@code value} encoded as a varint byte array */
  public static byte[] writeUnsignedLong(long value) {
    final byte[] retVal = new byte[unsignedLongByteLength(value)];
    int i = 0;
    while ((value & 0xFFFFFFFFFFFFFF80L) != 0L) {
      retVal[i++] = (byte) ((value & 0x7F) | 0x80);
      value >>>= 7;
    }
    retVal[i] = (byte) (value & 0x7F);
    return retVal;
  }

  /** Read a varint from {@code buffer} and undo the zig-zag encoding. */
  public static long readSignedLong(final ByteBuffer buffer) {
    return unsignedToSignedLong(readUnsignedLong(buffer));
  }

  /** Read an unsigned varint from {@code buffer}, advancing its position. */
  public static long readUnsignedLong(final ByteBuffer buffer) {
    long value = 0;
    int i = 0;
    long currByte;
    while (((currByte = buffer.get()) & 0x80L) != 0) {
      value |= (currByte & 0x7F) << i;
      i += 7;
    }
    return value | (currByte << i);
  }

  /**
   * Get the byte length of a varint encoded timestamp.
   *
   * @param time the timestamp
   * @return the number of bytes the encoded timestamp will use
   */
  public static int timeByteLength(final long time) {
    return signedLongByteLength(time - TIME_EPOCH);
  }

  /**
   * Encode a timestamp using varint encoding.
   *
   * @param time the timestamp
   * @param buffer the {@code ByteBuffer} to write the timestamp to
   */
  public static void writeTime(final long time, final ByteBuffer buffer) {
    writeSignedLong(time - TIME_EPOCH, buffer);
  }

  /**
   * Encode a timestamp using varint encoding.
   *
   * @param time the timestamp
   * @return the timestamp as bytes
   */
  public static byte[] writeTime(final long time) {
    return writeSignedLong(time - TIME_EPOCH);
  }

  /**
   * Read a timestamp from a {@code ByteBuffer} that was previously encoded with {@link #writeTime}.
   *
   * @param buffer the {@code ByteBuffer} to read from
   * @return the decoded timestamp
   */
  public static long readTime(final ByteBuffer buffer) {
    return VarintUtils.readSignedLong(buffer) + TIME_EPOCH;
  }

  /**
   * Encode a BigDecimal as a byte[]. The structure of the byte[] is opaque, so to deserialize, use
   * {@link #readBigDecimal(ByteBuffer)}
   *
   * @param num The number to serialize as a {@link ByteBuffer}
   * @return a byte array that represents the given BigDecimal; empty array for {@code null}
   */
  public static byte[] writeBigDecimal(final BigDecimal num) {
    if (num == null) {
      return new byte[0];
    }
    // layout: varint scale, 4-byte unscaled length, unscaled big-endian two's-complement bytes
    final byte[] unscaled = num.unscaledValue().toByteArray();
    final ByteBuffer buf =
        ByteBuffer.allocate(VarintUtils.signedIntByteLength(num.scale()) + 4 + unscaled.length);
    VarintUtils.writeSignedInt(num.scale(), buf);
    buf.putInt(unscaled.length);
    buf.put(unscaled);
    return buf.array();
  }

  /**
   * Read a BigDecimal number from a {@link ByteBuffer} that was previously encoded by using
   * {@link #writeBigDecimal(BigDecimal)}
   *
   * @param buffer The {@link ByteBuffer} that contains the BigDecimal next in its contents.
   * @return The BigDecimal that was stored in the ByteBuffer, and the ByteBuffer's position is
   *         modified past the BigDecimal; {@code null} if the buffer is empty.
   */
  public static BigDecimal readBigDecimal(final ByteBuffer buffer) {
    if (buffer.remaining() == 0) {
      return null;
    }
    final int scale = VarintUtils.readSignedInt(buffer);
    final int unscaledLength = buffer.getInt();
    final byte[] unscaled = new byte[unscaledLength];
    buffer.get(unscaled);
    return new BigDecimal(new BigInteger(unscaled), scale);
  }
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/dimension/BasicDimensionDefinition.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.dimension; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; /** * The Basic Dimension Definition class defines a Space Filling Curve dimension as a minimum and * maximum range with values linearly interpolated within the range. Values outside of the range * will be clamped within the range. */ public class BasicDimensionDefinition implements NumericDimensionDefinition { protected double min; protected double max; public BasicDimensionDefinition() {} /** * Constructor which defines and enforces the bounds of a numeric dimension definition. 
* * @param min the minimum bounds of the dimension * @param max the maximum bounds of the dimension */ public BasicDimensionDefinition(final double min, final double max) { this.min = min; this.max = max; } @Override public double normalize(double value) { value = clamp(value); return ((value - min) / (max - min)); } @Override public BinRange[] getNormalizedRanges(final NumericData range) { if (range == null) { return new BinRange[0]; } return new BinRange[] { new BinRange( // by default clamp to the min and max clamp(range.getMin()), clamp(range.getMax()))}; } @Override public NumericData getFullRange() { return new NumericRange(min, max); } protected double clamp(final double x) { return clamp(x, min, max); } protected static double clamp(final double x, final double min, final double max) { if (x < min) { return min; } if (x > max) { return max; } return x; } @Override public int hashCode() { final int prime = 31; int result = 1; final String className = getClass().getName(); result = (prime * result) + ((className == null) ? 
0 : className.hashCode()); long temp; temp = Double.doubleToLongBits(max); result = (prime * result) + (int) (temp ^ (temp >>> 32)); temp = Double.doubleToLongBits(min); result = (prime * result) + (int) (temp ^ (temp >>> 32)); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final BasicDimensionDefinition other = (BasicDimensionDefinition) obj; if (Double.doubleToLongBits(max) != Double.doubleToLongBits(other.max)) { return false; } if (Double.doubleToLongBits(min) != Double.doubleToLongBits(other.min)) { return false; } return true; } @Override public byte[] toBinary() { final ByteBuffer buf = ByteBuffer.allocate(16); buf.putDouble(min); buf.putDouble(max); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); min = buf.getDouble(); max = buf.getDouble(); } @Override public double denormalize(double value) { if ((value < 0) || (value > 1)) { value = clamp(value, 0, 1); } return (value * (max - min)) + min; } @Override public NumericRange getDenormalizedRange(final BinRange range) { return new NumericRange(range.getNormalizedMin(), range.getNormalizedMax()); } @Override public int getFixedBinIdSize() { return 0; } @Override public double getRange() { return max - min; } @Override public NumericRange getBounds() { return new NumericRange(min, max); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/dimension/NumericDimensionDefinition.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.dimension;

import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * The Numeric Dimension Definition interface defines the attributes and methods of a class which
 * forms the Space Filling Curve dimension. Implementations are persistable so that index
 * configurations can be serialized and restored.
 */
public interface NumericDimensionDefinition extends Persistable {
  /** @return the extent of the dimension (maximum minus minimum of its native bounds) */
  double getRange();

  /**
   * Used to normalize a value within the bounds of the range to a percentage of the range between 0
   * and 1
   *
   * @return normalized value
   */
  double normalize(double value);

  /**
   * Used to denormalize the numeric data set from a value between 0 and 1 scaled to fit within its
   * native bounds
   *
   * @return the denormalized value
   */
  double denormalize(double value);

  /**
   * Returns the set of normalized ranges
   *
   * @param range a numeric range of the data set
   * @return an array of BinRange[] objects
   */
  BinRange[] getNormalizedRanges(NumericData range);

  /**
   * Returns a range in the native bounds of the dimension definition, denormalized from a bin and
   * separate range
   *
   * @param range a numeric range of the data set, with a bin
   * @return a NumericRange representing the given bin and range
   */
  NumericRange getDenormalizedRange(BinRange range);

  /**
   * If this numeric dimension definition uses bins, it is given a fixed length for the bin ID
   *
   * @return the fixed length for this dimensions bin ID
   */
  int getFixedBinIdSize();

  /**
   * Returns the native bounds of the dimension definition
   *
   * @return a range representing the minimum value and the maximum value for this dimension
   *         definition
   */
  NumericRange getBounds();

  /** @return the entire allowed range */
  NumericData getFullRange();
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/dimension/UnboundedDimensionDefinition.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.dimension; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.persist.PersistenceUtils; /** * Because space filling curves require an extent (minimum & maximum), the unbounded implementation * relies on an external binning strategy to translate an unbounded variable into bounded bins */ public class UnboundedDimensionDefinition extends BasicDimensionDefinition { protected IndexBinningStrategy binningStrategy; public UnboundedDimensionDefinition() { super(); } /** @param binningStrategy a bin strategy associated with the dimension */ public UnboundedDimensionDefinition(final IndexBinningStrategy binningStrategy) { super(binningStrategy.getBinMin(), binningStrategy.getBinMax()); this.binningStrategy = binningStrategy; } /** @param index a numeric value to be normalized */ @Override public BinRange[] getNormalizedRanges(final NumericData index) { if (index.getMin().isInfinite() && index.getMax().isInfinite()) { return new BinRange[] {BinRange.unbound()}; } return binningStrategy.getNormalizedRanges(index); } /** @return a bin strategy associated with the dimension */ public IndexBinningStrategy getBinningStrategy() { return binningStrategy; } @Override public NumericRange getDenormalizedRange(final BinRange range) { return binningStrategy.getDenormalizedRanges(range); } @Override public int getFixedBinIdSize() { 
return binningStrategy.getFixedBinIdSize(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((binningStrategy == null) ? 0 : binningStrategy.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final UnboundedDimensionDefinition other = (UnboundedDimensionDefinition) obj; if (binningStrategy == null) { if (other.binningStrategy != null) { return false; } } else if (!binningStrategy.equals(other.binningStrategy)) { return false; } return true; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(binningStrategy); } @Override public void fromBinary(final byte[] bytes) { binningStrategy = (IndexBinningStrategy) PersistenceUtils.fromBinary(bytes); min = binningStrategy.getBinMin(); max = binningStrategy.getBinMax(); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/ByteLexicoder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; /** * A lexicoder for signed values (in the range from Byte.MIN_VALUE to Byte.MAX_VALUE). Does an * exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of * the byte arrays matches the natural order of the numbers. */ public class ByteLexicoder implements NumberLexicoder { protected ByteLexicoder() {} @Override public byte[] toByteArray(final Byte value) { return new byte[] {((byte) (value ^ 0x80))}; } @Override public Byte fromByteArray(final byte[] bytes) { return (byte) (bytes[0] ^ 0x80); } @Override public Byte getMinimumValue() { return Byte.MIN_VALUE; } @Override public Byte getMaximumValue() { return Byte.MAX_VALUE; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/DoubleLexicoder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.Longs; /** A lexicoder for preserving the native Java sort order of Double values. */ public class DoubleLexicoder implements NumberLexicoder { @Override public byte[] toByteArray(final Double value) { long l = Double.doubleToRawLongBits(value); if (l < 0) { l = ~l; } else { l = l ^ 0x8000000000000000l; } return Longs.toByteArray(l); } @Override public Double fromByteArray(final byte[] bytes) { long l = Longs.fromByteArray(bytes); if (l < 0) { l = l ^ 0x8000000000000000l; } else { l = ~l; } return Double.longBitsToDouble(l); } @Override public Double getMinimumValue() { return -Double.MAX_VALUE; } @Override public Double getMaximumValue() { return Double.MAX_VALUE; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/FloatLexicoder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.Ints; /** A lexicoder for preserving the native Java sort order of Float values. */ public class FloatLexicoder implements NumberLexicoder { @Override public byte[] toByteArray(final Float value) { int i = Float.floatToRawIntBits(value); if (i < 0) { i = ~i; } else { i = i ^ 0x80000000; } return Ints.toByteArray(i); } @Override public Float fromByteArray(final byte[] bytes) { int i = Ints.fromByteArray(bytes); if (i < 0) { i = i ^ 0x80000000; } else { i = ~i; } return Float.intBitsToFloat(i); } @Override public Float getMinimumValue() { return -Float.MAX_VALUE; } @Override public Float getMaximumValue() { return Float.MAX_VALUE; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/IntegerLexicoder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.Ints; /** * A lexicoder for signed integers (in the range from Integer.MIN_VALUE to Integer.MAX_VALUE). Does * an exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of * the byte arrays matches the natural order of the numbers. * *

See Apache Accumulo (org.apache.accumulo.core.client.lexicoder.IntegerLexicoder) */ public class IntegerLexicoder implements NumberLexicoder { protected IntegerLexicoder() {} @Override public byte[] toByteArray(final Integer value) { return Ints.toByteArray(value ^ 0x80000000); } @Override public Integer fromByteArray(final byte[] bytes) { final int value = Ints.fromByteArray(bytes); return value ^ 0x80000000; } @Override public Integer getMinimumValue() { return Integer.MIN_VALUE; } @Override public Integer getMaximumValue() { return Integer.MAX_VALUE; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/Lexicoders.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.lexicoder;

/**
 * A class containing instances of lexicoders. These shared instances are the intended way to obtain
 * a lexicoder (the lexicoder constructors are not public); all instances are stateless and safe to
 * share.
 */
public class Lexicoders {
  public static final ByteLexicoder BYTE = new ByteLexicoder();
  public static final ShortLexicoder SHORT = new ShortLexicoder();
  public static final IntegerLexicoder INT = new IntegerLexicoder();
  public static final LongLexicoder LONG = new LongLexicoder();
  public static final DoubleLexicoder DOUBLE = new DoubleLexicoder();
  public static final FloatLexicoder FLOAT = new FloatLexicoder();
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/LongLexicoder.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.Longs; /** * A lexicoder for signed integers (in the range from Long.MIN_VALUE to Long.MAX_VALUE). Does an * exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of * the byte arrays matches the natural order of the numbers. * *

See Apache Accumulo (org.apache.accumulo.core.client.lexicoder.LongLexicoder) */ public class LongLexicoder implements NumberLexicoder { protected LongLexicoder() {} @Override public byte[] toByteArray(final Long value) { return Longs.toByteArray(lexicode(value)); } @Override public Long fromByteArray(final byte[] bytes) { final long value = Longs.fromByteArray(bytes); return lexicode(value); } @Override public Long getMinimumValue() { return Long.MIN_VALUE; } @Override public Long getMaximumValue() { return Long.MAX_VALUE; } public Long lexicode(final Long value) { return value ^ 0x8000000000000000l; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/NumberLexicoder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; /** * A lexicoder for a number type. Converts back and forth between a number and a byte array. A * lexicographical sorting of the byte arrays will yield the natural order of the numbers that they * represent. * * @param a number type */ public interface NumberLexicoder { /** * Get a byte[] that represents the number value. * * @param value a number * @return the byte array representing the number */ public byte[] toByteArray(T value); /** * Get the value of a byte array * * @param bytes a byte array representing a number * @return the number */ public T fromByteArray(byte[] bytes); /** * Get the minimum value of the range of numbers that this lexicoder can encode and decode (i.e. * the number represented by all 0 bits). * * @return the minimum value in the lexicoder's range */ public T getMinimumValue(); /** * Get the maximum value of the range of numbers that this lexicoder can encode and decode (i.e. * the number represented by all 1 bits). * * @return the maximum value in the lexicoder's range */ public T getMaximumValue(); } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/lexicoder/ShortLexicoder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.Shorts; /** * A lexicoder for signed integers (in the range from Short.MIN_VALUE to Short.MAX_VALUE). Does an * exclusive or on the most significant bit to invert the sign, so that lexicographic ordering of * the byte arrays matches the natural order of the numbers. */ public class ShortLexicoder implements NumberLexicoder { protected ShortLexicoder() {} @Override public byte[] toByteArray(final Short value) { return Shorts.toByteArray((short) (value ^ 0x8000)); } @Override public Short fromByteArray(final byte[] bytes) { final short value = Shorts.fromByteArray(bytes); return (short) (value ^ 0x8000); } @Override public Short getMinimumValue() { return Short.MIN_VALUE; } @Override public Short getMaximumValue() { return Short.MAX_VALUE; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/numeric/BasicNumericDataset.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.numeric; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; /** * The Basic Index Result class creates an object associated with a generic query. This class can be * used when the dimensions and/or axis are generic. */ public class BasicNumericDataset implements MultiDimensionalNumericData { private NumericData[] dataPerDimension; /** Open ended/unconstrained */ public BasicNumericDataset() { dataPerDimension = new NumericData[0]; } /** * Constructor used to create a new Basic Numeric Dataset object. 
* * @param dataPerDimension an array of numeric data objects */ public BasicNumericDataset(final NumericData[] dataPerDimension) { this.dataPerDimension = dataPerDimension; } /** @return all of the maximum values (for each dimension) */ @Override public Double[] getMaxValuesPerDimension() { final NumericData[] ranges = getDataPerDimension(); final Double[] maxPerDimension = new Double[ranges.length]; for (int d = 0; d < ranges.length; d++) { maxPerDimension[d] = ranges[d].getMax(); } return maxPerDimension; } /** @return all of the minimum values (for each dimension) */ @Override public Double[] getMinValuesPerDimension() { final NumericData[] ranges = getDataPerDimension(); final Double[] minPerDimension = new Double[ranges.length]; for (int d = 0; d < ranges.length; d++) { minPerDimension[d] = ranges[d].getMin(); } return minPerDimension; } /** @return all of the centroid values (for each dimension) */ @Override public Double[] getCentroidPerDimension() { final NumericData[] ranges = getDataPerDimension(); final Double[] centroid = new Double[ranges.length]; for (int d = 0; d < ranges.length; d++) { centroid[d] = ranges[d].getCentroid(); } return centroid; } /** @return an array of NumericData objects */ @Override public NumericData[] getDataPerDimension() { return dataPerDimension; } /** @return the number of dimensions associated with this data set */ @Override public int getDimensionCount() { return dataPerDimension.length; } @Override public boolean isEmpty() { if ((dataPerDimension == null) || (dataPerDimension.length == 0)) { return true; } return !Arrays.stream(dataPerDimension).noneMatch(d -> d == null); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(dataPerDimension); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final BasicNumericDataset 
other = (BasicNumericDataset) obj; if (!Arrays.equals(dataPerDimension, other.dataPerDimension)) { return false; } return true; } @Override public byte[] toBinary() { int totalBytes = VarintUtils.unsignedIntByteLength(dataPerDimension.length); final List serializedData = new ArrayList<>(); for (final NumericData data : dataPerDimension) { final byte[] binary = PersistenceUtils.toBinary(data); totalBytes += (binary.length + VarintUtils.unsignedIntByteLength(binary.length)); serializedData.add(binary); } final ByteBuffer buf = ByteBuffer.allocate(totalBytes); VarintUtils.writeUnsignedInt(dataPerDimension.length, buf); for (final byte[] binary : serializedData) { VarintUtils.writeUnsignedInt(binary.length, buf); buf.put(binary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); dataPerDimension = new NumericData[numDimensions]; for (int d = 0; d < numDimensions; d++) { final byte[] binary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); dataPerDimension[d] = (NumericData) PersistenceUtils.fromBinary(binary); } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/numeric/BinnedNumericDataset.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.numeric;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;

/**
 * The Binned Numeric Dataset class creates an object that associates a multi-dimensional index
 * range to a particular bin ID.
 */
public class BinnedNumericDataset implements MultiDimensionalNumericData {
  // Unique bin identifier; grows as bin IDs from successive dimensions are appended.
  private byte[] binId;
  // Normalized index ranges for this bin; most accessors simply delegate here.
  private MultiDimensionalNumericData indexRanges;

  public BinnedNumericDataset() {}

  /**
   * @param binId a unique ID associated with the BinnedQuery object
   * @param indexRanges multi-dimensional range data
   */
  public BinnedNumericDataset(final byte[] binId, final MultiDimensionalNumericData indexRanges) {
    this.binId = binId;
    this.indexRanges = indexRanges;
  }

  /** @return an array of NumericData objects associated with this object. */
  @Override
  public NumericData[] getDataPerDimension() {
    return indexRanges.getDataPerDimension();
  }

  /** @return an array of max values associated with each dimension */
  @Override
  public Double[] getMaxValuesPerDimension() {
    return indexRanges.getMaxValuesPerDimension();
  }

  /** @return an array of min values associated with each dimension */
  @Override
  public Double[] getMinValuesPerDimension() {
    return indexRanges.getMinValuesPerDimension();
  }

  /** @return an array of centroid values associated with each dimension */
  @Override
  public Double[] getCentroidPerDimension() {
    return indexRanges.getCentroidPerDimension();
  }

  /** @return the number of total dimensions */
  @Override
  public int getDimensionCount() {
    return indexRanges.getDimensionCount();
  }

  /** @return a unique ID associated with this object */
  public byte[] getBinId() {
    return binId;
  }

  /**
   * This method is responsible for translating a query into appropriate normalized and binned (if
   * necessary) queries that can be used by the underlying index implementation. For example, for
   * unbounded dimensions such as time, an incoming query of July 2012 to July 2013 may get
   * translated into 2 binned queries representing the 2012 portion of the query and the 2013
   * portion, each normalized to millis from the beginning of the year.
   *
   * @param numericData the incoming query into the index implementation, to be translated into
   *        normalized, binned queries
   * @param dimensionDefinitions the definition for the dimensions
   * @return normalized indexes
   */
  // NOTE(review): raw return type — presumably List<BinnedNumericDataset>; generics appear
  // stripped by extraction.
  public static List applyBins(
      final MultiDimensionalNumericData numericData,
      final NumericDimensionDefinition[] dimensionDefinitions) {
    if (dimensionDefinitions.length == 0) {
      return Collections.emptyList();
    }
    final BinRange[][] binRangesPerDimension =
        getBinnedRangesPerDimension(numericData, dimensionDefinitions);
    // now we need to combine all permutations of bin ranges into
    // BinnedQuery objects
    final List binnedQueries = new ArrayList<>();
    generatePermutations(binRangesPerDimension, binnedQueries, 0, null);
    return binnedQueries;
  }

  // Depth-first expansion of the cross product of per-dimension bin ranges: each recursion depth
  // fixes one dimension's bin; completed permutations are added to 'result'.
  private static void generatePermutations(
      final BinRange[][] binRangesPerDimension,
      final List result,
      final int dimension,
      final BinnedNumericDataset current) {
    if (dimension == binRangesPerDimension.length) {
      result.add(current);
      return;
    }
    for (int i = 0; i < binRangesPerDimension[dimension].length; ++i) {
      BinnedNumericDataset next;
      final NumericData[] rangePerDimension;
      if (current == null) {
        // First dimension: start a fresh permutation seeded with this bin's ID.
        rangePerDimension = new NumericRange[binRangesPerDimension.length];
        next =
            new BinnedNumericDataset(
                binRangesPerDimension[dimension][i].getBinId(),
                new BasicNumericDataset(rangePerDimension));
      } else {
        // because binned queries were intended to be immutable,
        // re-instantiate the object
        rangePerDimension = new NumericRange[binRangesPerDimension.length];
        for (int d = 0; d < dimension; d++) {
          rangePerDimension[d] = current.getDataPerDimension()[d];
        }
        // The combined bin ID concatenates the bins chosen so far with this dimension's bin.
        final byte[] combinedBinId =
            ByteArrayUtils.combineArrays(
                current.getBinId(),
                binRangesPerDimension[dimension][i].getBinId());
        next = new BinnedNumericDataset(combinedBinId, new BasicNumericDataset(rangePerDimension));
      }
      rangePerDimension[dimension] =
          new NumericRange(
              binRangesPerDimension[dimension][i].getNormalizedMin(),
              binRangesPerDimension[dimension][i].getNormalizedMax());
generatePermutations(binRangesPerDimension, result, dimension + 1, next); } } public static BinRange[][] getBinnedRangesPerDimension( final MultiDimensionalNumericData numericData, final NumericDimensionDefinition[] dimensionDefinitions) { if (dimensionDefinitions.length == 0) { return new BinRange[0][]; } final BinRange[][] binRangesPerDimension = new BinRange[dimensionDefinitions.length][]; for (int d = 0; d < dimensionDefinitions.length; d++) { binRangesPerDimension[d] = dimensionDefinitions[d].getNormalizedRanges(numericData.getDataPerDimension()[d]); } return binRangesPerDimension; } @Override public boolean isEmpty() { return indexRanges.isEmpty(); } @Override public byte[] toBinary() { final byte[] indexRangesBinary = PersistenceUtils.toBinary(indexRanges); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(binId.length) + indexRangesBinary.length + binId.length); VarintUtils.writeUnsignedInt(binId.length, buf); buf.put(binId); buf.put(indexRangesBinary); return null; } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); binId = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); final byte[] indexRangesBinary = new byte[buf.remaining()]; buf.get(indexRangesBinary); indexRanges = (MultiDimensionalNumericData) PersistenceUtils.fromBinary(indexRangesBinary); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/numeric/MultiDimensionalNumericData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.numeric;

import org.locationtech.geowave.core.index.MultiDimensionalIndexData;

/** Interface which defines the methods associated with a multi-dimensional numeric data range. */
public interface MultiDimensionalNumericData extends MultiDimensionalIndexData {
  // Covariant override narrowing the element type of the inherited accessor to NumericData.
  /** @return an array of object QueryRange */
  @Override
  public NumericData[] getDataPerDimension();
}
================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/numeric/NumericData.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.numeric;

import org.locationtech.geowave.core.index.IndexData;

/** Interface used to define numeric data associated with a space filling curve. */
// Marker specialization of IndexData; concrete forms are NumericRange and NumericValue.
public interface NumericData extends IndexData {
}
================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/numeric/NumericRange.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.numeric;

import java.nio.ByteBuffer;

/** Concrete implementation defining a numeric range associated with a space filling curve. */
public class NumericRange implements NumericData {
  /** */
  private static final long serialVersionUID = 1L;
  private double min;
  private double max;
  private boolean minInclusive;
  private boolean maxInclusive;

  public NumericRange() {}

  /**
   * Constructor used to create a IndexRange object
   *
   * @param min the minimum bounds of a unique index range
   * @param max the maximum bounds of a unique index range
   */
  public NumericRange(final double min, final double max) {
    // Default: both endpoints inclusive.
    this(min, max, true, true);
  }

  public NumericRange(
      final double min,
      final double max,
      final boolean minInclusive,
      final boolean maxInclusive) {
    this.min = min;
    this.max = max;
    this.minInclusive = minInclusive;
    this.maxInclusive = maxInclusive;
  }

  /** @return min the minimum bounds of a index range object */
  @Override
  public Double getMin() {
    return min;
  }

  /** @return max the maximum bounds of a index range object */
  @Override
  public Double getMax() {
    return max;
  }

  @Override
  public boolean isMinInclusive() {
    return minInclusive;
  }

  @Override
  public boolean isMaxInclusive() {
    return maxInclusive;
  }

  /** @return centroid the center of a unique index range object */
  @Override
  public Double getCentroid() {
    return (min + max) / 2;
  }

  /** Flag to determine if the object is a range */
  @Override
  public boolean isRange() {
    return true;
  }

  @Override
  public String toString() {
    return "NumericRange [min=" + min + ", max=" + max + "]";
  }

  // NOTE(review): hashCode mixes the exact double bits AND the inclusivity flags, while equals()
  // below compares min/max with an epsilon tolerance and ignores inclusivity — so two objects
  // that are equal() may have different hash codes (equals/hashCode contract violation). Left
  // as-is because the comment in equals() warns that unit tests depend on this behavior.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    long temp;
    temp = Double.doubleToLongBits(max);
    result = (prime * result) + (int) (temp ^ (temp >>> 32));
    temp = Double.doubleToLongBits(min);
    result = (prime * result) + (int) (temp ^ (temp >>> 32));
    result = (prime * result) + (minInclusive ? 1 : 0);
    result = (prime * result) + (maxInclusive ? 1 : 0);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    // changing this check will fail some unit tests.
    if (!NumericRange.class.isAssignableFrom(obj.getClass())) {
      return false;
    }
    final NumericRange other = (NumericRange) obj;
    // Epsilon comparison on the endpoints only; inclusivity flags deliberately not compared.
    return (Math.abs(max - other.max) < NumericValue.EPSILON)
        && (Math.abs(min - other.min) < NumericValue.EPSILON);
  }

  /** Serializes as: min (8 bytes), max (8 bytes), then one flag byte per inclusivity bound. */
  @Override
  public byte[] toBinary() {
    final ByteBuffer buf = ByteBuffer.allocate(18);
    buf.putDouble(min);
    buf.putDouble(max);
    buf.put(minInclusive ? (byte) 1 : (byte) 0);
    buf.put(maxInclusive ? (byte) 1 : (byte) 0);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    min = buf.getDouble();
    max = buf.getDouble();
    if (buf.remaining() > 0) {
      minInclusive = buf.get() > 0;
      maxInclusive = buf.get() > 0;
    } else {
      // Legacy 16-byte payloads carry no flags; default to inclusive bounds.
      minInclusive = true;
      maxInclusive = true;
    }
  }
}
================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/numeric/NumericValue.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.numeric; import java.nio.ByteBuffer; /** * Concrete implementation defining a single numeric value associated with a space filling curve. */ public class NumericValue implements NumericData { /** */ private static final long serialVersionUID = 1L; private double value; public NumericValue() {} /** * Constructor used to create a new NumericValue object * * @param value the particular numeric value */ public NumericValue(final double value) { this.value = value; } /** @return value the value of a numeric value object */ @Override public Double getMin() { return value; } /** @return value the value of a numeric value object */ @Override public Double getMax() { return value; } @Override public boolean isMinInclusive() { return true; } @Override public boolean isMaxInclusive() { return true; } /** @return value the value of a numeric value object */ @Override public Double getCentroid() { return value; } /** Determines if this object is a range or not */ @Override public boolean isRange() { return false; } @Override public String toString() { return "NumericRange [value=" + value + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; long temp; temp = Double.doubleToLongBits(value); result = (prime * result) + (int) (temp ^ (temp >>> 32)); return result; } protected static final double EPSILON = 1E-10; @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final NumericValue other = (NumericValue) obj; return (Math.abs(value - other.value) 
< EPSILON); } @Override public byte[] toBinary() { final ByteBuffer buf = ByteBuffer.allocate(8); buf.putDouble(value); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); value = buf.getDouble(); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/persist/InternalPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.persist;

/**
 * Marker interface for internal GeoWave persistable registries. Third-party additions to GeoWave
 * should NOT use this interface. Any persistable registry that does not implement this interface
 * will be automatically converted to the negative persistable ID space. This allows third-parties
 * to be able to use the full range of positive persistable IDs without worrying about colliding
 * with a pre-existing internal persistable ID.
 */
// Checked via instanceof in PersistableFactory.addRegistry to decide ID-space negation.
public interface InternalPersistableRegistry {
}
================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/persist/Persistable.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.persist;

/**
 * A simple interface for persisting objects, PersistenceUtils provides convenience methods for
 * serializing and de-serializing these objects
 */
public interface Persistable {
  /**
   * Convert fields and data within an object to binary form for transmission or storage.
   *
   * @return an array of bytes representing a binary stream representation of the object.
   */
  byte[] toBinary();

  /** Convert a stream of binary bytes to fields and data within an object. */
  void fromBinary(byte[] bytes);
}
================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistableFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.persist; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.function.Supplier; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PersistableFactory { private static final Logger LOGGER = LoggerFactory.getLogger(PersistableFactory.class); private final Map, Short> classRegistry; private final Map> constructorRegistry; private static PersistableFactory singletonInstance = null; public static synchronized PersistableFactory getInstance() { if (singletonInstance == null) { final PersistableFactory internalFactory = new PersistableFactory(); final Iterator persistableRegistries = new SPIServiceRegistry(PersistableFactory.class).load(PersistableRegistrySpi.class); while (persistableRegistries.hasNext()) { final PersistableRegistrySpi persistableRegistry = persistableRegistries.next(); if (persistableRegistry != null) { internalFactory.addRegistry(persistableRegistry); } } singletonInstance = internalFactory; } return singletonInstance; } private PersistableFactory() { classRegistry = new HashMap<>(); constructorRegistry = new HashMap<>(); } protected void addRegistry(final PersistableRegistrySpi registry) { final PersistableIdAndConstructor[] persistables = registry.getSupportedPersistables(); final boolean external = !(registry instanceof InternalPersistableRegistry); for (final PersistableIdAndConstructor p : persistables) { 
addPersistableType( external ? (short) (-Math.abs(p.getPersistableId())) : p.getPersistableId(), p.getPersistableConstructor()); } } protected void addPersistableType( final short persistableId, final Supplier constructor) { final Class persistableClass = constructor.get().getClass(); if (classRegistry.containsKey(persistableClass)) { LOGGER.error( "'" + persistableClass.getCanonicalName() + "' already registered with id '" + classRegistry.get(persistableClass) + "'. Cannot register '" + persistableClass + "' with id '" + persistableId + "'"); return; } if (constructorRegistry.containsKey(persistableId)) { String currentClass = "unknown"; for (final Entry, Short> e : classRegistry.entrySet()) { if (persistableId == e.getValue().shortValue()) { currentClass = e.getKey().getCanonicalName(); break; } } LOGGER.error( "'" + persistableId + "' already registered for class '" + (currentClass) + "'. Cannot register '" + persistableClass + "' with id '" + persistableId + "'"); return; } classRegistry.put(persistableClass, persistableId); constructorRegistry.put(persistableId, constructor); } public Persistable newInstance(final short id) { final Supplier constructor = constructorRegistry.get(id); if (constructor != null) { return constructor.get(); } return null; } public Map, Short> getClassIdMapping() { return classRegistry; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistableList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.persist; import java.nio.ByteBuffer; import java.util.List; import com.google.common.collect.Lists; /** * A Persistable list of Persistables. */ public class PersistableList implements Persistable { private final List persistables; public PersistableList() { persistables = Lists.newArrayList(); } public PersistableList(final List persistables) { this.persistables = persistables; } @Override public byte[] toBinary() { final List parts = Lists.newArrayListWithCapacity(persistables.size()); int length = 4; for (final Persistable persistable : persistables) { final byte[] binary = PersistenceUtils.toBinary(persistable); length += binary.length + 4; parts.add(binary); } final ByteBuffer buffer = ByteBuffer.allocate(length); buffer.putInt(persistables.size()); for (final byte[] part : parts) { buffer.putInt(part.length); buffer.put(part); } return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final int length = buffer.getInt(); persistables.clear(); for (int i = 0; i < length; i++) { final int partLength = buffer.getInt(); final byte[] part = new byte[partLength]; buffer.get(part); persistables.add(PersistenceUtils.fromBinary(part)); } } public List getPersistables() { return persistables; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistableRegistrySpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.persist;

import java.util.function.Supplier;

/**
 * Registers new persistables with GeoWave. Each persistable has an ID of type short that uniquely
 * identifies the class. Internal GeoWave persistable registries also implement the
 * {@link InternalPersistableRegistry} marker interface that alleviates potential ID conflicts with
 * third-party plugins. Any third-party persistable that does not implement the internal marker
 * interface will automatically be converted to the negative ID space (i.e. a persistable ID of 30
 * will become -30). This allows third-party developers to use any persistable ID without having to
 * worry about conflicting with current or future internal persistables.
 */
public interface PersistableRegistrySpi {

  public PersistableIdAndConstructor[] getSupportedPersistables();

  /** Pairs a persistable ID with a no-arg factory for the corresponding class. */
  public static class PersistableIdAndConstructor {
    private final short persistableId;
    // NOTE(review): raw Supplier — presumably Supplier<Persistable>; generics appear stripped
    // by extraction.
    private final Supplier persistableConstructor;

    public PersistableIdAndConstructor(
        final short persistableId,
        final Supplier persistableConstructor) {
      this.persistableId = persistableId;
      this.persistableConstructor = persistableConstructor;
    }

    public short getPersistableId() {
      return persistableId;
    }

    public Supplier getPersistableConstructor() {
      return persistableConstructor;
    }
  }
}
================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/persist/PersistenceUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.persist; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; /** A set of convenience methods for serializing and deserializing persistable objects */ public class PersistenceUtils { private static final Logger LOGGER = LoggerFactory.getLogger(PersistenceUtils.class); public static byte[] toBinary(final Collection persistables) { if (persistables.isEmpty()) { return new byte[] {}; } int byteCount = VarintUtils.unsignedIntByteLength(persistables.size()); final List persistableBinaries = new ArrayList<>(); for (final Persistable persistable : persistables) { final byte[] binary = toBinary(persistable); byteCount += (VarintUtils.unsignedIntByteLength(binary.length) + binary.length); persistableBinaries.add(binary); } final ByteBuffer buf = ByteBuffer.allocate(byteCount); VarintUtils.writeUnsignedInt(persistables.size(), buf); for (final byte[] binary : persistableBinaries) { VarintUtils.writeUnsignedInt(binary.length, buf); buf.put(binary); } return buf.array(); } public static byte[] toBinary(final Persistable[] persistables) { if (persistables.length == 0) { return new byte[] {}; } int byteCount = VarintUtils.unsignedIntByteLength(persistables.length); final List persistableBinaries = Lists.newArrayListWithCapacity(persistables.length); for (final Persistable persistable : persistables) { final byte[] binary = 
toBinary(persistable); byteCount += (VarintUtils.unsignedIntByteLength(binary.length) + binary.length); persistableBinaries.add(binary); } final ByteBuffer buf = ByteBuffer.allocate(byteCount); VarintUtils.writeUnsignedInt(persistables.length, buf); for (final byte[] binary : persistableBinaries) { VarintUtils.writeUnsignedInt(binary.length, buf); buf.put(binary); } return buf.array(); } public static byte[] toClassId(final Persistable persistable) { if (persistable == null) { return new byte[0]; } final Short classId = PersistableFactory.getInstance().getClassIdMapping().get(persistable.getClass()); if (classId != null) { final ByteBuffer buf = ByteBuffer.allocate(2); buf.putShort(classId); return buf.array(); } return new byte[0]; } public static byte[] toClassId(final String className) { if ((className == null) || className.isEmpty()) { return new byte[0]; } Short classId; try { classId = PersistableFactory.getInstance().getClassIdMapping().get(Class.forName(className)); if (classId != null) { final ByteBuffer buf = ByteBuffer.allocate(2); buf.putShort(classId); return buf.array(); } } catch (final ClassNotFoundException e) { LOGGER.warn("Unable to find class", e); } return new byte[0]; } public static Persistable fromClassId(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final short classId = buf.getShort(); final Persistable retVal = PersistableFactory.getInstance().newInstance(classId); return retVal; } public static byte[] toBinary(final Persistable persistable) { if (persistable == null) { return new byte[0]; } final Short classId = PersistableFactory.getInstance().getClassIdMapping().get(persistable.getClass()); if (classId != null) { final byte[] persistableBinary = persistable.toBinary(); final ByteBuffer buf = ByteBuffer.allocate(2 + persistableBinary.length); buf.putShort(classId); buf.put(persistableBinary); return buf.array(); } return new byte[0]; } public static List fromBinaryAsList(final byte[] bytes) { if ((bytes == null) 
|| (bytes.length == 0)) { // the original binary didn't even contain the size of the // array, assume that nothing was persisted return Lists.newArrayList(); } final ByteBuffer buf = ByteBuffer.wrap(bytes); final int size = VarintUtils.readUnsignedInt(buf); final List persistables = Lists.newArrayListWithCapacity(size); for (int i = 0; i < size; i++) { final byte[] persistableBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); persistables.add(fromBinary(persistableBinary)); } return persistables; } public static Persistable fromBinary(final byte[] bytes) { if ((bytes == null) || (bytes.length < 2)) { return null; } final ByteBuffer buf = ByteBuffer.wrap(bytes); final short classId = buf.getShort(); final Persistable retVal = PersistableFactory.getInstance().newInstance(classId); if (retVal == null) { LOGGER.error( "Unable to find persistable with class ID: " + classId + "\nFull Binary is: " + ByteArrayUtils.getHexString(bytes)); return null; } final byte[] persistableBinary = new byte[bytes.length - 2]; buf.get(persistableBinary); retVal.fromBinary(persistableBinary); return retVal; } public static byte[] stripClassId(final byte[] bytes) { if ((bytes == null) || (bytes.length < 2)) { return null; } final ByteBuffer buf = ByteBuffer.wrap(bytes); buf.getShort(); final byte[] persistableBinary = new byte[bytes.length - 2]; buf.get(persistableBinary); return persistableBinary; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/BasicSFCIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.IndexUtils; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class BasicSFCIndexStrategy implements NumericIndexStrategy { private static final Logger LOGGER = LoggerFactory.getLogger(BasicSFCIndexStrategy.class); private SpaceFillingCurve sfc; private NumericDimensionDefinition[] baseDefinitions; public BasicSFCIndexStrategy() {} public 
BasicSFCIndexStrategy( final SpaceFillingCurve sfc, final NumericDimensionDefinition[] baseDefinitions) { this.sfc = sfc; this.baseDefinitions = baseDefinitions; } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... hints) { return getQueryRanges(indexedRange, -1); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxRangeDecomposition, final IndexMetaData... hints) { final List binnedQueries = BinnedNumericDataset.applyBins(indexedRange, baseDefinitions); return new QueryRanges( BinnedSFCUtils.getQueryRanges(binnedQueries, sfc, maxRangeDecomposition, null)); } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { final List insertionIds = new SinglePartitionInsertionIds(partitionKey, sortKey).getCompositeInsertionIds(); if (insertionIds.isEmpty()) { LOGGER.warn("Unexpected empty insertion ID in getRangeForId()"); return null; } final byte[] rowId = insertionIds.get(0); return BinnedSFCUtils.getRangeForId(rowId, baseDefinitions, sfc); } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { final byte[] rowId = ByteArrayUtils.combineArrays( partitionKey == null ? null : partitionKey, sortKey == null ? 
null : sortKey); return new MultiDimensionalCoordinates( new byte[0], BinnedSFCUtils.getCoordinatesForId(rowId, baseDefinitions, sfc)); } @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { return getInsertionIds(indexedData, 1); } @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxDuplicateInsertionIds) { if (indexedData.isEmpty()) { LOGGER.warn("Cannot index empty fields, skipping writing row to index '" + getId() + "'"); return new InsertionIds(); } // we need to duplicate per bin so we can't adhere to max duplication // anyways final List ranges = BinnedNumericDataset.applyBins(indexedData, baseDefinitions); final Set retVal = new HashSet<>(ranges.size()); for (final BinnedNumericDataset range : ranges) { final SinglePartitionInsertionIds binRowIds = TieredSFCIndexStrategy.getRowIdsAtTier(range, null, sfc, null, 0); if (binRowIds != null) { retVal.add(binRowIds); } } return new InsertionIds(retVal); } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return baseDefinitions; } @Override public String getId() { return StringUtils.intToString(hashCode()); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(baseDefinitions); result = (prime * result) + ((sfc == null) ? 
0 : sfc.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if ((obj == null) || (getClass() != obj.getClass())) { return false; } final BasicSFCIndexStrategy other = (BasicSFCIndexStrategy) obj; if (!Arrays.equals(baseDefinitions, other.baseDefinitions)) { return false; } if (sfc == null) { if (other.sfc != null) { return false; } } else if (!sfc.equals(other.sfc)) { return false; } return true; } @Override public byte[] toBinary() { int byteBufferLength = VarintUtils.unsignedIntByteLength(baseDefinitions.length); final List dimensionBinaries = new ArrayList<>(baseDefinitions.length); final byte[] sfcBinary = PersistenceUtils.toBinary(sfc); byteBufferLength += (VarintUtils.unsignedIntByteLength(sfcBinary.length) + sfcBinary.length); for (final NumericDimensionDefinition dimension : baseDefinitions) { final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension); byteBufferLength += (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length); dimensionBinaries.add(dimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); VarintUtils.writeUnsignedInt(baseDefinitions.length, buf); VarintUtils.writeUnsignedInt(sfcBinary.length, buf); buf.put(sfcBinary); for (final byte[] dimensionBinary : dimensionBinaries) { VarintUtils.writeUnsignedInt(dimensionBinary.length, buf); buf.put(dimensionBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); baseDefinitions = new NumericDimensionDefinition[numDimensions]; final byte[] sfcBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); sfc = (SpaceFillingCurve) PersistenceUtils.fromBinary(sfcBinary); for (int i = 0; i < numDimensions; i++) { final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); baseDefinitions[i] = 
(NumericDimensionDefinition) PersistenceUtils.fromBinary(dim); } } @Override public double[] getHighestPrecisionIdRangePerDimension() { return sfc.getInsertionIdRangePerDimension(); } @Override public int getPartitionKeyLength() { int rowIdOffset = 1; for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) { final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize(); if (binSize > 0) { rowIdOffset += binSize; } } return rowIdOffset; } @Override public List createMetaData() { return Collections.emptyList(); } @Override public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... hints) { final BinRange[][] binRangesPerDimension = BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions); return new MultiDimensionalCoordinateRanges[] { BinnedSFCUtils.getCoordinateRanges( binRangesPerDimension, sfc, baseDefinitions.length, null)}; } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { return IndexUtils.getInsertionPartitionKeys(this, insertionData); } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return IndexUtils.getQueryPartitionKeys(this, queryData, hints); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/RangeDecomposition.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc;

import org.locationtech.geowave.core.index.ByteArrayRange;

/**
 * This class encapsulates a set of ranges returned from a space filling curve decomposition.
 */
public class RangeDecomposition {
  // The decomposed ranges on the 1-d curve; assigned once and never modified.
  private final ByteArrayRange[] ranges;

  /**
   * Constructor used to create a new Range Decomposition object.
   *
   * @param ranges ranges for the space filling curve
   */
  public RangeDecomposition(final ByteArrayRange[] ranges) {
    this.ranges = ranges;
  }

  /** @return the ranges associated with this Range Decomposition */
  public ByteArrayRange[] getRanges() {
    return ranges;
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/SFCDimensionDefinition.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;

/**
 * Pairs a {@link NumericDimensionDefinition} with the number of bits of precision (cardinality)
 * that dimension occupies on a space filling curve. All dimension behavior is delegated to the
 * wrapped definition; this class only contributes the precision and its (de)serialization.
 */
public class SFCDimensionDefinition implements NumericDimensionDefinition {
  private int bitsOfPrecision;
  private NumericDimensionDefinition dimensionDefinition;

  /** No-arg constructor required for persistence; state is populated by fromBinary(). */
  public SFCDimensionDefinition() {}

  /**
   * @param dimensionDefinition the dimension to wrap for use on a space filling curve
   * @param bitsOfPrecision the number of SFC bits allocated to this dimension
   */
  public SFCDimensionDefinition(
      final NumericDimensionDefinition dimensionDefinition,
      final int bitsOfPrecision) {
    this.bitsOfPrecision = bitsOfPrecision;
    this.dimensionDefinition = dimensionDefinition;
  }

  /** @return the number of SFC bits allocated to this dimension */
  public int getBitsOfPrecision() {
    return bitsOfPrecision;
  }

  /** @return the wrapped dimension definition */
  public NumericDimensionDefinition getDimensionDefinition() {
    return dimensionDefinition;
  }

  // --- pure delegation to the wrapped definition ---

  @Override
  public NumericData getFullRange() {
    return dimensionDefinition.getFullRange();
  }

  /**
   * @param range numeric data to be normalized
   * @return a BinRange[] based on numeric data
   */
  @Override
  public BinRange[] getNormalizedRanges(final NumericData range) {
    return dimensionDefinition.getNormalizedRanges(range);
  }

  @Override
  public double normalize(final double value) {
    return dimensionDefinition.normalize(value);
  }

  @Override
  public double denormalize(final double value) {
    return dimensionDefinition.denormalize(value);
  }

  @Override
  public NumericRange getDenormalizedRange(final BinRange range) {
    return dimensionDefinition.getDenormalizedRange(range);
  }

  @Override
  public int getFixedBinIdSize() {
    return dimensionDefinition.getFixedBinIdSize();
  }

  @Override
  public double getRange() {
    return dimensionDefinition.getRange();
  }

  @Override
  public NumericRange getBounds() {
    return dimensionDefinition.getBounds();
  }

  // --- persistence: varint-encoded precision followed by the wrapped definition's bytes ---

  @Override
  public byte[] toBinary() {
    final byte[] wrappedBytes = PersistenceUtils.toBinary(dimensionDefinition);
    final ByteBuffer buffer =
        ByteBuffer.allocate(
            wrappedBytes.length + VarintUtils.unsignedIntByteLength(bitsOfPrecision));
    VarintUtils.writeUnsignedInt(bitsOfPrecision, buffer);
    buffer.put(wrappedBytes);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    bitsOfPrecision = VarintUtils.readUnsignedInt(buffer);
    final byte[] wrappedBytes = new byte[buffer.remaining()];
    buffer.get(wrappedBytes);
    dimensionDefinition =
        (NumericDimensionDefinition) PersistenceUtils.fromBinary(wrappedBytes);
  }

  @Override
  public int hashCode() {
    // same value as the conventional 31-based accumulation over both fields
    final int dimensionHash =
        (dimensionDefinition == null) ? 0 : dimensionDefinition.hashCode();
    return (31 * (31 + bitsOfPrecision)) + dimensionHash;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if ((obj == null) || (getClass() != obj.getClass())) {
      return false;
    }
    final SFCDimensionDefinition that = (SFCDimensionDefinition) obj;
    if (bitsOfPrecision != that.bitsOfPrecision) {
      return false;
    }
    return (dimensionDefinition == null)
        ? (that.dimensionDefinition == null)
        : dimensionDefinition.equals(that.dimensionDefinition);
  }
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/SFCFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc; import org.locationtech.geowave.core.index.sfc.hilbert.HilbertSFC; import org.locationtech.geowave.core.index.sfc.xz.XZOrderSFC; import org.locationtech.geowave.core.index.sfc.zorder.ZOrderSFC; /** * Factory used to generate an instance of a known space filling curve type */ public class SFCFactory { /** * * Generates a SFC instance based on the dimensions definition and the space filling curve type * * @param dimensionDefs specifies the min, max, and cardinality for this instance of the SFC * @param sfc specifies the type (Hilbert, ZOrder) of space filling curve to generate * @return a space filling curve instance generated based on the supplied parameters */ public static SpaceFillingCurve createSpaceFillingCurve( final SFCDimensionDefinition[] dimensionDefs, final SFCType sfc) { switch (sfc) { case HILBERT: return new HilbertSFC(dimensionDefs); case ZORDER: return new ZOrderSFC(dimensionDefs); case XZORDER: return new XZOrderSFC(dimensionDefs); } return null; } /** * Implemented and registered Space Filling curve types */ public static enum SFCType { HILBERT, ZORDER, XZORDER } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/SpaceFillingCurve.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc;

import java.math.BigInteger;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * Base class which defines common methods for any space filling curve. Hosts standard access
 * methods shared between implementations. A space filling curve is expected to provide a
 * reversible n-dimensional <-> 1-dimensional mapping.
 */
public interface SpaceFillingCurve extends Persistable {
  /**
   * Maps a n-dimensional value to a single dimension, i.e. [12,33] -> 0033423
   *
   * @param values n-dimensional value to be encoded in the SFC. The size of value corresponds to
   *        the number of dimensions
   * @return value derived from the SFC transform. The value is left padded based on the number
   *         of bits in the SFC dimension
   */
  public byte[] getId(Double[] values);

  /**
   * Gets n-dimensional ranges from a single dimension, i.e. 0033423 -> [12,33]
   *
   * @param id the SFC ID to calculate the ranges of values represented.
   * @return the valid ranges per dimension of a single SFC ID derived from the SFC transform.
   */
  public MultiDimensionalNumericData getRanges(byte[] id);

  /**
   * Gets n-dimensional coordinates from a single dimension
   *
   * @param id the SFC ID to calculate the coordinates for each dimension.
   * @return the coordinate in each dimension for the given ID
   */
  public long[] getCoordinates(byte[] id);

  /**
   * Returns a collection of ranges on the 1-d space filling curve that correspond to the
   * n-dimensional range described in the query parameter.
   *
   * <p> This method will decompose the range all the way down to the unit interval of 1.
   *
   * @param query describes the n-dimensional query window that will be decomposed
   * @return an object containing the ranges on the SFC that overlap the parameters supplied in
   *         the query object
   */
  public RangeDecomposition decomposeRangeFully(MultiDimensionalNumericData query);

  /**
   * Returns a collection of ranges on the 1-d space filling curve that correspond to the
   * n-dimensional range described in the query parameter.
   *
   * <p> This method will roll up the ranges based on the maxRanges parameter. Ranges will be
   * "connected" based on the minimization of distance between the end of one range and the start
   * of the next.
   *
   * @param query describes the n-dimensional query window that will be decomposed
   * @return an object containing the ranges on the SFC that overlap the parameters supplied in
   *         the query object
   */
  public RangeDecomposition decomposeRange(
      MultiDimensionalNumericData query,
      boolean overInclusiveOnEdge,
      int maxRanges);

  /**
   * Determines the estimated number of rows a multi-dimensional range will span within this
   * space filling curve
   *
   * @param data describes the n-dimensional range to estimate the row count for
   * @return an estimate of the row count for the ranges given within this space filling curve
   */
  public BigInteger getEstimatedIdCount(MultiDimensionalNumericData data);

  /**
   * Determines the coordinates within this space filling curve for a dimension given a range
   *
   * @param minValue describes the minimum of a range in a single dimension used to determine the
   *        SFC coordinate range
   * @param maxValue describes the maximum of a range in a single dimension used to determine the
   *        SFC coordinate range
   * @param dimension the dimension
   * @return the range of coordinates as an array where the first element is the min and the
   *         second element is the max
   */
  public long[] normalizeRange(double minValue, double maxValue, int dimension);

  /**
   * Get the range/size of a single insertion ID for each dimension
   *
   * @return the range of a single insertion ID for each dimension
   */
  public double[] getInsertionIdRangePerDimension();
}


================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/binned/BinnedSFCUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.binned; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Coordinate; import org.locationtech.geowave.core.index.CoordinateRange; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; public class BinnedSFCUtils { public static List getQueryRanges( final List binnedQueries, final SpaceFillingCurve sfc, final int maxRanges, final Byte tier) { final List queryRanges = new ArrayList<>(); int maxRangeDecompositionPerBin = maxRanges; if ((maxRanges > 1) && (binnedQueries.size() > 1)) { maxRangeDecompositionPerBin = (int) Math.ceil((double) maxRanges / (double) 
binnedQueries.size()); } for (final BinnedNumericDataset binnedQuery : binnedQueries) { final RangeDecomposition rangeDecomp = sfc.decomposeRange(binnedQuery, true, maxRangeDecompositionPerBin); final byte[] tierAndBinId = tier != null ? ByteArrayUtils.combineArrays(new byte[] {tier // we're assuming tiers only go to 127 (the max byte // value) }, binnedQuery.getBinId()) : binnedQuery.getBinId(); queryRanges.add( new SinglePartitionQueryRanges(tierAndBinId, Arrays.asList(rangeDecomp.getRanges()))); } return queryRanges; } public static MultiDimensionalCoordinateRanges getCoordinateRanges( final BinRange[][] binRangesPerDimension, final SpaceFillingCurve sfc, final int numDimensions, final Byte tier) { final CoordinateRange[][] coordinateRangesPerDimension = new CoordinateRange[numDimensions][]; for (int d = 0; d < coordinateRangesPerDimension.length; d++) { coordinateRangesPerDimension[d] = new CoordinateRange[binRangesPerDimension[d].length]; for (int i = 0; i < binRangesPerDimension[d].length; i++) { final long[] range = sfc.normalizeRange( binRangesPerDimension[d][i].getNormalizedMin(), binRangesPerDimension[d][i].getNormalizedMax(), d); coordinateRangesPerDimension[d][i] = new CoordinateRange(range[0], range[1], binRangesPerDimension[d][i].getBinId()); } } if (tier == null) { return new MultiDimensionalCoordinateRanges(new byte[0], coordinateRangesPerDimension); } return new MultiDimensionalCoordinateRanges(new byte[] {tier}, coordinateRangesPerDimension); } public static SinglePartitionInsertionIds getSingleBinnedInsertionId( final BigInteger rowCount, final Byte multiDimensionalId, final BinnedNumericDataset index, final SpaceFillingCurve sfc) { if (rowCount.equals(BigInteger.ONE)) { final byte[] tierAndBinId = multiDimensionalId != null ? 
ByteArrayUtils.combineArrays(new byte[] {multiDimensionalId}, index.getBinId()) : index.getBinId(); final Double[] minValues = index.getMinValuesPerDimension(); final Double[] maxValues = index.getMaxValuesPerDimension(); byte[] singleId = null; if (Arrays.equals(maxValues, minValues)) { singleId = sfc.getId(minValues); } else { final byte[] minId = sfc.getId(minValues); final byte[] maxId = sfc.getId(maxValues); if (Arrays.equals(minId, maxId)) { singleId = minId; } } if (singleId != null) { return new SinglePartitionInsertionIds(tierAndBinId, singleId); } } return null; } public static Coordinate[] getCoordinatesForId( final byte[] rowId, final NumericDimensionDefinition[] baseDefinitions, final SpaceFillingCurve sfc) { final SFCIdAndBinInfo sfcIdAndBinInfo = getSFCIdAndBinInfo(rowId, baseDefinitions); final long[] coordinateValues = sfc.getCoordinates(sfcIdAndBinInfo.sfcId); if (coordinateValues == null) { return null; } final Coordinate[] retVal = new Coordinate[coordinateValues.length]; for (int i = 0; i < coordinateValues.length; i++) { final byte[] bin = sfcIdAndBinInfo.binIds.get(i); retVal[i] = new Coordinate(coordinateValues[i], bin); } return retVal; } public static MultiDimensionalNumericData getRangeForId( final byte[] rowId, final NumericDimensionDefinition[] baseDefinitions, final SpaceFillingCurve sfc) { final SFCIdAndBinInfo sfcIdAndBinInfo = getSFCIdAndBinInfo(rowId, baseDefinitions); final MultiDimensionalNumericData numericData = sfc.getRanges(sfcIdAndBinInfo.sfcId); // now we need to unapply the bins to the data, denormalizing the // ranges to the native bounds if (sfcIdAndBinInfo.rowIdOffset > 1) { final NumericData[] data = numericData.getDataPerDimension(); for (final Entry entry : sfcIdAndBinInfo.binIds.entrySet()) { final int dimension = entry.getKey(); final NumericRange range = baseDefinitions[dimension].getDenormalizedRange( new BinRange( entry.getValue(), data[dimension].getMin(), data[dimension].getMax(), false)); data[dimension] = 
range; } return new BasicNumericDataset(data); } return numericData; } private static SFCIdAndBinInfo getSFCIdAndBinInfo( final byte[] rowId, final NumericDimensionDefinition[] baseDefinitions) { final Map binIds = new HashMap<>(); // one for the tier int rowIdOffset = 1; for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) { final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize(); if (binSize > 0) { binIds.put(dimensionIdx, Arrays.copyOfRange(rowId, rowIdOffset, rowIdOffset + binSize)); rowIdOffset += binSize; } } final byte[] sfcId = Arrays.copyOfRange(rowId, rowIdOffset, rowId.length); return new SFCIdAndBinInfo(sfcId, binIds, rowIdOffset); } private static class SFCIdAndBinInfo { private final byte[] sfcId; private final Map binIds; private final int rowIdOffset; public SFCIdAndBinInfo( final byte[] sfcId, final Map binIds, final int rowIdOffset) { super(); this.sfcId = sfcId; this.binIds = binIds; this.rowIdOffset = rowIdOffset; } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/HilbertSFC.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.hilbert; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; import com.google.uzaygezen.core.CompactHilbertCurve; import com.google.uzaygezen.core.MultiDimensionalSpec; /** * Implementation of a Compact Hilbert space filling curve */ public class HilbertSFC implements SpaceFillingCurve { private static class QueryCacheKey { private final HilbertSFC sfc; private final Double[] minsPerDimension; private final Double[] maxesPerDimension; private final boolean overInclusiveOnEdge; private final int maxFilteredIndexedRanges; public QueryCacheKey( final HilbertSFC sfc, final Double[] minsPerDimension, final Double[] maxesPerDimension, final boolean overInclusiveOnEdge, final int maxFilteredIndexedRanges) { this.sfc = sfc; this.minsPerDimension = minsPerDimension; this.maxesPerDimension = maxesPerDimension; this.overInclusiveOnEdge = overInclusiveOnEdge; this.maxFilteredIndexedRanges = maxFilteredIndexedRanges; } @Override public int 
hashCode() { final int prime = 31; int result = 1; result = (prime * result) + maxFilteredIndexedRanges; result = (prime * result) + Arrays.hashCode(maxesPerDimension); result = (prime * result) + Arrays.hashCode(minsPerDimension); result = (prime * result) + (overInclusiveOnEdge ? 1231 : 1237); result = (prime * result) + ((sfc == null) ? 0 : sfc.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final QueryCacheKey other = (QueryCacheKey) obj; if (maxFilteredIndexedRanges != other.maxFilteredIndexedRanges) { return false; } if (!Arrays.equals(maxesPerDimension, other.maxesPerDimension)) { return false; } if (!Arrays.equals(minsPerDimension, other.minsPerDimension)) { return false; } if (overInclusiveOnEdge != other.overInclusiveOnEdge) { return false; } if (sfc == null) { if (other.sfc != null) { return false; } } else if (!sfc.equals(other.sfc)) { return false; } return true; } } private static final int MAX_CACHED_QUERIES = 500; private final static Cache QUERY_DECOMPOSITION_CACHE = Caffeine.newBuilder().maximumSize(MAX_CACHED_QUERIES).initialCapacity( MAX_CACHED_QUERIES).build(); protected CompactHilbertCurve compactHilbertCurve; protected SFCDimensionDefinition[] dimensionDefinitions; protected int totalPrecision; /** Tunables * */ private static final boolean REMOVE_VACUUM = true; protected HilbertSFCOperations getIdOperations; protected HilbertSFCOperations decomposeQueryOperations; public HilbertSFC() {} /** * Use the SFCFactory.createSpaceFillingCurve method - don't call this constructor directly */ public HilbertSFC(final SFCDimensionDefinition[] dimensionDefs) { init(dimensionDefs); } protected void init(final SFCDimensionDefinition[] dimensionDefs) { final List bitsPerDimension = new ArrayList<>(); totalPrecision = 0; for (final SFCDimensionDefinition dimension : dimensionDefs) { 
bitsPerDimension.add(dimension.getBitsOfPrecision()); totalPrecision += dimension.getBitsOfPrecision(); } compactHilbertCurve = new CompactHilbertCurve(new MultiDimensionalSpec(bitsPerDimension)); dimensionDefinitions = dimensionDefs; setOptimalOperations(totalPrecision, bitsPerDimension, dimensionDefs); } protected void setOptimalOperations( final int totalPrecision, final List bitsPerDimension, final SFCDimensionDefinition[] dimensionDefs) { boolean primitiveForGetId = true; final boolean primitiveForQueryDecomposition = totalPrecision <= 62L; for (final Integer bits : bitsPerDimension) { if (bits > 48) { // if in any one dimension, more than 48 bits are used, we need // to use bigdecimals primitiveForGetId = false; break; } } if (primitiveForGetId) { final PrimitiveHilbertSFCOperations primitiveOps = new PrimitiveHilbertSFCOperations(); primitiveOps.init(dimensionDefs); getIdOperations = primitiveOps; if (primitiveForQueryDecomposition) { decomposeQueryOperations = primitiveOps; } else { final UnboundedHilbertSFCOperations unboundedOps = new UnboundedHilbertSFCOperations(); unboundedOps.init(dimensionDefs); decomposeQueryOperations = unboundedOps; } } else { final UnboundedHilbertSFCOperations unboundedOps = new UnboundedHilbertSFCOperations(); unboundedOps.init(dimensionDefs); getIdOperations = unboundedOps; if (primitiveForQueryDecomposition) { final PrimitiveHilbertSFCOperations primitiveOps = new PrimitiveHilbertSFCOperations(); primitiveOps.init(dimensionDefs); decomposeQueryOperations = primitiveOps; } else { decomposeQueryOperations = unboundedOps; } } } /** * {@inheritDoc} */ @Override public byte[] getId(final Double[] values) { return getIdOperations.convertToHilbert(values, compactHilbertCurve, dimensionDefinitions); } /** * {@inheritDoc} */ @Override public RangeDecomposition decomposeRangeFully(final MultiDimensionalNumericData query) { return decomposeRange(query, true, -1); } // TODO: improve this method - min/max not being calculated optimally 
/** {@inheritDoc} */
@Override
public RangeDecomposition decomposeRange(
    final MultiDimensionalNumericData query,
    final boolean overInclusiveOnEdge,
    final int maxFilteredIndexedRanges) {
  // a negative max is interpreted as "unlimited" ranges
  final int maxRanges =
      (maxFilteredIndexedRanges < 0) ? Integer.MAX_VALUE : maxFilteredIndexedRanges;
  final QueryCacheKey key =
      new QueryCacheKey(
          this,
          query.getMinValuesPerDimension(),
          query.getMaxValuesPerDimension(),
          overInclusiveOnEdge,
          maxRanges);
  // range decomposition is expensive, so memoize results per unique query
  return QUERY_DECOMPOSITION_CACHE.get(
      key,
      k -> decomposeQueryOperations.decomposeRange(
          query.getDataPerDimension(),
          compactHilbertCurve,
          dimensionDefinitions,
          totalPrecision,
          maxRanges,
          REMOVE_VACUUM,
          overInclusiveOnEdge));
}

/**
 * Left-pads or truncates the given big-endian bytes so exactly {@code expectedByteCount} bytes
 * are returned.
 *
 * @param expectedByteCount the exact number of bytes required
 * @param bytes the big-endian encoded value
 * @return a byte array of length {@code expectedByteCount}
 */
protected static byte[] fitExpectedByteCount(final int expectedByteCount, final byte[] bytes) {
  final int leftPadding = expectedByteCount - bytes.length;
  if (leftPadding > 0) {
    // too short: left-pad with zeroes (value is unchanged in big-endian form)
    final byte[] zeroes = new byte[leftPadding];
    Arrays.fill(zeroes, (byte) 0);
    return ByteArrayUtils.combineArrays(zeroes, bytes);
  } else if (leftPadding < 0) {
    // too long: drop the leading bytes; if the dropped prefix was non-zero the value cannot be
    // represented in the expected width, so saturate to the maximum (all 0xFF bytes)
    final byte[] truncatedBytes = new byte[expectedByteCount];
    if (bytes[0] != 0) {
      Arrays.fill(truncatedBytes, (byte) 255);
    } else {
      System.arraycopy(bytes, -leftPadding, truncatedBytes, 0, expectedByteCount);
    }
    return truncatedBytes;
  }
  return bytes;
}

@Override
public byte[] toBinary() {
  // each dimension definition is persisted as a varint length followed by its serialized bytes;
  // note: the element type parameter was missing here (raw List), which breaks the byte[]
  // iteration below — restored to List<byte[]>
  final List<byte[]> dimensionDefBinaries = new ArrayList<>(dimensionDefinitions.length);
  int bufferLength = 0;
  for (final SFCDimensionDefinition sfcDimension : dimensionDefinitions) {
    final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(sfcDimension);
    bufferLength +=
        (sfcDimensionBinary.length
            + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length));
    dimensionDefBinaries.add(sfcDimensionBinary);
  }
  bufferLength += VarintUtils.unsignedIntByteLength(dimensionDefinitions.length);
  final ByteBuffer buf = ByteBuffer.allocate(bufferLength);
  VarintUtils.writeUnsignedInt(dimensionDefinitions.length, buf);
  for (final byte[] dimensionDefBinary : dimensionDefBinaries) {
    VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf);
    buf.put(dimensionDefBinary);
  }
  return buf.array();
}

@Override
public void fromBinary(final byte[] bytes) {
  final ByteBuffer buf = ByteBuffer.wrap(bytes);
  final int numDimensions = VarintUtils.readUnsignedInt(buf);
  dimensionDefinitions = new SFCDimensionDefinition[numDimensions];
  for (int i = 0; i < numDimensions; i++) {
    final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
    dimensionDefinitions[i] = (SFCDimensionDefinition) PersistenceUtils.fromBinary(dim);
  }
  // rebuild derived state (curve, operations, total precision) from the dimension definitions
  init(dimensionDefinitions);
}

@Override
public int hashCode() {
  final int prime = 31;
  int result = 1;
  final String className = getClass().getName();
  result = (prime * result) + ((className == null) ? 0 : className.hashCode());
  result = (prime * result) + Arrays.hashCode(dimensionDefinitions);
  return result;
}

@Override
public boolean equals(final Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null) {
    return false;
  }
  if (getClass() != obj.getClass()) {
    return false;
  }
  final HilbertSFC other = (HilbertSFC) obj;
  // equality is determined solely by the dimension definitions (consistent with hashCode)
  return Arrays.equals(dimensionDefinitions, other.dimensionDefinitions);
}

@Override
public BigInteger getEstimatedIdCount(final MultiDimensionalNumericData data) {
  return getIdOperations.getEstimatedIdCount(data, dimensionDefinitions);
}

@Override
public MultiDimensionalNumericData getRanges(final byte[] id) {
  return getIdOperations.convertFromHilbert(id, compactHilbertCurve, dimensionDefinitions);
}

@Override
public long[] normalizeRange(final double minValue, final double maxValue, final int dimension) {
  return getIdOperations.normalizeRange(
      minValue,
      maxValue,
      dimension,
      dimensionDefinitions[dimension]);
}

@Override
public long[] getCoordinates(final byte[] id) {
  return getIdOperations.indicesFromHilbert(id, compactHilbertCurve, dimensionDefinitions);
}

@Override
public double[] getInsertionIdRangePerDimension() {
  return getIdOperations.getInsertionIdRangePerDimension(dimensionDefinitions);
}
}
================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/HilbertSFCOperations.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc.hilbert;

import java.math.BigInteger;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.sfc.RangeDecomposition;
import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;
import com.google.uzaygezen.core.CompactHilbertCurve;

/**
 * This interface is used to abstract the details of operations used by the hilbert space filling
 * curve, in particular to enable both primitive-based operations for performance (in cases where
 * the bits of precision can be adequately represented by primitives) and non-primitive based
 * operations for unbounded bits of precision.
 */
public interface HilbertSFCOperations {
  /**
   * initialize this set of operations with the given dimension definitions
   *
   * @param dimensionDefinitions the dimension definitions to use
   */
  public void init(SFCDimensionDefinition[] dimensionDefinitions);

  /**
   * Convert the raw values (ordered per dimension) to a single SFC value
   *
   * @param values a raw value per dimension in order
   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion
   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values
   * @return the Hilbert SFC value
   */
  public byte[] convertToHilbert(
      Double[] values,
      CompactHilbertCurve compactHilbertCurve,
      SFCDimensionDefinition[] dimensionDefinitions);

  /**
   * Convert the single SFC value to the ranges of raw values that it represents
   *
   * @param hilbertValue the computed hilbert value to invert back to native coordinates
   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion
   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values
   * @return the ranges of values that the hilbert represents, inclusive on start and exclusive on
   *         end for each range
   */
  public MultiDimensionalNumericData convertFromHilbert(
      byte[] hilbertValue,
      CompactHilbertCurve compactHilbertCurve,
      SFCDimensionDefinition[] dimensionDefinitions);

  /**
   * Convert the single SFC value to the per dimension SFC coordinates that it represents
   *
   * @param hilbertValue the computed hilbert value to invert back to integer coordinates per
   *        dimension
   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion
   * @param dimensionDefinitions a set of dimension definitions to use to determine the bits of
   *        precision per dimension that is expected in the compact hilbert curve
   * @return the integer coordinate value per dimension that the given hilbert value represents
   */
  public long[] indicesFromHilbert(
      byte[] hilbertValue,
      CompactHilbertCurve compactHilbertCurve,
      SFCDimensionDefinition[] dimensionDefinitions);

  /**
   * Decompose the raw range per dimension values into an optimal set of compact Hilbert SFC ranges
   *
   * @param rangePerDimension the raw range per dimension
   * @param compactHilbertCurve the compact Hilbert curve to use for the conversion
   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values
   * @param totalPrecision the total precision of the dimension definitions, for convenience
   * @param maxFilteredIndexedRanges the maximum number of ranges, if < 0 it will be unlimited
   * @param removeVacuum a flag to pass to the compact hilbert curve range decomposition
   * @param overInclusiveOnEdge when true, a range endpoint that falls exactly on a bin edge is
   *        normalized to include the adjacent bin (over-inclusive) rather than exclude it
   * @return the optimal SFC range decomposition for the raw-valued ranges
   */
  public RangeDecomposition decomposeRange(
      NumericData[] rangePerDimension,
      CompactHilbertCurve compactHilbertCurve,
      SFCDimensionDefinition[] dimensionDefinitions,
      int totalPrecision,
      int maxFilteredIndexedRanges,
      boolean removeVacuum,
      boolean overInclusiveOnEdge);

  /**
   * Get a quick (minimal complexity calculation) estimate of the total row IDs a particular data
   * would require to fully cover with SFC values
   *
   * @param data the dataset
   * @param dimensionDefinitions a set of dimension definitions to use to normalize the raw values
   * @return the total estimated row IDs the data would require to fully cover with SFC values
   */
  public BigInteger getEstimatedIdCount(
      MultiDimensionalNumericData data,
      SFCDimensionDefinition[] dimensionDefinitions);

  /**
   * Determines the coordinates a given range will span within this space filling curve.
   *
   * @param minValue the minimum value
   * @param maxValue the maximum value
   * @param dimension the ordinal of the dimension on this space filling curve
   * @param boundedDimensionDefinition the dimension definition to use to normalize the raw values
   * @return the range of coordinates (i.e. [0] would be the min coordinate and [1] would be the
   *         max coordinate)
   * @throws IllegalArgumentException if the values do not fit within the dimension definition
   */
  public long[] normalizeRange(
      double minValue,
      double maxValue,
      int dimension,
      final SFCDimensionDefinition boundedDimensionDefinition) throws IllegalArgumentException;

  /**
   * Get the range/size of a single insertion ID for each dimension
   *
   * @param dimensionDefinitions a set of dimension definitions to use to calculate the range of
   *        each insertion ID
   * @return the range of a single insertion ID for each dimension
   */
  public double[] getInsertionIdRangePerDimension(SFCDimensionDefinition[] dimensionDefinitions);
}
================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/PrimitiveHilbertSFCOperations.java ================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.hilbert; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import com.google.common.base.Functions; import com.google.common.collect.ImmutableList; import com.google.uzaygezen.core.BacktrackingQueryBuilder; import com.google.uzaygezen.core.BitVector; import com.google.uzaygezen.core.BitVectorFactories; import com.google.uzaygezen.core.CompactHilbertCurve; import com.google.uzaygezen.core.FilteredIndexRange; import com.google.uzaygezen.core.LongContent; import com.google.uzaygezen.core.PlainFilterCombiner; import com.google.uzaygezen.core.QueryBuilder; import com.google.uzaygezen.core.RegionInspector; import com.google.uzaygezen.core.SimpleRegionInspector; import com.google.uzaygezen.core.ZoomingSpaceVisitorAdapter; import com.google.uzaygezen.core.ranges.LongRange; import com.google.uzaygezen.core.ranges.LongRangeHome; /** * This supports Compact Hilbert SFC operations using a primitive long internally to represent * intermediate results. 
This can be significantly faster than using unbounded representations such * as BigInteger, but can only support up to certain levels of precision. For getID() operations it * is currently used if no single dimension is more than 48 bits of precision, and for query * decomposition it is currently used if the total precision is <= 62 bits. */ public class PrimitiveHilbertSFCOperations implements HilbertSFCOperations { protected static final long UNIT_CELL_SIZE = (long) Math.pow(2, 19); protected long[] binsPerDimension; protected long minHilbertValue; protected long maxHilbertValue; @Override public void init(final SFCDimensionDefinition[] dimensionDefs) { binsPerDimension = new long[dimensionDefs.length]; int totalPrecision = 0; for (int d = 0; d < dimensionDefs.length; d++) { final SFCDimensionDefinition dimension = dimensionDefs[d]; binsPerDimension[d] = (long) Math.pow(2, dimension.getBitsOfPrecision()); totalPrecision += dimension.getBitsOfPrecision(); } minHilbertValue = 0; maxHilbertValue = (long) (Math.pow(2, totalPrecision) - 1); } @Override public byte[] convertToHilbert( final Double[] values, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { final List dimensionValues = new ArrayList<>(); // Compare the number of dimensions to the number of values sent in if (dimensionDefinitions.length != values.length) { throw new ArrayIndexOutOfBoundsException( "Number of dimensions supplied (" + values.length + ") is different than initialized (" + dimensionDefinitions.length + ")."); } // Loop through each value, then normalize the value based on the // dimension definition for (int i = 0; i < dimensionDefinitions.length; i++) { dimensionValues.add( normalizeDimension( dimensionDefinitions[i], values[i], binsPerDimension[i], false, false)); } // Convert the normalized values to a BitVector final BitVector hilbertBitVector = convertToHilbert(dimensionValues, compactHilbertCurve, dimensionDefinitions); return 
hilbertBitVector.toBigEndianByteArray(); } /** * * Converts the incoming values (one per dimension) into a BitVector using the Compact Hilbert * instance. BitVector is a wrapper to allow values longer than 64 bits. * * @param values n-dimensional point to transoform to a point on the hilbert SFC * @return point on hilbert SFC */ private BitVector convertToHilbert( final List values, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { final BitVector[] bitVectors = new BitVector[values.size()]; final BitVector hilbertBitVector = BitVectorFactories.OPTIMAL.apply(compactHilbertCurve.getSpec().sumBitsPerDimension()); for (int i = 0; i < values.size(); i++) { bitVectors[i] = BitVectorFactories.OPTIMAL.apply(dimensionDefinitions[i].getBitsOfPrecision()); bitVectors[i].copyFrom(values.get(i)); } synchronized (compactHilbertCurve) { compactHilbertCurve.index(bitVectors, 0, hilbertBitVector); } return hilbertBitVector; } @Override public long[] indicesFromHilbert( final byte[] hilbertValue, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { // because it returns an array of longs right now, just use a static // method that the unbounded operations can use as well return internalIndicesFromHilbert(hilbertValue, compactHilbertCurve, dimensionDefinitions); } protected static long[] internalIndicesFromHilbert( final byte[] hilbertValue, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { final BitVector[] perDimensionBitVectors = indexInverse(hilbertValue, compactHilbertCurve, dimensionDefinitions); final long[] retVal = new long[dimensionDefinitions.length]; for (int i = 0; i < retVal.length; i++) { retVal[i] = perDimensionBitVectors[i].toExactLong(); } return retVal; } @Override public MultiDimensionalNumericData convertFromHilbert( final byte[] hilbertValue, final CompactHilbertCurve compactHilbertCurve, final 
SFCDimensionDefinition[] dimensionDefinitions) { final BitVector[] perDimensionBitVectors = indexInverse(hilbertValue, compactHilbertCurve, dimensionDefinitions); final NumericRange[] retVal = new NumericRange[dimensionDefinitions.length]; for (int i = 0; i < retVal.length; i++) { retVal[i] = denormalizeDimension( dimensionDefinitions[i], perDimensionBitVectors[i].toExactLong(), binsPerDimension[i]); } return new BasicNumericDataset(retVal); } protected static BitVector[] indexInverse( byte[] hilbertValue, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { final BitVector[] perDimensionBitVectors = new BitVector[dimensionDefinitions.length]; final int bits = compactHilbertCurve.getSpec().sumBitsPerDimension(); final BitVector hilbertBitVector = BitVectorFactories.OPTIMAL.apply(bits); final int bytes = ((bits + 7) / 8); if (hilbertValue.length < bytes) { hilbertValue = Arrays.copyOf(hilbertValue, bytes); } hilbertBitVector.copyFromBigEndian(hilbertValue); for (int i = 0; i < dimensionDefinitions.length; i++) { perDimensionBitVectors[i] = BitVectorFactories.OPTIMAL.apply(dimensionDefinitions[i].getBitsOfPrecision()); } synchronized (compactHilbertCurve) { compactHilbertCurve.indexInverse(hilbertBitVector, perDimensionBitVectors); } return perDimensionBitVectors; } /** * * Used to normalize the value based on the dimension definition, which includes the dimensional * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented * values. 
* * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension * @param value value to be normalized * @param bins precomputed number of bins in this dimension the number of bins expected based on * the cardinality of the definition * @param isMin flag indicating if this value is a minimum of a range in which case it needs to be * inclusive on a boundary, otherwise it is exclusive * @return value after normalization * @throws IllegalArgumentException thrown when the value passed doesn't fit with in the dimension * definition provided */ public long normalizeDimension( final SFCDimensionDefinition boundedDimensionDefinition, final double value, final long bins, final boolean isMin, final boolean overInclusiveOnEdge) throws IllegalArgumentException { final double normalizedValue = boundedDimensionDefinition.normalize(value); if ((normalizedValue < 0) || (normalizedValue > 1)) { throw new IllegalArgumentException( "Value (" + value + ") is not within dimension bounds. The normalized value (" + normalizedValue + ") must be within (0,1)"); } // scale it to a value within the bits of precision, // because max is handled as exclusive and min is inclusive, we need to // handle the edge differently if ((isMin && !overInclusiveOnEdge) || (!isMin && overInclusiveOnEdge)) { // this will round up on the edge return (long) Math.min(Math.floor(normalizedValue * bins), bins - 1); } else { // this will round down on the edge return (long) Math.max(Math.ceil(normalizedValue * bins) - 1L, 0); } } /** * * Used to normalize the value based on the dimension definition, which includes the dimensional * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented * values. 
* * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension * @param value hilbert value to be denormalized * @param bins precomputed number of bins in this dimension the number of bins expected based on * the cardinality of the definition * @return range of values representing this hilbert value (exlusive on the end) * @throws IllegalArgumentException thrown when the value passed doesn't fit with in the hilbert * SFC for the dimension definition provided */ private NumericRange denormalizeDimension( final SFCDimensionDefinition boundedDimensionDefinition, final long value, final long bins) throws IllegalArgumentException { final double min = (double) (value) / (double) bins; final double max = (double) (value + 1) / (double) bins; if ((min < 0) || (min > 1)) { throw new IllegalArgumentException( "Value (" + value + ") is not within bounds. The normalized value (" + min + ") must be within (0,1)"); } if ((max < 0) || (max > 1)) { throw new IllegalArgumentException( "Value (" + value + ") is not within bounds. 
The normalized value (" + max + ") must be within (0,1)"); } // scale it to a value within the dimension definition range return new NumericRange( boundedDimensionDefinition.denormalize(min), boundedDimensionDefinition.denormalize(max)); } @Override public RangeDecomposition decomposeRange( final NumericData[] rangePerDimension, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions, final int totalPrecision, final int maxFilteredIndexedRanges, final boolean removeVacuum, final boolean overInclusiveOnEdge) { // List of query range minimum // and // maximum // values final List minRangeList = new ArrayList<>(); final List maxRangeList = new ArrayList<>(); final LongContent zero = new LongContent(0L); final List region = new ArrayList<>(dimensionDefinitions.length); for (int d = 0; d < dimensionDefinitions.length; d++) { final long normalizedMin = normalizeDimension( dimensionDefinitions[d], rangePerDimension[d].getMin(), binsPerDimension[d], true, overInclusiveOnEdge); long normalizedMax = normalizeDimension( dimensionDefinitions[d], rangePerDimension[d].getMax(), binsPerDimension[d], false, overInclusiveOnEdge); if (normalizedMin > normalizedMax) { // if they're both equal, which is possible because we treat max // as exclusive, set bin max to bin min (ie. 
treat it as // inclusive in this case) normalizedMax = normalizedMin; } minRangeList.add(normalizedMin); maxRangeList.add(normalizedMax); region.add(LongRange.of(normalizedMin, normalizedMax + 1L)); } final long minQuadSize = getMinimumQuadSize(minRangeList, maxRangeList); final RegionInspector regionInspector = SimpleRegionInspector.create( ImmutableList.of(region), new LongContent(minQuadSize), Functions.identity(), LongRangeHome.INSTANCE, zero); final PlainFilterCombiner intervalCombiner = new PlainFilterCombiner<>(LongRange.of(0, 1)); final QueryBuilder queryBuilder = BacktrackingQueryBuilder.create( regionInspector, intervalCombiner, maxFilteredIndexedRanges, removeVacuum, LongRangeHome.INSTANCE, zero); synchronized (compactHilbertCurve) { compactHilbertCurve.accept(new ZoomingSpaceVisitorAdapter(compactHilbertCurve, queryBuilder)); } final List> hilbertRanges = queryBuilder.get().getFilteredIndexRanges(); final ByteArrayRange[] sfcRanges = new ByteArrayRange[hilbertRanges.size()]; final int expectedByteCount = (int) Math.ceil(totalPrecision / 8.0); if (expectedByteCount <= 0) { // special case for no precision return new RangeDecomposition( new ByteArrayRange[] {new ByteArrayRange(new byte[0], new byte[0])}); } for (int i = 0; i < hilbertRanges.size(); i++) { final FilteredIndexRange range = hilbertRanges.get(i); // sanity check that values fit within the expected range // it seems that uzaygezen can produce a value at 2^totalPrecision // rather than 2^totalPrecision - 1 final long startValue = clamp(minHilbertValue, maxHilbertValue, range.getIndexRange().getStart()); final long endValue = clamp(minHilbertValue, maxHilbertValue, range.getIndexRange().getEnd() - 1); // make sure its padded if necessary final byte[] start = HilbertSFC.fitExpectedByteCount( expectedByteCount, ByteBuffer.allocate(8).putLong(startValue).array()); // make sure its padded if necessary final byte[] end = HilbertSFC.fitExpectedByteCount( expectedByteCount, 
ByteBuffer.allocate(8).putLong(endValue).array()); sfcRanges[i] = new ByteArrayRange(start, end); } final RangeDecomposition rangeDecomposition = new RangeDecomposition(sfcRanges); return rangeDecomposition; } private static long clamp(final long min, final long max, final long value) { return Math.max(Math.min(value, max), 0); } /** * * Returns the smallest range that will be fully decomposed (i.e. decomposition stops when the * range is equal or smaller than this value). Values is based on the _maximumRangeDecompsed and * _minRangeDecompsed instance members. * * @param minRangeList minimum values for each dimension (ordered) * @param maxRangeList maximum values for each dimension (ordered) * @return largest range that will be fully decomposed */ private long getMinimumQuadSize(final List minRangeList, final List maxRangeList) { long maxRange = 1; final int dimensionality = Math.min(minRangeList.size(), maxRangeList.size()); for (int d = 0; d < dimensionality; d++) { maxRange = Math.max(maxRange, (Math.abs(maxRangeList.get(d) - minRangeList.get(d)) + 1)); } final long maxRangeDecomposed = (long) Math.pow(maxRange, dimensionality); if (maxRangeDecomposed <= UNIT_CELL_SIZE) { return 1L; } return maxRangeDecomposed / UNIT_CELL_SIZE; } /** * The estimated ID count is the cross product of normalized range of all dimensions per the bits * of precision provided by the dimension definitions. 
*/ @Override public BigInteger getEstimatedIdCount( final MultiDimensionalNumericData data, final SFCDimensionDefinition[] dimensionDefinitions) { final Double[] mins = data.getMinValuesPerDimension(); final Double[] maxes = data.getMaxValuesPerDimension(); long estimatedIdCount = 1L; for (int d = 0; d < data.getDimensionCount(); d++) { final long binMin = normalizeDimension(dimensionDefinitions[d], mins[d], binsPerDimension[d], true, false); long binMax = normalizeDimension(dimensionDefinitions[d], maxes[d], binsPerDimension[d], false, false); if (binMin > binMax) { // if they're both equal, which is possible because we treat max // as exclusive, set bin max to bin min (ie. treat it as // inclusive in this case) binMax = binMin; } estimatedIdCount *= (Math.abs(binMax - binMin) + 1); } return BigInteger.valueOf(estimatedIdCount); } @Override public double[] getInsertionIdRangePerDimension( final SFCDimensionDefinition[] dimensionDefinitions) { final double[] retVal = new double[dimensionDefinitions.length]; for (int i = 0; i < dimensionDefinitions.length; i++) { retVal[i] = dimensionDefinitions[i].getRange() / binsPerDimension[i]; } return retVal; } @Override public long[] normalizeRange( final double minValue, final double maxValue, final int dimension, final SFCDimensionDefinition boundedDimensionDefinition) throws IllegalArgumentException { return new long[] { normalizeDimension( boundedDimensionDefinition, minValue, binsPerDimension[dimension], true, true), normalizeDimension( boundedDimensionDefinition, maxValue, binsPerDimension[dimension], false, true)}; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/hilbert/UnboundedHilbertSFCOperations.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.hilbert; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import com.google.common.base.Functions; import com.google.common.collect.ImmutableList; import com.google.uzaygezen.core.BacktrackingQueryBuilder; import com.google.uzaygezen.core.BigIntegerContent; import com.google.uzaygezen.core.BitVector; import com.google.uzaygezen.core.BitVectorFactories; import com.google.uzaygezen.core.CompactHilbertCurve; import com.google.uzaygezen.core.FilteredIndexRange; import com.google.uzaygezen.core.PlainFilterCombiner; import com.google.uzaygezen.core.QueryBuilder; import com.google.uzaygezen.core.RegionInspector; import com.google.uzaygezen.core.SimpleRegionInspector; import com.google.uzaygezen.core.ZoomingSpaceVisitorAdapter; import com.google.uzaygezen.core.ranges.BigIntegerRange; import com.google.uzaygezen.core.ranges.BigIntegerRangeHome; /** * This supports Compact Hilbert SFC operations using a BigInteger internally to represent * intermediate results. 
This can be significantly slower than using a primitive long for * intermediate results but can support arbitrarily many bits of precision. */ public class UnboundedHilbertSFCOperations implements HilbertSFCOperations { private static final BigDecimal TWO = BigDecimal.valueOf(2); protected static final BigInteger UNIT_CELL_SIZE = BigDecimal.valueOf(Math.pow(2, 19)).toBigInteger(); protected BigDecimal[] binsPerDimension; protected BigInteger minHilbertValue; protected BigInteger maxHilbertValue; @Override public void init(final SFCDimensionDefinition[] dimensionDefs) { binsPerDimension = new BigDecimal[dimensionDefs.length]; int totalPrecision = 0; for (int d = 0; d < dimensionDefs.length; d++) { final SFCDimensionDefinition dimension = dimensionDefs[d]; binsPerDimension[d] = TWO.pow(dimension.getBitsOfPrecision()); totalPrecision += dimension.getBitsOfPrecision(); } minHilbertValue = BigInteger.ZERO; maxHilbertValue = TWO.pow(totalPrecision).subtract(BigDecimal.ONE).toBigInteger(); } @Override public byte[] convertToHilbert( final Double[] values, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { final List dimensionValues = new ArrayList<>(); // Compare the number of dimensions to the number of values sent in if (dimensionDefinitions.length != values.length) { throw new ArrayIndexOutOfBoundsException( "Number of dimensions supplied (" + values.length + ") is different than initialized (" + dimensionDefinitions.length + ")."); } // Loop through each value, then normalize the value based on the // dimension definition for (int i = 0; i < dimensionDefinitions.length; i++) { dimensionValues.add( normalizeDimension( dimensionDefinitions[i], values[i], binsPerDimension[i], false, false)); } // Convert the normalized values to a BitVector final BitVector hilbertBitVector = convertToHilbert(dimensionValues, compactHilbertCurve, dimensionDefinitions); return hilbertBitVector.toBigEndianByteArray(); } /** * * Converts the 
incoming values (one per dimension) into a BitVector using the Compact Hilbert * instance. BitVector is a wrapper to allow values longer than 64 bits. * * @param values n-dimensional point to transoform to a point on the hilbert SFC * @return point on hilbert SFC */ private BitVector convertToHilbert( final List values, final CompactHilbertCurve compactHilbertCurve, final SFCDimensionDefinition[] dimensionDefinitions) { final BitVector[] bitVectors = new BitVector[values.size()]; final BitVector hilbertBitVector = BitVectorFactories.OPTIMAL.apply(compactHilbertCurve.getSpec().sumBitsPerDimension()); for (int i = 0; i < values.size(); i++) { bitVectors[i] = BitVectorFactories.OPTIMAL.apply(dimensionDefinitions[i].getBitsOfPrecision()); bitVectors[i].copyFrom(values.get(i)); } synchronized (compactHilbertCurve) { compactHilbertCurve.index(bitVectors, 0, hilbertBitVector); } return hilbertBitVector; } /** * * Used to normalize the value based on the dimension definition, which includes the dimensional * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented * values. 
* * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension * @param value value to be normalized * @param bins precomputed number of bins in this dimension the number of bins expected bas on the * cardinality of the definition * @param isMin flag indicating if this value is a minimum of a range in which case it needs to be * inclusive on a boundary, otherwise it is exclusive * @return value after normalization * @throws IllegalArgumentException thrown when the value passed doesn't fit with in the dimension * definition provided */ private BigInteger normalizeDimension( final SFCDimensionDefinition boundedDimensionDefinition, final double value, final BigDecimal bins, final boolean isMin, final boolean overInclusiveOnEdge) throws IllegalArgumentException { final double normalizedValue = boundedDimensionDefinition.normalize(value); if ((normalizedValue < 0) || (normalizedValue > 1)) { throw new IllegalArgumentException( "Value (" + value + ") is not within dimension bounds. 
              The normalized value (" + normalizedValue + ") must be within (0,1)");
    }
    final BigDecimal val = BigDecimal.valueOf(normalizedValue);
    // scale it to a value within the bits of precision
    final BigDecimal valueScaledWithinPrecision = val.multiply(bins);
    if ((isMin && !overInclusiveOnEdge) || (!isMin && overInclusiveOnEdge)) {
      // round it down, and make sure it isn't above bins - 1 (exactly 1
      // for the normalized value could produce a bit shifted value equal
      // to bins without this check)
      return valueScaledWithinPrecision.setScale(0, RoundingMode.FLOOR).min(
          bins.subtract(BigDecimal.ONE)).toBigInteger();
    } else {
      // round it up, subtract one to set the range between [0,
      // 2^cardinality-1) and make sure it isn't below 0 (exactly 0 for the
      // normalized value could produce a bit shifted value of -1 without
      // this check)
      return valueScaledWithinPrecision.setScale(0, RoundingMode.CEILING).subtract(
          BigDecimal.ONE).max(BigDecimal.ZERO).toBigInteger();
    }
  }

  /**
   * Decodes per-dimension coordinate indices from a Hilbert index value.
   *
   * <p> NOTE: although this is the "unbounded" implementation, the return type is an array of
   * longs, so the primitive (long-based) calculation is simply reused.
   */
  @Override
  public long[] indicesFromHilbert(
      final byte[] hilbertValue,
      final CompactHilbertCurve compactHilbertCurve,
      final SFCDimensionDefinition[] dimensionDefinitions) {
    // warning: this very much won't be unbounded because it returns an
    // array of longs right now but we may as well re-use the calculation
    // from the primitive operations
    return PrimitiveHilbertSFCOperations.internalIndicesFromHilbert(
        hilbertValue,
        compactHilbertCurve,
        dimensionDefinitions);
  }

  /**
   * Inverts a Hilbert index value back into the multi-dimensional numeric range it represents,
   * denormalizing each dimension's bin index back into the dimension's native bounds.
   */
  @Override
  public MultiDimensionalNumericData convertFromHilbert(
      final byte[] hilbertValue,
      final CompactHilbertCurve compactHilbertCurve,
      final SFCDimensionDefinition[] dimensionDefinitions) {
    final BitVector[] perDimensionBitVectors =
        PrimitiveHilbertSFCOperations.indexInverse(
            hilbertValue,
            compactHilbertCurve,
            dimensionDefinitions);
    final NumericRange[] retVal = new NumericRange[dimensionDefinitions.length];
    for (int i = 0; i < retVal.length; i++) {
      retVal[i] =
          denormalizeDimension(
              dimensionDefinitions[i],
              perDimensionBitVectors[i].toBigInteger(),
              binsPerDimension[i]);
    }
    return new BasicNumericDataset(retVal);
  }

  /**
   * Used to denormalize the value based on the dimension definition, which includes the dimensional
   * bounds and the bits of precision. This ensures the maximum amount of fidelity for represented
   * values.
   *
   * @param boundedDimensionDefinition describes the min, max, and cardinality of a dimension
   * @param value hilbert value to be denormalized
   * @param bins precomputed number of bins in this dimension, the number of bins expected based on
   *        the cardinality of the definition
   * @return range of values representing this hilbert value (exclusive on the end)
   * @throws IllegalArgumentException thrown when the value passed doesn't fit within the hilbert
   *         SFC for the dimension definition provided
   */
  private NumericRange denormalizeDimension(
      final SFCDimensionDefinition boundedDimensionDefinition,
      final BigInteger value,
      final BigDecimal bins) throws IllegalArgumentException {
    // bin / binCount is the normalized [0,1) start of the bin; the next bin boundary is the
    // (exclusive) end. NOTE(review): divide() has no MathContext, so this assumes bins divides
    // evenly in decimal (e.g. a power-of-two cardinality) -- confirm, otherwise it would throw
    // ArithmeticException
    final double min = new BigDecimal(value).divide(bins).doubleValue();
    final double max = new BigDecimal(value).add(BigDecimal.ONE).divide(bins).doubleValue();
    if ((min < 0) || (min > 1)) {
      throw new IllegalArgumentException(
          "Value ("
              + value
              + ") is not within bounds. The normalized value ("
              + min
              + ") must be within (0,1)");
    }
    if ((max < 0) || (max > 1)) {
      throw new IllegalArgumentException(
          "Value ("
              + value
              + ") is not within bounds. The normalized value ("
              + max
              + ") must be within (0,1)");
    }
    // scale it to a value within the dimension definition range
    return new NumericRange(
        boundedDimensionDefinition.denormalize(min),
        boundedDimensionDefinition.denormalize(max));
  }

  /**
   * Decomposes an n-dimensional query region into a set of contiguous Hilbert index ranges by
   * normalizing each dimension to bins and delegating the range decomposition to the uzaygezen
   * backtracking query builder.
   */
  @Override
  public RangeDecomposition decomposeRange(
      final NumericData[] rangePerDimension,
      final CompactHilbertCurve compactHilbertCurve,
      final SFCDimensionDefinition[] dimensionDefinitions,
      final int totalPrecision,
      final int maxFilteredIndexedRanges,
      final boolean removeVacuum,
      final boolean overInclusiveOnEdge) {
    // List of query range minimum and maximum values
    final List<BigInteger> minRangeList = new ArrayList<>();
    final List<BigInteger> maxRangeList = new ArrayList<>();
    final BigIntegerContent zero = new BigIntegerContent(BigInteger.valueOf(0L));
    final List<BigIntegerRange> region = new ArrayList<>(dimensionDefinitions.length);
    for (int d = 0; d < dimensionDefinitions.length; d++) {
      final BigInteger normalizedMin =
          normalizeDimension(
              dimensionDefinitions[d],
              rangePerDimension[d].getMin(),
              binsPerDimension[d],
              true,
              overInclusiveOnEdge);
      BigInteger normalizedMax =
          normalizeDimension(
              dimensionDefinitions[d],
              rangePerDimension[d].getMax(),
              binsPerDimension[d],
              false,
              overInclusiveOnEdge);
      if (normalizedMin.compareTo(normalizedMax) > 0) {
        // if they're both equal, which is possible because we treat max
        // as exclusive, set bin max to bin min (ie. treat it as
        // inclusive in this case)
        normalizedMax = normalizedMin;
      }
      minRangeList.add(normalizedMin);
      maxRangeList.add(normalizedMax);
      region.add(BigIntegerRange.of(normalizedMin, normalizedMax.add(BigInteger.ONE)));
    }
    final BigInteger minQuadSize = getMinimumQuadSize(minRangeList, maxRangeList);
    final RegionInspector<BigIntegerRange, BigIntegerContent> regionInspector =
        SimpleRegionInspector.create(
            ImmutableList.of(region),
            new BigIntegerContent(minQuadSize),
            Functions.identity(),
            BigIntegerRangeHome.INSTANCE,
            zero);
    final PlainFilterCombiner<BigIntegerRange, BigInteger, BigIntegerContent, BigIntegerRange> intervalCombiner =
        new PlainFilterCombiner<>(BigIntegerRange.of(0, 1));
    final QueryBuilder<BigIntegerRange, BigIntegerRange> queryBuilder =
        BacktrackingQueryBuilder.create(
            regionInspector,
            intervalCombiner,
            maxFilteredIndexedRanges,
            removeVacuum,
            BigIntegerRangeHome.INSTANCE,
            zero);
    // NOTE(review): the curve instance is shared, so traversal is serialized here -- assumes
    // CompactHilbertCurve.accept is not safe for concurrent use; confirm
    synchronized (compactHilbertCurve) {
      compactHilbertCurve.accept(new ZoomingSpaceVisitorAdapter(compactHilbertCurve, queryBuilder));
    }
    // com.google.uzaygezen.core.Query hilbertQuery = queryBuilder.get();
    final List<FilteredIndexRange<BigIntegerRange, BigIntegerRange>> hilbertRanges =
        queryBuilder.get().getFilteredIndexRanges();
    final ByteArrayRange[] sfcRanges = new ByteArrayRange[hilbertRanges.size()];
    final int expectedByteCount = (int) Math.ceil(totalPrecision / 8.0);
    if (expectedByteCount <= 0) {
      // special case for no precision
      return new RangeDecomposition(
          new ByteArrayRange[] {new ByteArrayRange(new byte[0], new byte[0])});
    }
    for (int i = 0; i < hilbertRanges.size(); i++) {
      final FilteredIndexRange<BigIntegerRange, BigIntegerRange> range = hilbertRanges.get(i);
      // sanity check that values fit within the expected range
      // it seems that uzaygezen can produce a value at 2^totalPrecision
      // rather than 2^totalPrecision - 1
      final BigInteger startValue =
          clamp(minHilbertValue, maxHilbertValue, range.getIndexRange().getStart());
      final BigInteger endValue =
          clamp(
              minHilbertValue,
              maxHilbertValue,
              range.getIndexRange().getEnd().subtract(BigInteger.ONE));
      // make sure its padded if necessary
      final byte[] start =
          HilbertSFC.fitExpectedByteCount(expectedByteCount, startValue.toByteArray());
      // make sure its padded if necessary
      final byte[] end =
          HilbertSFC.fitExpectedByteCount(expectedByteCount, endValue.toByteArray());
      sfcRanges[i] = new ByteArrayRange(start, end);
    }
    final RangeDecomposition rangeDecomposition = new RangeDecomposition(sfcRanges);
    return rangeDecomposition;
  }

  /** Clamps {@code value} into the inclusive range [{@code minValue}, {@code maxValue}]. */
  private static BigInteger clamp(
      final BigInteger minValue,
      final BigInteger maxValue,
      final BigInteger value) {
    return value.max(minValue).min(maxValue);
  }

  /**
   * Returns the smallest range that will be fully decomposed (i.e. decomposition stops when the
   * range is equal or smaller than this value). The value is derived from the largest
   * per-dimension bin span raised to the dimensionality, relative to UNIT_CELL_SIZE.
   *
   * @param minRangeList minimum values for each dimension (ordered)
   * @param maxRangeList maximum values for each dimension (ordered)
   * @return largest range that will be fully decomposed
   */
  private BigInteger getMinimumQuadSize(
      final List<BigInteger> minRangeList,
      final List<BigInteger> maxRangeList) {
    BigInteger maxRange = BigInteger.valueOf(1);
    final int dimensionality = Math.min(minRangeList.size(), maxRangeList.size());
    for (int d = 0; d < dimensionality; d++) {
      maxRange =
          maxRange.max(
              maxRangeList.get(d).subtract(minRangeList.get(d)).abs().add(BigInteger.ONE));
    }
    final BigInteger maxRangeDecomposed = maxRange.pow(dimensionality);
    if (maxRangeDecomposed.compareTo(UNIT_CELL_SIZE) <= 0) {
      return BigInteger.ONE;
    }
    return maxRangeDecomposed.divide(UNIT_CELL_SIZE);
  }

  /**
   * Estimates how many insertion IDs the given data would produce by multiplying, per dimension,
   * the (inclusive) span of bins covered by the data's min/max.
   */
  @Override
  public BigInteger getEstimatedIdCount(
      final MultiDimensionalNumericData data,
      final SFCDimensionDefinition[] dimensionDefinitions) {
    final Double[] mins = data.getMinValuesPerDimension();
    final Double[] maxes = data.getMaxValuesPerDimension();
    BigInteger estimatedIdCount = BigInteger.valueOf(1);
    for (int d = 0; d < data.getDimensionCount(); d++) {
      final BigInteger binMin =
          normalizeDimension(dimensionDefinitions[d], mins[d], binsPerDimension[d], true, false);
      BigInteger binMax =
          normalizeDimension(dimensionDefinitions[d], maxes[d], binsPerDimension[d], false, false);
      if (binMin.compareTo(binMax) > 0) {
        // if they're both equal, which is possible because we treat max
        // as exclusive, set bin max to bin min (ie. treat it as
        // inclusive in this case)
        binMax = binMin;
      }
      estimatedIdCount =
          estimatedIdCount.multiply(binMax.subtract(binMin).abs().add(BigInteger.ONE));
    }
    return estimatedIdCount;
  }

  /** Returns, per dimension, the extent covered by a single bin in the dimension's native units. */
  @Override
  public double[] getInsertionIdRangePerDimension(
      final SFCDimensionDefinition[] dimensionDefinitions) {
    final double[] retVal = new double[dimensionDefinitions.length];
    for (int i = 0; i < dimensionDefinitions.length; i++) {
      retVal[i] =
          new BigDecimal(dimensionDefinitions[i].getRange()).divide(
              binsPerDimension[i]).doubleValue();
    }
    return retVal;
  }

  /**
   * Normalizes a [minValue, maxValue] range in one dimension into its pair of bin indices
   * (min bin, max bin), both over-inclusive on the edge.
   */
  @Override
  public long[] normalizeRange(
      final double minValue,
      final double maxValue,
      final int dimension,
      final SFCDimensionDefinition boundedDimensionDefinition) throws IllegalArgumentException {
    return new long[] {
        normalizeDimension(
            boundedDimensionDefinition,
            minValue,
            binsPerDimension[dimension],
            true,
            true).longValue(),
        normalizeDimension(
            boundedDimensionDefinition,
            maxValue,
            binsPerDimension[dimension],
            false,
            true).longValue()};
  }
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/tiered/SingleTierSubStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.tiered; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.IndexUtils; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; import org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class wraps a single SpaceFillingCurve implementation with a tiered approach to indexing (an * SFC with a tier ID). 
This can be utilized by an overall HierarchicalNumericIndexStrategy as an * encapsulated sub-strategy. */ public class SingleTierSubStrategy implements NumericIndexStrategy { private static final Logger LOGGER = LoggerFactory.getLogger(SingleTierSubStrategy.class); private SpaceFillingCurve sfc; private NumericDimensionDefinition[] baseDefinitions; public byte tier; public SingleTierSubStrategy() {} public SingleTierSubStrategy( final SpaceFillingCurve sfc, final NumericDimensionDefinition[] baseDefinitions, final byte tier) { this.sfc = sfc; this.baseDefinitions = baseDefinitions; this.tier = tier; } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... hints) { return getQueryRanges(indexedRange, TieredSFCIndexStrategy.DEFAULT_MAX_RANGES); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxRangeDecomposition, final IndexMetaData... hints) { final List binnedQueries = BinnedNumericDataset.applyBins(indexedRange, baseDefinitions); return new QueryRanges( BinnedSFCUtils.getQueryRanges(binnedQueries, sfc, maxRangeDecomposition, tier)); } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { final List insertionIds = new SinglePartitionInsertionIds(partitionKey, sortKey).getCompositeInsertionIds(); if (insertionIds.isEmpty()) { LOGGER.warn("Unexpected empty insertion ID in getRangeForId()"); return null; } final byte[] rowId = insertionIds.get(0); return BinnedSFCUtils.getRangeForId(rowId, baseDefinitions, sfc); } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { final byte[] rowId = ByteArrayUtils.combineArrays( partitionKey == null ? null : partitionKey, sortKey == null ? 
null : sortKey); return new MultiDimensionalCoordinates( new byte[] {tier}, BinnedSFCUtils.getCoordinatesForId(rowId, baseDefinitions, sfc)); } @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { return getInsertionIds(indexedData, 1); } @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxDuplicateInsertionIds) { if (indexedData.isEmpty()) { LOGGER.warn("Cannot index empty fields, skipping writing row to index '" + getId() + "'"); return new InsertionIds(); } // we need to duplicate per bin so we can't adhere to max duplication // anyways final List ranges = BinnedNumericDataset.applyBins(indexedData, baseDefinitions); final Set retVal = new HashSet<>(ranges.size()); for (final BinnedNumericDataset range : ranges) { final SinglePartitionInsertionIds binRowIds = TieredSFCIndexStrategy.getRowIdsAtTier(range, tier, sfc, null, tier); if (binRowIds != null) { retVal.add(binRowIds); } } return new InsertionIds(retVal); } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return baseDefinitions; } @Override public String getId() { return StringUtils.intToString(hashCode()); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(baseDefinitions); result = (prime * result) + ((sfc == null) ? 
0 : sfc.hashCode()); result = (prime * result) + tier; return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if ((obj == null) || (getClass() != obj.getClass())) { return false; } final SingleTierSubStrategy other = (SingleTierSubStrategy) obj; if (!Arrays.equals(baseDefinitions, other.baseDefinitions)) { return false; } if (sfc == null) { if (other.sfc != null) { return false; } } else if (!sfc.equals(other.sfc)) { return false; } if (tier != other.tier) { return false; } return true; } @Override public byte[] toBinary() { int byteBufferLength = 1 + VarintUtils.unsignedIntByteLength(baseDefinitions.length); final List dimensionBinaries = new ArrayList<>(baseDefinitions.length); final byte[] sfcBinary = PersistenceUtils.toBinary(sfc); byteBufferLength += (VarintUtils.unsignedIntByteLength(sfcBinary.length) + sfcBinary.length); for (final NumericDimensionDefinition dimension : baseDefinitions) { final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension); byteBufferLength += (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length); dimensionBinaries.add(dimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); buf.put(tier); VarintUtils.writeUnsignedInt(baseDefinitions.length, buf); VarintUtils.writeUnsignedInt(sfcBinary.length, buf); buf.put(sfcBinary); for (final byte[] dimensionBinary : dimensionBinaries) { VarintUtils.writeUnsignedInt(dimensionBinary.length, buf); buf.put(dimensionBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); tier = buf.get(); final int numDimensions = VarintUtils.readUnsignedInt(buf); baseDefinitions = new NumericDimensionDefinition[numDimensions]; final byte[] sfcBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); sfc = (SpaceFillingCurve) PersistenceUtils.fromBinary(sfcBinary); for (int i = 0; i < numDimensions; i++) { final 
byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim); } } @Override public double[] getHighestPrecisionIdRangePerDimension() { return sfc.getInsertionIdRangePerDimension(); } @Override public int getPartitionKeyLength() { int rowIdOffset = 1; for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) { final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize(); if (binSize > 0) { rowIdOffset += binSize; } } return rowIdOffset; } @Override public List createMetaData() { return Collections.emptyList(); } @Override public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... hints) { final BinRange[][] binRangesPerDimension = BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions); return new MultiDimensionalCoordinateRanges[] { BinnedSFCUtils.getCoordinateRanges( binRangesPerDimension, sfc, baseDefinitions.length, tier)}; } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { return IndexUtils.getInsertionPartitionKeys(this, insertionData); } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return IndexUtils.getQueryPartitionKeys(this, queryData, hints); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/tiered/TieredSFCIndexFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.tiered; import java.util.Arrays; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCFactory; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; import com.google.common.collect.ImmutableBiMap; /** * A factory for creating TieredSFCIndexStrategy using various approaches for breaking down the bits * of precision per tier */ public class TieredSFCIndexFactory { private static int DEFAULT_NUM_TIERS = 11; /** * Used to create a Single Tier Index Strategy. For example, this would be used to generate a * strategy that has Point type spatial data. * * @param dimensionDefs an array of SFC Dimension Definition objects * @param sfc the type of space filling curve (e.g. 
Hilbert) * @return an Index Strategy object with a single tier */ public static TieredSFCIndexStrategy createSingleTierStrategy( final SFCDimensionDefinition[] dimensionDefs, final SFCType sfc) { final SpaceFillingCurve[] orderedSfcs = new SpaceFillingCurve[] {SFCFactory.createSpaceFillingCurve(dimensionDefs, sfc)}; // unwrap SFC dimension definitions final NumericDimensionDefinition[] baseDefinitions = new NumericDimensionDefinition[dimensionDefs.length]; int maxBitsOfPrecision = Integer.MIN_VALUE; for (int d = 0; d < baseDefinitions.length; d++) { baseDefinitions[d] = dimensionDefs[d].getDimensionDefinition(); maxBitsOfPrecision = Math.max(dimensionDefs[d].getBitsOfPrecision(), maxBitsOfPrecision); } return new TieredSFCIndexStrategy( baseDefinitions, orderedSfcs, ImmutableBiMap.of(0, (byte) maxBitsOfPrecision)); } /** * Used to create a Single Tier Index Strategy. For example, this would be used to generate a * strategy that has Point type spatial data. * * @param baseDefinitions the numeric dimensions of the strategy * @param maxBitsPerDimension the maximum bits to use for each dimension * @param sfc the type of space filling curve (e.g. 
Hilbert) * @return an Index Strategy object with a single tier */ public static TieredSFCIndexStrategy createSingleTierStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfc) { final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[baseDefinitions.length]; int maxBitsOfPrecision = Integer.MIN_VALUE; for (int d = 0; d < baseDefinitions.length; d++) { sfcDimensions[d] = new SFCDimensionDefinition(baseDefinitions[d], maxBitsPerDimension[d]); maxBitsOfPrecision = Math.max(maxBitsPerDimension[d], maxBitsOfPrecision); } final SpaceFillingCurve[] orderedSfcs = new SpaceFillingCurve[] {SFCFactory.createSpaceFillingCurve(sfcDimensions, sfc)}; return new TieredSFCIndexStrategy( baseDefinitions, orderedSfcs, ImmutableBiMap.of(0, (byte) maxBitsOfPrecision)); } public static TieredSFCIndexStrategy createFullIncrementalTieredStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfcType) { return createFullIncrementalTieredStrategy(baseDefinitions, maxBitsPerDimension, sfcType, null); } /** * @param baseDefinitions an array of Numeric Dimension Definitions * @param maxBitsPerDimension the max cardinality for the Index Strategy * @param sfcType the type of space filling curve (e.g. 
Hilbert) * @param maxEstimatedDuplicatedIds the max number of duplicate SFC IDs * @return an Index Strategy object with a tier for every incremental cardinality between the * lowest max bits of precision and 0 */ public static TieredSFCIndexStrategy createFullIncrementalTieredStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfcType, final Long maxEstimatedDuplicatedIds) { if (maxBitsPerDimension.length == 0) { final ImmutableBiMap emptyMap = ImmutableBiMap.of(); return new TieredSFCIndexStrategy(baseDefinitions, new SpaceFillingCurve[] {}, emptyMap); } int numIndices = Integer.MAX_VALUE; for (final int element : maxBitsPerDimension) { numIndices = Math.min(numIndices, element + 1); } final SpaceFillingCurve[] spaceFillingCurves = new SpaceFillingCurve[numIndices]; final ImmutableBiMap.Builder sfcIndexToTier = ImmutableBiMap.builder(); for (int sfcIndex = 0; sfcIndex < numIndices; sfcIndex++) { final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[baseDefinitions.length]; int maxBitsOfPrecision = Integer.MIN_VALUE; for (int d = 0; d < baseDefinitions.length; d++) { final int bitsOfPrecision = maxBitsPerDimension[d] - (numIndices - sfcIndex - 1); maxBitsOfPrecision = Math.max(bitsOfPrecision, maxBitsOfPrecision); sfcDimensions[d] = new SFCDimensionDefinition(baseDefinitions[d], bitsOfPrecision); } sfcIndexToTier.put(sfcIndex, (byte) maxBitsOfPrecision); spaceFillingCurves[sfcIndex] = SFCFactory.createSpaceFillingCurve(sfcDimensions, sfcType); } if ((maxEstimatedDuplicatedIds != null) && (maxEstimatedDuplicatedIds > 0)) { return new TieredSFCIndexStrategy( baseDefinitions, spaceFillingCurves, sfcIndexToTier.build(), maxEstimatedDuplicatedIds); } return new TieredSFCIndexStrategy(baseDefinitions, spaceFillingCurves, sfcIndexToTier.build()); } /** * @param baseDefinitions an array of Numeric Dimension Definitions * @param maxBitsPerDimension the max cardinality for the Index Strategy * 
@param sfcType the type of space filling curve (e.g. Hilbert) * @return an Index Strategy object with a equal interval tiers */ public static TieredSFCIndexStrategy createEqualIntervalPrecisionTieredStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfcType) { return createEqualIntervalPrecisionTieredStrategy( baseDefinitions, maxBitsPerDimension, sfcType, DEFAULT_NUM_TIERS); } /** * @param baseDefinitions an array of Numeric Dimension Definitions * @param maxBitsPerDimension the max cardinality for the Index Strategy * @param sfcType the type of space filling curve (e.g. Hilbert) * @param numIndices the number of tiers of the Index Strategy * @return an Index Strategy object with a specified number of tiers */ public static TieredSFCIndexStrategy createEqualIntervalPrecisionTieredStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfcType, final int numIndices) { // Subtracting one from the number tiers prevents an extra tier. If // we decide to create a catch-all, then we can ignore the subtraction. 
final SpaceFillingCurve[] spaceFillingCurves = new SpaceFillingCurve[numIndices]; final ImmutableBiMap.Builder sfcIndexToTier = ImmutableBiMap.builder(); for (int sfcIndex = 0; sfcIndex < numIndices; sfcIndex++) { final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[baseDefinitions.length]; int maxBitsOfPrecision = Integer.MIN_VALUE; for (int d = 0; d < baseDefinitions.length; d++) { int bitsOfPrecision; if (numIndices == 1) { bitsOfPrecision = maxBitsPerDimension[d]; } else { final double bitPrecisionIncrement = ((double) maxBitsPerDimension[d] / (numIndices - 1)); bitsOfPrecision = (int) (bitPrecisionIncrement * sfcIndex); } maxBitsOfPrecision = Math.max(bitsOfPrecision, maxBitsOfPrecision); sfcDimensions[d] = new SFCDimensionDefinition(baseDefinitions[d], bitsOfPrecision); } sfcIndexToTier.put(sfcIndex, (byte) maxBitsOfPrecision); spaceFillingCurves[sfcIndex] = SFCFactory.createSpaceFillingCurve(sfcDimensions, sfcType); } return new TieredSFCIndexStrategy(baseDefinitions, spaceFillingCurves, sfcIndexToTier.build()); } /** * @param orderedDimensionDefinitions an array of Numeric Dimension Definitions * @param bitsPerDimensionPerLevel * @param sfcType the type of space filling curve (e.g. 
Hilbert) * @return an Index Strategy object with a specified number of tiers */ public static TieredSFCIndexStrategy createDefinedPrecisionTieredStrategy( final NumericDimensionDefinition[] orderedDimensionDefinitions, final int[][] bitsPerDimensionPerLevel, final SFCType sfcType) { Integer numLevels = null; for (final int[] element : bitsPerDimensionPerLevel) { if (numLevels == null) { numLevels = element.length; } else { numLevels = Math.min(numLevels, element.length); } Arrays.sort(element); } if (numLevels == null) { numLevels = 0; } final SpaceFillingCurve[] orderedSFCTiers = new SpaceFillingCurve[numLevels]; final int numDimensions = orderedDimensionDefinitions.length; final ImmutableBiMap.Builder sfcIndexToTier = ImmutableBiMap.builder(); for (int l = 0; l < numLevels; l++) { final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[numDimensions]; int maxBitsOfPrecision = Integer.MIN_VALUE; for (int d = 0; d < numDimensions; d++) { sfcDimensions[d] = new SFCDimensionDefinition( orderedDimensionDefinitions[d], bitsPerDimensionPerLevel[d][l]); maxBitsOfPrecision = Math.max(bitsPerDimensionPerLevel[d][l], maxBitsOfPrecision); } sfcIndexToTier.put(l, (byte) maxBitsOfPrecision); orderedSFCTiers[l] = SFCFactory.createSpaceFillingCurve(sfcDimensions, sfcType); } return new TieredSFCIndexStrategy( orderedDimensionDefinitions, orderedSFCTiers, sfcIndexToTier.build()); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/tiered/TieredSFCIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.tiered; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.FloatCompareUtils; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.IndexUtils; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import 
org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.sfc.RangeDecomposition;
import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve;
import org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableBiMap.Builder;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;

/**
 * This class uses multiple SpaceFillingCurve objects, one per tier, to represent a single cohesive
 * index strategy with multiple precisions
 */
public class TieredSFCIndexStrategy implements HierarchicalNumericIndexStrategy {
  private static final Logger LOGGER = LoggerFactory.getLogger(TieredSFCIndexStrategy.class);
  private static final int DEFAULT_MAX_ESTIMATED_DUPLICATE_IDS_PER_DIMENSION = 2;
  protected static final int DEFAULT_MAX_RANGES = -1;
  // one SFC per tier, lowest precision first; orderedSfcIndexToTierId maps an index into this
  // array to the tier ID byte that prefixes row keys
  private SpaceFillingCurve[] orderedSfcs;
  private ImmutableBiMap<Integer, Byte> orderedSfcIndexToTierId;
  private NumericDimensionDefinition[] baseDefinitions;
  private long maxEstimatedDuplicateIdsPerDimension;
  // precomputed lookup: number of "ranged" dimensions -> estimated duplicate-ID cap
  private final Map<Integer, BigInteger> maxEstimatedDuplicatesPerDimensionalExtent =
      new HashMap<>();

  // no-arg constructor for persistence/deserialization
  public TieredSFCIndexStrategy() {}

  /**
   * Constructor used to create a Tiered Index Strategy.
   *
   * @param baseDefinitions the dimension definitions of the space filling curve
   * @param orderedSfcs the space filling curve used to create the strategy
   */
  public TieredSFCIndexStrategy(
      final NumericDimensionDefinition[] baseDefinitions,
      final SpaceFillingCurve[] orderedSfcs,
      final ImmutableBiMap<Integer, Byte> orderedSfcIndexToTierId) {
    this(
        baseDefinitions,
        orderedSfcs,
        orderedSfcIndexToTierId,
        DEFAULT_MAX_ESTIMATED_DUPLICATE_IDS_PER_DIMENSION);
  }

  /** Constructor used to create a Tiered Index Strategy. */
  public TieredSFCIndexStrategy(
      final NumericDimensionDefinition[] baseDefinitions,
      final SpaceFillingCurve[] orderedSfcs,
      final ImmutableBiMap<Integer, Byte> orderedSfcIndexToTierId,
      final long maxEstimatedDuplicateIdsPerDimension) {
    this.orderedSfcs = orderedSfcs;
    this.baseDefinitions = baseDefinitions;
    this.orderedSfcIndexToTierId = orderedSfcIndexToTierId;
    this.maxEstimatedDuplicateIdsPerDimension = maxEstimatedDuplicateIdsPerDimension;
    initDuplicateIdLookup();
  }

  // precompute, for every possible count of ranged dimensions (0..numDims inclusive -- note the
  // <= bound is deliberate), the duplicate cap maxEstimatedDuplicateIdsPerDimension ^ count
  private void initDuplicateIdLookup() {
    for (int i = 0; i <= baseDefinitions.length; i++) {
      final long maxEstimatedDuplicateIds =
          (long) Math.pow(maxEstimatedDuplicateIdsPerDimension, i);
      maxEstimatedDuplicatesPerDimensionalExtent.put(
          i,
          BigInteger.valueOf(maxEstimatedDuplicateIds));
    }
  }

  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final int maxRangeDecomposition,
      final IndexMetaData... hints) {
    // TODO don't just pass max ranges along to the SFC, take tiering and
    // binning into account to limit the number of ranges correctly
    final List<SinglePartitionQueryRanges> queryRanges = new ArrayList<>();
    final List<BinnedNumericDataset> binnedQueries =
        BinnedNumericDataset.applyBins(indexedRange, baseDefinitions);
    // an optional hint carrying per-tier row counts; tiers with zero rows are skipped
    final TierIndexMetaData metaData =
        ((hints.length > 0) && (hints[0] != null) && (hints[0] instanceof TierIndexMetaData))
            ? (TierIndexMetaData) hints[0]
            : null;
    // iterate highest-precision tier first
    for (int sfcIndex = orderedSfcs.length - 1; sfcIndex >= 0; sfcIndex--) {
      if ((metaData != null) && (metaData.tierCounts[sfcIndex] == 0)) {
        continue;
      }
      final SpaceFillingCurve sfc = orderedSfcs[sfcIndex];
      final Byte tier = orderedSfcIndexToTierId.get(sfcIndex);
      queryRanges.addAll(
          BinnedSFCUtils.getQueryRanges(
              binnedQueries,
              sfc,
              maxRangeDecomposition, // for now we're doing this per SFC/tier
                                     // rather than dividing by the tiers
              tier));
    }
    return new QueryRanges(queryRanges);
  }

  /**
   * Returns a list of query ranges for a specified numeric range.
   *
   * @param indexedRange defines the numeric range for the query
   * @return a List of query ranges
   */
  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final IndexMetaData... hints) {
    return getQueryRanges(indexedRange, DEFAULT_MAX_RANGES, hints);
  }

  /**
   * Returns a list of id's for insertion.
   *
   * @param indexedData defines the numeric data to be indexed
   * @return a List of insertion ID's
   */
  @Override
  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
    // cap duplicates based on how many dimensions are true ranges (min != max)
    return internalGetInsertionIds(
        indexedData,
        maxEstimatedDuplicatesPerDimensionalExtent.get(getRanges(indexedData)));
  }

  // counts the dimensions whose min and max differ (i.e. true ranges rather than point values)
  private static int getRanges(final MultiDimensionalNumericData indexedData) {
    final Double[] mins = indexedData.getMinValuesPerDimension();
    final Double[] maxes = indexedData.getMaxValuesPerDimension();
    int ranges = 0;
    for (int d = 0; d < mins.length; d++) {
      if (!FloatCompareUtils.checkDoublesEqual(mins[d], maxes[d])) {
        ranges++;
      }
    }
    return ranges;
  }

  @Override
  public InsertionIds getInsertionIds(
      final MultiDimensionalNumericData indexedData,
      final int maxDuplicateInsertionIdsPerDimension) {
    return internalGetInsertionIds(
        indexedData,
        BigInteger.valueOf(maxDuplicateInsertionIdsPerDimension));
  }

  // shared implementation: bins the data per dimension, then finds a tier that fits each bin
  private InsertionIds internalGetInsertionIds(
      final MultiDimensionalNumericData indexedData,
      final BigInteger maxDuplicateInsertionIds) {
    if (indexedData.isEmpty()) {
      LOGGER.warn("Cannot index empty fields, skipping writing row to index '" + getId() + "'");
      return new InsertionIds();
    }
    final List<BinnedNumericDataset> ranges =
        BinnedNumericDataset.applyBins(indexedData, baseDefinitions);
    // place each of these indices into a single row ID at a tier that will
    // fit its min and max
    final Set<SinglePartitionInsertionIds> retVal = new HashSet<>(ranges.size());
    for (final BinnedNumericDataset range : ranges) {
      retVal.add(getRowIds(range, maxDuplicateInsertionIds));
    }
    return new InsertionIds(retVal);
  }

  @Override
  public MultiDimensionalCoordinates getCoordinatesPerDimension(
      final byte[] partitionKey,
      final byte[] sortKey) {
    if ((partitionKey != null) && (partitionKey.length > 0)) {
      // NOTE(review): `sortKey == null ? null : sortKey` is an identity no-op (kept as-is)
      final byte[] rowId =
          ByteArrayUtils.combineArrays(partitionKey, sortKey == null ? null : sortKey);
      // the first byte of the row is the tier ID; map it back to the SFC for that tier
      final Integer orderedSfcIndex = orderedSfcIndexToTierId.inverse().get(rowId[0]);
      return new MultiDimensionalCoordinates(
          new byte[] {rowId[0]},
          BinnedSFCUtils.getCoordinatesForId(
              rowId,
              baseDefinitions,
              orderedSfcs[orderedSfcIndex]));
    } else {
      LOGGER.warn("Row's partition key must at least contain a byte for the tier");
    }
    return null;
  }

  @Override
  public MultiDimensionalNumericData getRangeForId(
      final byte[] partitionKey,
      final byte[] sortKey) {
    final List<byte[]> insertionIds =
        new SinglePartitionInsertionIds(partitionKey, sortKey).getCompositeInsertionIds();
    if (insertionIds.isEmpty()) {
      LOGGER.warn("Unexpected empty insertion ID in getRangeForId()");
      return null;
    }
    final byte[] rowId = insertionIds.get(0);
    if (rowId.length > 0) {
      // first byte is the tier ID; resolve the matching SFC
      final Integer orderedSfcIndex = orderedSfcIndexToTierId.inverse().get(rowId[0]);
      return BinnedSFCUtils.getRangeForId(rowId, baseDefinitions, orderedSfcs[orderedSfcIndex]);
    } else {
      LOGGER.warn("Row must at least contain a byte for tier");
    }
    return null;
  }

  // accumulates one MultiDimensionalCoordinateRanges per non-empty tier into coordRanges,
  // highest-precision tier first; tiers with a zero count in the optional metadata hint are
  // skipped
  public void calculateCoordinateRanges(
      final List<MultiDimensionalCoordinateRanges> coordRanges,
      final BinRange[][] binRangesPerDimension,
      final IndexMetaData... hints) {
    final TierIndexMetaData metaData =
        ((hints.length > 0) && (hints[0] != null) && (hints[0] instanceof TierIndexMetaData))
            ? (TierIndexMetaData) hints[0]
            : null;
    for (int sfcIndex = orderedSfcs.length - 1; sfcIndex >= 0; sfcIndex--) {
      if ((metaData != null) && (metaData.tierCounts[sfcIndex] == 0)) {
        continue;
      }
      final SpaceFillingCurve sfc = orderedSfcs[sfcIndex];
      final Byte tier = orderedSfcIndexToTierId.get(sfcIndex);
      coordRanges.add(
          BinnedSFCUtils.getCoordinateRanges(
              binRangesPerDimension,
              sfc,
              baseDefinitions.length,
              tier));
    }
  }

  @Override
  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
      final MultiDimensionalNumericData dataRange,
      final IndexMetaData... hints) {
    final List<MultiDimensionalCoordinateRanges> coordRanges = new ArrayList<>();
    final BinRange[][] binRangesPerDimension =
        BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions);
    calculateCoordinateRanges(coordRanges, binRangesPerDimension, hints);
    return coordRanges.toArray(new MultiDimensionalCoordinateRanges[] {});
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + Arrays.hashCode(baseDefinitions);
    result =
        (prime * result)
            + (int) (maxEstimatedDuplicateIdsPerDimension
                ^ (maxEstimatedDuplicateIdsPerDimension >>> 32));
    result = (prime * result) + ((orderedSfcIndexToTierId == null) ?
0 : orderedSfcIndexToTierId.hashCode()); result = (prime * result) + Arrays.hashCode(orderedSfcs); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TieredSFCIndexStrategy other = (TieredSFCIndexStrategy) obj; if (!Arrays.equals(baseDefinitions, other.baseDefinitions)) { return false; } if (maxEstimatedDuplicateIdsPerDimension != other.maxEstimatedDuplicateIdsPerDimension) { return false; } if (orderedSfcIndexToTierId == null) { if (other.orderedSfcIndexToTierId != null) { return false; } } else if (!orderedSfcIndexToTierId.equals(other.orderedSfcIndexToTierId)) { return false; } if (!Arrays.equals(orderedSfcs, other.orderedSfcs)) { return false; } return true; } @Override public String getId() { return StringUtils.intToString(hashCode()); } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return baseDefinitions; } public boolean tierExists(final Byte tierId) { return orderedSfcIndexToTierId.containsValue(tierId); } private synchronized SinglePartitionInsertionIds getRowIds( final BinnedNumericDataset index, final BigInteger maxEstimatedDuplicateIds) { // most times this should be a single row ID, but if the lowest // precision tier does not have a single SFC value for this data, it // will be multiple row IDs // what tier does this entry belong in? 
for (int sfcIndex = orderedSfcs.length - 1; sfcIndex >= 0; sfcIndex--) { final SpaceFillingCurve sfc = orderedSfcs[sfcIndex]; // loop through space filling curves and stop when both the min and // max of the ranges fit the same row ID final byte tierId = orderedSfcIndexToTierId.get(sfcIndex); final SinglePartitionInsertionIds rowIdsAtTier = getRowIdsAtTier(index, tierId, sfc, maxEstimatedDuplicateIds, sfcIndex); if (rowIdsAtTier != null) { return rowIdsAtTier; } } // this should never happen because of the check for tier 0 return new SinglePartitionInsertionIds(null, new ArrayList()); } public static SinglePartitionInsertionIds getRowIdsAtTier( final BinnedNumericDataset index, final Byte tierId, final SpaceFillingCurve sfc, final BigInteger maxEstimatedDuplicateIds, final int sfcIndex) { final BigInteger rowCount = sfc.getEstimatedIdCount(index); final SinglePartitionInsertionIds singleId = BinnedSFCUtils.getSingleBinnedInsertionId(rowCount, tierId, index, sfc); if (singleId != null) { return singleId; } if ((maxEstimatedDuplicateIds == null) || (rowCount.compareTo(maxEstimatedDuplicateIds) <= 0) || (sfcIndex == 0)) { return decomposeRangesForEntry(index, tierId, sfc); } return null; } protected static SinglePartitionInsertionIds decomposeRangesForEntry( final BinnedNumericDataset index, final Byte tierId, final SpaceFillingCurve sfc) { final List retVal = new ArrayList<>(); final byte[] tierAndBinId = tierId != null ? 
ByteArrayUtils.combineArrays(new byte[] {tierId}, index.getBinId()) : index.getBinId(); final RangeDecomposition rangeDecomp = sfc.decomposeRange(index, false, DEFAULT_MAX_RANGES); // this range does not fit into a single row ID at the lowest // tier, decompose it for (final ByteArrayRange range : rangeDecomp.getRanges()) { ByteArrayUtils.addAllIntermediaryByteArrays(retVal, range); } return new SinglePartitionInsertionIds(tierAndBinId, retVal); } @Override public byte[] toBinary() { int byteBufferLength = (2 * orderedSfcIndexToTierId.size()); byteBufferLength += VarintUtils.unsignedIntByteLength(orderedSfcs.length); final List orderedSfcBinaries = new ArrayList<>(orderedSfcs.length); byteBufferLength += VarintUtils.unsignedIntByteLength(baseDefinitions.length); final List dimensionBinaries = new ArrayList<>(baseDefinitions.length); byteBufferLength += VarintUtils.unsignedIntByteLength(orderedSfcIndexToTierId.size()); byteBufferLength += VarintUtils.unsignedLongByteLength(maxEstimatedDuplicateIdsPerDimension); for (final SpaceFillingCurve sfc : orderedSfcs) { final byte[] sfcBinary = PersistenceUtils.toBinary(sfc); byteBufferLength += (VarintUtils.unsignedIntByteLength(sfcBinary.length) + sfcBinary.length); orderedSfcBinaries.add(sfcBinary); } for (final NumericDimensionDefinition dimension : baseDefinitions) { final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension); byteBufferLength += (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length); dimensionBinaries.add(dimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); VarintUtils.writeUnsignedInt(orderedSfcs.length, buf); VarintUtils.writeUnsignedInt(baseDefinitions.length, buf); VarintUtils.writeUnsignedInt(orderedSfcIndexToTierId.size(), buf); VarintUtils.writeUnsignedLong(maxEstimatedDuplicateIdsPerDimension, buf); for (final byte[] sfcBinary : orderedSfcBinaries) { VarintUtils.writeUnsignedInt(sfcBinary.length, buf); buf.put(sfcBinary); } for 
(final byte[] dimensionBinary : dimensionBinaries) { VarintUtils.writeUnsignedInt(dimensionBinary.length, buf); buf.put(dimensionBinary); } for (final Entry entry : orderedSfcIndexToTierId.entrySet()) { buf.put(entry.getKey().byteValue()); buf.put(entry.getValue()); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numSfcs = VarintUtils.readUnsignedInt(buf); final int numDimensions = VarintUtils.readUnsignedInt(buf); final int mappingSize = VarintUtils.readUnsignedInt(buf); maxEstimatedDuplicateIdsPerDimension = VarintUtils.readUnsignedLong(buf); orderedSfcs = new SpaceFillingCurve[numSfcs]; baseDefinitions = new NumericDimensionDefinition[numDimensions]; for (int i = 0; i < numSfcs; i++) { final byte[] sfc = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); orderedSfcs[i] = (SpaceFillingCurve) PersistenceUtils.fromBinary(sfc); } for (int i = 0; i < numDimensions; i++) { final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim); } final Builder bimapBuilder = ImmutableBiMap.builder(); for (int i = 0; i < mappingSize; i++) { bimapBuilder.put(Byte.valueOf(buf.get()).intValue(), buf.get()); } orderedSfcIndexToTierId = bimapBuilder.build(); initDuplicateIdLookup(); } @Override public SubStrategy[] getSubStrategies() { final SubStrategy[] subStrategies = new SubStrategy[orderedSfcs.length]; for (int sfcIndex = 0; sfcIndex < orderedSfcs.length; sfcIndex++) { final byte tierId = orderedSfcIndexToTierId.get(sfcIndex); subStrategies[sfcIndex] = new SubStrategy( new SingleTierSubStrategy(orderedSfcs[sfcIndex], baseDefinitions, tierId), new byte[] {tierId}); } return subStrategies; } @Override public double[] getHighestPrecisionIdRangePerDimension() { // delegate this to the highest precision tier SFC return orderedSfcs[orderedSfcs.length - 
1].getInsertionIdRangePerDimension(); } public void setMaxEstimatedDuplicateIdsPerDimension( final int maxEstimatedDuplicateIdsPerDimension) { this.maxEstimatedDuplicateIdsPerDimension = maxEstimatedDuplicateIdsPerDimension; initDuplicateIdLookup(); } @Override public int getPartitionKeyLength() { int rowIdOffset = 1; for (int dimensionIdx = 0; dimensionIdx < baseDefinitions.length; dimensionIdx++) { final int binSize = baseDefinitions[dimensionIdx].getFixedBinIdSize(); if (binSize > 0) { rowIdOffset += binSize; } } return rowIdOffset; } public InsertionIds reprojectToTier( final byte[] insertId, final Byte reprojectTierId, final BigInteger maxDuplicates) { final MultiDimensionalNumericData originalRange = getRangeForId(insertId, null); final List ranges = BinnedNumericDataset.applyBins(originalRange, baseDefinitions); final int sfcIndex = orderedSfcIndexToTierId.inverse().get(reprojectTierId); final Set retVal = new HashSet<>(ranges.size()); for (final BinnedNumericDataset reprojectRange : ranges) { final SinglePartitionInsertionIds tierIds = TieredSFCIndexStrategy.getRowIdsAtTier( reprojectRange, reprojectTierId, orderedSfcs[sfcIndex], maxDuplicates, sfcIndex); retVal.add(tierIds); } return new InsertionIds(retVal); } @Override public List createMetaData() { return Collections.singletonList( (IndexMetaData) new TierIndexMetaData(orderedSfcIndexToTierId.inverse())); } public static class TierIndexMetaData implements IndexMetaData { private int[] tierCounts = null; private ImmutableBiMap orderedTierIdToSfcIndex = null; public TierIndexMetaData() {} public TierIndexMetaData(final ImmutableBiMap orderedTierIdToSfcIndex) { super(); tierCounts = new int[orderedTierIdToSfcIndex.size()]; this.orderedTierIdToSfcIndex = orderedTierIdToSfcIndex; } @Override public byte[] toBinary() { int bufferSize = VarintUtils.unsignedIntByteLength(tierCounts.length) + tierCounts.length * 2; for (final int count : tierCounts) { bufferSize += VarintUtils.unsignedIntByteLength(count); } 
final ByteBuffer buffer = ByteBuffer.allocate(bufferSize); VarintUtils.writeUnsignedInt(tierCounts.length, buffer); for (final int count : tierCounts) { VarintUtils.writeUnsignedInt(count, buffer); } for (final Entry entry : orderedTierIdToSfcIndex.entrySet()) { buffer.put(entry.getKey().byteValue()); buffer.put(entry.getValue().byteValue()); } return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); tierCounts = new int[VarintUtils.readUnsignedInt(buffer)]; for (int i = 0; i < tierCounts.length; i++) { tierCounts[i] = VarintUtils.readUnsignedInt(buffer); } final Builder bimapBuilder = ImmutableBiMap.builder(); for (int i = 0; i < tierCounts.length; i++) { bimapBuilder.put(buffer.get(), Byte.valueOf(buffer.get()).intValue()); } orderedTierIdToSfcIndex = bimapBuilder.build(); } @Override public void merge(final Mergeable merge) { if (merge instanceof TierIndexMetaData) { final TierIndexMetaData other = (TierIndexMetaData) merge; int pos = 0; for (final int count : other.tierCounts) { tierCounts[pos++] += count; } } } @Override public void insertionIdsAdded(final InsertionIds ids) { for (final SinglePartitionInsertionIds partitionIds : ids.getPartitionKeys()) { final byte first = partitionIds.getPartitionKey()[0]; if (orderedTierIdToSfcIndex.containsKey(first)) { tierCounts[orderedTierIdToSfcIndex.get(first).intValue()] += partitionIds.getSortKeys().size(); } } } @Override public void insertionIdsRemoved(final InsertionIds ids) { for (final SinglePartitionInsertionIds partitionIds : ids.getPartitionKeys()) { final byte first = partitionIds.getPartitionKey()[0]; if (orderedTierIdToSfcIndex.containsKey(first)) { tierCounts[orderedTierIdToSfcIndex.get(partitionIds.getPartitionKey()[0]).intValue()] -= partitionIds.getSortKeys().size(); } } } @Override public String toString() { return "Tier Metadata[Tier Counts:" + Arrays.toString(tierCounts) + "]"; } /** Convert Tiered Index Metadata statistics to 
a JSON object */ @Override public JSONObject toJSONObject() throws JSONException { final JSONObject jo = new JSONObject(); jo.put("type", "TieredSFCIndexStrategy"); jo.put("TierCountsSize", tierCounts.length); if (null == orderedTierIdToSfcIndex) { jo.put("orderedTierIdToSfcIndex", "null"); } else { jo.put("orderedTierIdToSfcIndexSize", orderedTierIdToSfcIndex.size()); } return jo; } } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { return IndexUtils.getInsertionPartitionKeys(this, insertionData); } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return IndexUtils.getQueryPartitionKeys(this, queryData, hints); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/xz/XZHierarchicalIndexFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.xz; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy; /** * A factory for creating a Hierachical XZ Index strategy with a TieredSFCIndexStrategy substrategy * using various approaches for breaking down the bits of precision per tier */ public class XZHierarchicalIndexFactory { public static XZHierarchicalIndexStrategy createFullIncrementalTieredStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfcType) { return createFullIncrementalTieredStrategy(baseDefinitions, maxBitsPerDimension, sfcType, null); } /** * @param baseDefinitions an array of Numeric Dimension Definitions * @param maxBitsPerDimension the max cardinality for the Index Strategy * @param sfcType the type of space filling curve (e.g. 
Hilbert) * @param maxEstimatedDuplicatedIds the max number of duplicate SFC IDs * @return an Index Strategy object with a tier for every incremental cardinality between the * lowest max bits of precision and 0 */ public static XZHierarchicalIndexStrategy createFullIncrementalTieredStrategy( final NumericDimensionDefinition[] baseDefinitions, final int[] maxBitsPerDimension, final SFCType sfcType, final Long maxEstimatedDuplicatedIds) { final TieredSFCIndexStrategy rasterStrategy = TieredSFCIndexFactory.createFullIncrementalTieredStrategy( baseDefinitions, maxBitsPerDimension, sfcType, maxEstimatedDuplicatedIds); return new XZHierarchicalIndexStrategy(baseDefinitions, rasterStrategy, maxBitsPerDimension); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/xz/XZHierarchicalIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.xz; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Coordinate; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.IndexUtils; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SFCFactory; import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType; import 
org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; import org.locationtech.geowave.core.index.sfc.binned.BinnedSFCUtils; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy; import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexStrategy.TierIndexMetaData; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import net.sf.json.JSONException; import net.sf.json.JSONObject; public class XZHierarchicalIndexStrategy implements HierarchicalNumericIndexStrategy { private static final Logger LOGGER = LoggerFactory.getLogger(XZHierarchicalIndexStrategy.class); protected static final int DEFAULT_MAX_RANGES = -1; private Byte pointCurveMultiDimensionalId = null; private Byte xzCurveMultiDimensionalId = null; private SpaceFillingCurve pointCurve; private SpaceFillingCurve xzCurve; private TieredSFCIndexStrategy rasterStrategy; private NumericDimensionDefinition[] baseDefinitions; private int[] maxBitsPerDimension; private int byteOffsetFromDimensionIndex; public XZHierarchicalIndexStrategy() {} /** * Constructor used to create a XZ Hierarchical Index Strategy. 
* * @param maxBitsPerDimension */ public XZHierarchicalIndexStrategy( final NumericDimensionDefinition[] baseDefinitions, final TieredSFCIndexStrategy rasterStrategy, final int[] maxBitsPerDimension) { this.rasterStrategy = rasterStrategy; this.maxBitsPerDimension = maxBitsPerDimension; init(baseDefinitions); } private void init(final NumericDimensionDefinition[] baseDefinitions) { this.baseDefinitions = baseDefinitions; byteOffsetFromDimensionIndex = rasterStrategy.getPartitionKeyLength(); // init dimensionalIds with values not used by rasterStrategy for (byte i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; i++) { if (!rasterStrategy.tierExists(i)) { if (pointCurveMultiDimensionalId == null) { pointCurveMultiDimensionalId = i; } else if (xzCurveMultiDimensionalId == null) { xzCurveMultiDimensionalId = i; } else { break; } } } if ((pointCurveMultiDimensionalId == null) || (xzCurveMultiDimensionalId == null)) { LOGGER.error("No available byte values for xz and point sfc multiDimensionalIds."); } final SFCDimensionDefinition[] sfcDimensions = new SFCDimensionDefinition[baseDefinitions.length]; for (int i = 0; i < baseDefinitions.length; i++) { sfcDimensions[i] = new SFCDimensionDefinition(baseDefinitions[i], maxBitsPerDimension[i]); } pointCurve = SFCFactory.createSpaceFillingCurve(sfcDimensions, SFCType.HILBERT); xzCurve = SFCFactory.createSpaceFillingCurve(sfcDimensions, SFCType.XZORDER); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... hints) { return getQueryRanges(indexedRange, DEFAULT_MAX_RANGES, hints); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxEstimatedRangeDecomposition, final IndexMetaData... 
hints) { // TODO don't just pass max ranges along to the SFC, take tiering and // binning into account to limit the number of ranges correctly TierIndexMetaData tieredHints = null; XZHierarchicalIndexMetaData xzHints = null; if ((hints != null) && (hints.length > 0)) { tieredHints = (TierIndexMetaData) hints[0]; xzHints = (XZHierarchicalIndexMetaData) hints[1]; } final QueryRanges queryRanges = rasterStrategy.getQueryRanges(indexedRange, maxEstimatedRangeDecomposition, tieredHints); final List binnedQueries = BinnedNumericDataset.applyBins(indexedRange, baseDefinitions); final List partitionedRanges = new ArrayList<>(); if ((xzHints == null) || (xzHints.pointCurveCount > 0)) { partitionedRanges.addAll( BinnedSFCUtils.getQueryRanges( binnedQueries, pointCurve, maxEstimatedRangeDecomposition, // for // now // we're // doing this // per SFC rather // than // dividing by the // SFCs pointCurveMultiDimensionalId)); } if ((xzHints == null) || (xzHints.xzCurveCount > 0)) { partitionedRanges.addAll( BinnedSFCUtils.getQueryRanges( binnedQueries, xzCurve, maxEstimatedRangeDecomposition, // for // now // we're // doing this // per SFC rather // than // dividing by the // SFCs xzCurveMultiDimensionalId)); } if (partitionedRanges.isEmpty()) { return queryRanges; } final List queryRangesList = new ArrayList<>(); queryRangesList.add(queryRanges); queryRangesList.add(new QueryRanges(partitionedRanges)); return new QueryRanges(queryRangesList); } @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { final List ranges = BinnedNumericDataset.applyBins(indexedData, baseDefinitions); final List partitionIds = new ArrayList<>(ranges.size()); for (final BinnedNumericDataset range : ranges) { final BigInteger pointIds = pointCurve.getEstimatedIdCount(range); final SinglePartitionInsertionIds pointCurveId = BinnedSFCUtils.getSingleBinnedInsertionId( pointIds, pointCurveMultiDimensionalId, range, pointCurve); if (pointCurveId != null) { 
partitionIds.add(pointCurveId); } else { final Double[] mins = range.getMinValuesPerDimension(); final Double[] maxes = range.getMaxValuesPerDimension(); final Double[] values = new Double[mins.length + maxes.length]; for (int i = 0; i < (values.length - 1); i++) { values[i] = mins[i / 2]; values[i + 1] = maxes[i / 2]; i++; } final byte[] xzId = xzCurve.getId(values); partitionIds.add( new SinglePartitionInsertionIds( ByteArrayUtils.combineArrays( new byte[] {xzCurveMultiDimensionalId}, range.getBinId()), xzId)); } } return new InsertionIds(partitionIds); } @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxEstimatedDuplicateIds) { return getInsertionIds(indexedData); } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { // select curve based on first byte final byte first = partitionKey[0]; if (first == pointCurveMultiDimensionalId) { return pointCurve.getRanges(sortKey); } else if (first == xzCurveMultiDimensionalId) { return xzCurve.getRanges(sortKey); } else { return rasterStrategy.getRangeForId(partitionKey, sortKey); } } @Override public int hashCode() { // internal tiered raster strategy already contains all the details that // provide uniqueness and comparability to the hierarchical strategy return rasterStrategy.hashCode(); } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final XZHierarchicalIndexStrategy other = (XZHierarchicalIndexStrategy) obj; // internal tiered raster strategy already contains all the details that // provide uniqueness and comparability to the hierarchical strategy return rasterStrategy.equals(other.rasterStrategy); } @Override public String getId() { return StringUtils.intToString(hashCode()); } @Override public byte[] toBinary() { final List dimensionDefBinaries = new 
ArrayList<>(baseDefinitions.length); int bufferLength = VarintUtils.unsignedIntByteLength(baseDefinitions.length); for (final NumericDimensionDefinition dimension : baseDefinitions) { final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(dimension); bufferLength += (sfcDimensionBinary.length + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length)); dimensionDefBinaries.add(sfcDimensionBinary); } final byte[] rasterStrategyBinary = PersistenceUtils.toBinary(rasterStrategy); bufferLength += VarintUtils.unsignedIntByteLength(rasterStrategyBinary.length) + rasterStrategyBinary.length; bufferLength += VarintUtils.unsignedIntByteLength(maxBitsPerDimension.length); bufferLength += maxBitsPerDimension.length * 4; final ByteBuffer buf = ByteBuffer.allocate(bufferLength); VarintUtils.writeUnsignedInt(baseDefinitions.length, buf); for (final byte[] dimensionDefBinary : dimensionDefBinaries) { VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf); buf.put(dimensionDefBinary); } VarintUtils.writeUnsignedInt(rasterStrategyBinary.length, buf); buf.put(rasterStrategyBinary); VarintUtils.writeUnsignedInt(maxBitsPerDimension.length, buf); for (final int dimBits : maxBitsPerDimension) { buf.putInt(dimBits); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); baseDefinitions = new NumericDimensionDefinition[numDimensions]; for (int i = 0; i < numDimensions; i++) { final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); baseDefinitions[i] = (NumericDimensionDefinition) PersistenceUtils.fromBinary(dim); } final int rasterStrategySize = VarintUtils.readUnsignedInt(buf); final byte[] rasterStrategyBinary = ByteArrayUtils.safeRead(buf, rasterStrategySize); rasterStrategy = (TieredSFCIndexStrategy) PersistenceUtils.fromBinary(rasterStrategyBinary); final int bitsPerDimensionLength = 
VarintUtils.readUnsignedInt(buf); maxBitsPerDimension = new int[bitsPerDimensionLength]; for (int i = 0; i < bitsPerDimensionLength; i++) { maxBitsPerDimension[i] = buf.getInt(); } init(baseDefinitions); } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { // select curve based on first byte final byte first = partitionKey[0]; Coordinate[] coordinates = null; if (first == pointCurveMultiDimensionalId) { coordinates = BinnedSFCUtils.getCoordinatesForId( ByteArrayUtils.combineArrays(partitionKey, sortKey == null ? null : sortKey), baseDefinitions, pointCurve); } else if (first == xzCurveMultiDimensionalId) { coordinates = BinnedSFCUtils.getCoordinatesForId( ByteArrayUtils.combineArrays(partitionKey, sortKey == null ? null : sortKey), baseDefinitions, xzCurve); } else { return rasterStrategy.getCoordinatesPerDimension(partitionKey, sortKey); } if (coordinates == null) { return null; } return new MultiDimensionalCoordinates(new byte[] {first}, coordinates); } @Override public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... hints) { final List coordRanges = new ArrayList<>(); final BinRange[][] binRangesPerDimension = BinnedNumericDataset.getBinnedRangesPerDimension(dataRange, baseDefinitions); rasterStrategy.calculateCoordinateRanges(coordRanges, binRangesPerDimension, hints); final XZHierarchicalIndexMetaData metaData = ((hints.length > 1) && (hints[1] != null) && (hints[1] instanceof XZHierarchicalIndexMetaData)) ? 
(XZHierarchicalIndexMetaData) hints[1] : null; if (metaData != null) { if (metaData.pointCurveCount > 0) { coordRanges.add( BinnedSFCUtils.getCoordinateRanges( binRangesPerDimension, pointCurve, baseDefinitions.length, pointCurveMultiDimensionalId)); } if (metaData.xzCurveCount > 0) { // XZ does not implement this and will return full ranges coordRanges.add( BinnedSFCUtils.getCoordinateRanges( binRangesPerDimension, xzCurve, baseDefinitions.length, xzCurveMultiDimensionalId)); } } return coordRanges.toArray(new MultiDimensionalCoordinateRanges[coordRanges.size()]); } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return baseDefinitions; } @Override public double[] getHighestPrecisionIdRangePerDimension() { return pointCurve.getInsertionIdRangePerDimension(); } @Override public int getPartitionKeyLength() { return byteOffsetFromDimensionIndex; } @Override public SubStrategy[] getSubStrategies() { return rasterStrategy.getSubStrategies(); } @Override public List createMetaData() { final List metaData = new ArrayList<>(); metaData.addAll(rasterStrategy.createMetaData()); metaData.add( new XZHierarchicalIndexMetaData(pointCurveMultiDimensionalId, xzCurveMultiDimensionalId)); return metaData; } public static class XZHierarchicalIndexMetaData implements IndexMetaData { private int pointCurveCount = 0; private int xzCurveCount = 0; private byte pointCurveMultiDimensionalId; private byte xzCurveMultiDimensionalId; public XZHierarchicalIndexMetaData() {} public XZHierarchicalIndexMetaData( final byte pointCurveMultiDimensionalId, final byte xzCurveMultiDimensionalId) { super(); this.pointCurveMultiDimensionalId = pointCurveMultiDimensionalId; this.xzCurveMultiDimensionalId = xzCurveMultiDimensionalId; } @Override public byte[] toBinary() { final ByteBuffer buffer = ByteBuffer.allocate( 2 + VarintUtils.unsignedIntByteLength(pointCurveCount) + VarintUtils.unsignedIntByteLength(xzCurveCount)); buffer.put(pointCurveMultiDimensionalId); 
buffer.put(xzCurveMultiDimensionalId); VarintUtils.writeUnsignedInt(pointCurveCount, buffer); VarintUtils.writeUnsignedInt(xzCurveCount, buffer); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); pointCurveMultiDimensionalId = buffer.get(); xzCurveMultiDimensionalId = buffer.get(); pointCurveCount = VarintUtils.readUnsignedInt(buffer); xzCurveCount = VarintUtils.readUnsignedInt(buffer); } @Override public void merge(final Mergeable merge) { if (merge instanceof XZHierarchicalIndexMetaData) { final XZHierarchicalIndexMetaData other = (XZHierarchicalIndexMetaData) merge; pointCurveCount += other.pointCurveCount; xzCurveCount += other.xzCurveCount; } } @Override public String toString() { return "XZ Hierarchical Metadata[Point Curve Count:" + pointCurveCount + ", XZ Curve Count:" + xzCurveCount + "]"; } @Override public void insertionIdsAdded(final InsertionIds insertionIds) { for (final SinglePartitionInsertionIds partitionId : insertionIds.getPartitionKeys()) { final byte first = partitionId.getPartitionKey()[0]; if (first == pointCurveMultiDimensionalId) { pointCurveCount += partitionId.getSortKeys().size(); } else if (first == xzCurveMultiDimensionalId) { xzCurveCount += partitionId.getSortKeys().size(); } } } @Override public void insertionIdsRemoved(final InsertionIds insertionIds) { for (final SinglePartitionInsertionIds partitionId : insertionIds.getPartitionKeys()) { final byte first = partitionId.getPartitionKey()[0]; if (first == pointCurveMultiDimensionalId) { pointCurveCount -= partitionId.getSortKeys().size(); } else if (first == xzCurveMultiDimensionalId) { xzCurveCount -= partitionId.getSortKeys().size(); } } } /** Convert XZHierarchical Index Metadata statistics to a JSON object */ @Override public JSONObject toJSONObject() throws JSONException { final JSONObject jo = new JSONObject(); jo.put("type", "XZHierarchicalIndexStrategy"); jo.put("pointCurveMultiDimensionalId", 
pointCurveMultiDimensionalId); jo.put("xzCurveMultiDimensionalId", xzCurveMultiDimensionalId); jo.put("pointCurveCount", pointCurveCount); jo.put("xzCurveCount", xzCurveCount); return jo; } } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { return IndexUtils.getInsertionPartitionKeys(this, insertionData); } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return IndexUtils.getQueryPartitionKeys(this, queryData, hints); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/xz/XZOrderSFC.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.xz; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayRange.MergeOperation; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class XZOrderSFC implements SpaceFillingCurve { private static final Logger LOGGER = LoggerFactory.getLogger(XZOrderSFC.class); private static double LOG_POINT_FIVE = Math.log(0.5); // the initial level of 2^dim tree private XElement[] LevelOneElements; // indicator that we have searched a full level of the 2^dim tree private XElement LevelTerminator; // TODO magic number; have to determine most appropriate value (12 seems to have potential // issues)? 
private static int g = 11; private SFCDimensionDefinition[] dimensionDefs; private int dimensionCount; private int nthPowerOfTwo; public XZOrderSFC() {} public XZOrderSFC(final SFCDimensionDefinition[] dimensionDefs) { this.dimensionDefs = dimensionDefs; init(); } private void init() { dimensionCount = dimensionDefs.length; nthPowerOfTwo = (int) Math.pow(2, dimensionCount); final double[] mins = new double[dimensionCount]; Arrays.fill(mins, 0.0); final double[] maxes = new double[dimensionCount]; Arrays.fill(maxes, 1.0); final double[] negativeOnes = new double[dimensionCount]; Arrays.fill(negativeOnes, -1.0); LevelOneElements = new XElement(mins, maxes, 1.0).children(); LevelTerminator = new XElement(negativeOnes, negativeOnes, 0.0); } @Override public byte[] getId(Double[] values) { if (values.length == dimensionCount) { // We have a point, not a bounding box int boxCount = 0; final Double[] boxedValues = new Double[dimensionCount * 2]; for (int i = 0; i < dimensionCount; i++) { boxedValues[boxCount++] = values[i]; boxedValues[boxCount++] = values[i]; } values = boxedValues; } if (values.length != (dimensionCount * 2)) { LOGGER.error( "Point or bounding box value count does not match number of indexed dimensions."); return null; } normalize(values); // calculate the length of the sequence code (section 4.1 of XZ-Ordering // paper) double maxDim = 0.0; for (int i = 0; (i + 1) < values.length; i++) { maxDim = Math.max(maxDim, Math.abs(values[i] - values[++i])); } // l1 (el-one) is a bit confusing to read, but corresponds with the // paper's definitions final int l1 = (int) Math.floor(Math.log(maxDim) / LOG_POINT_FIVE); // the length will either be (l1) or (l1 + 1) int length = g; if (l1 < g) { final double w2 = Math.pow(0.5, l1 + 1); // width of an element at // resolution l2 (l1 + 1) length = l1 + 1; for (int i = 0; (i + 1) < values.length; i++) { if (!predicate(values[i], values[++i], w2)) { length = l1; break; } } } final double[] minValues = new 
double[values.length / 2]; for (int i = 0; (i + 1) < values.length; i += 2) { minValues[i / 2] = values[i]; } return sequenceCode(minValues, length); } // predicate for checking how many axis the polygon intersects // math.floor(min / w2) * w2 == start of cell containing min private boolean predicate(final double min, final double max, final double w2) { return max <= ((Math.floor(min / w2) * w2) + (2 * w2)); } /** Normalize user space values to [0,1] */ private void normalize(final Double[] values) { for (int i = 0; i < values.length; i++) { values[i] = dimensionDefs[i / 2].normalize(values[i]); } } private byte[] sequenceCode(final double[] minValues, final int length) { final double[] minsPerDimension = new double[dimensionCount]; Arrays.fill(minsPerDimension, 0.0); final double[] maxesPerDimension = new double[dimensionCount]; Arrays.fill(maxesPerDimension, 1.0); long cs = 0L; for (int i = 0; i < length; i++) { final double[] centers = new double[dimensionCount]; for (int j = 0; j < dimensionCount; j++) { centers[j] = (minsPerDimension[j] + maxesPerDimension[j]) / 2.0; } final BitSet bits = new BitSet(dimensionCount); for (int j = dimensionCount - 1; j >= 0; j--) { if (minValues[j] >= centers[j]) { bits.set(j); } } long bTerm = 0L; final long[] longs = bits.toLongArray(); if (longs.length > 0) { bTerm = longs[0]; } cs += 1L + ((bTerm * (((long) (Math.pow(nthPowerOfTwo, g - i))) - 1L)) / ((long) nthPowerOfTwo - 1)); for (int j = 0; j < dimensionCount; j++) { if (minValues[j] < centers[j]) { maxesPerDimension[j] = centers[j]; } else { minsPerDimension[j] = centers[j]; } } } return ByteArrayUtils.longToByteArray(cs); } /** * An extended Z curve element. Bounds refer to the non-extended z element for simplicity of * calculation. * *

An extended Z element refers to a normal Z curve element that has its upper bounds expanded * by double its dimensions. By convention, an element is always an n-cube. */ private static class XElement { private final double[] minsPerDimension; private final double[] maxesPerDimension; private double length; private final Double[] extendedBounds; private XElement[] children; private final int dimensionCount; private final int nthPowerOfTwo; public XElement( final double[] minsPerDimension, final double[] maxesPerDimension, final double length) { this.minsPerDimension = minsPerDimension; this.maxesPerDimension = maxesPerDimension; this.length = length; dimensionCount = minsPerDimension.length; nthPowerOfTwo = (int) Math.pow(2, dimensionCount); extendedBounds = new Double[dimensionCount]; } public XElement(final XElement xElement) { this( Arrays.copyOf(xElement.minsPerDimension, xElement.minsPerDimension.length), Arrays.copyOf(xElement.maxesPerDimension, xElement.maxesPerDimension.length), xElement.length); } // lazy-evaluated extended bounds public double getExtendedBound(final int dimension) { if (extendedBounds[dimension] == null) { extendedBounds[dimension] = maxesPerDimension[dimension] + length; } return extendedBounds[dimension]; } public boolean isContained(final Double[] windowMins, final Double[] windowMaxes) { for (int i = 0; i < dimensionCount; i++) { if ((windowMins[i] > minsPerDimension[i]) || (windowMaxes[i] < getExtendedBound(i))) { return false; } } return true; } public boolean overlaps(final Double[] windowMins, final Double[] windowMaxes) { for (int i = 0; i < dimensionCount; i++) { if ((windowMaxes[i] < minsPerDimension[i]) || (windowMins[i] > getExtendedBound(i))) { return false; } } return true; } public XElement[] children() { if (children == null) { final double[] centers = new double[dimensionCount]; for (int i = 0; i < dimensionCount; i++) { centers[i] = (minsPerDimension[i] + maxesPerDimension[i]) / 2.0; } final double len = length / 2.0; 
children = new XElement[nthPowerOfTwo]; for (int i = 0; i < children.length; i++) { final XElement child = new XElement(this); child.length = len; String binaryString = Integer.toBinaryString(i); // pad or trim binary as necessary to match dimensionality // of curve int paddingCount = binaryString.length() - dimensionCount; if (paddingCount > 0) { binaryString = binaryString.substring(paddingCount); } else { while (paddingCount < 0) { binaryString = "0" + binaryString; paddingCount++; } } for (int j = 1; j <= dimensionCount; j++) { if (binaryString.charAt(j - 1) == '1') { child.minsPerDimension[dimensionCount - j] = centers[dimensionCount - j]; } else { child.maxesPerDimension[dimensionCount - j] = centers[dimensionCount - j]; } } children[i] = child; } } return children; } } @Override public RangeDecomposition decomposeRangeFully(final MultiDimensionalNumericData query) { return decomposeRange(query, true, -1); } @Override public RangeDecomposition decomposeRange( final MultiDimensionalNumericData query, final boolean overInclusiveOnEdge, final int maxRanges) { // normalize query values final Double[] queryMins = query.getMinValuesPerDimension(); final Double[] queryMaxes = query.getMaxValuesPerDimension(); for (int i = 0; i < dimensionCount; i++) { queryMins[i] = dimensionDefs[i].normalize(queryMins[i]); queryMaxes[i] = dimensionDefs[i].normalize(queryMaxes[i]); } // stores our results - initial size of 100 in general saves us some // re-allocation final ArrayList ranges = new ArrayList<>(100); // values remaining to process - initial size of 100 in general saves us // some re-allocation final ArrayDeque remaining = new ArrayDeque<>(100); // initial level for (final XElement levelOneEl : LevelOneElements) { remaining.add(levelOneEl); } remaining.add(LevelTerminator); // level of recursion short level = 1; while ((level < g) && !remaining.isEmpty() && ((maxRanges < 1) || (ranges.size() < maxRanges))) { final XElement next = remaining.poll(); if 
(next.equals(LevelTerminator)) { // we've fully processed a level, increment our state if (!remaining.isEmpty()) { level = (short) (level + 1); remaining.add(LevelTerminator); } } else { checkValue(next, level, queryMins, queryMaxes, ranges, remaining); } } // bottom out and get all the ranges that partially overlapped but we // didn't fully process while (!remaining.isEmpty()) { final XElement next = remaining.poll(); if (next.equals(LevelTerminator)) { level = (short) (level + 1); } else { final ByteArrayRange range = sequenceInterval(next.minsPerDimension, level, false); ranges.add(range); } } // we've got all our ranges - now reduce them down by merging // overlapping values // note: we don't bother reducing the ranges as in the XZ paper, as // accumulo handles lots of ranges fairly well final ArrayList result = (ArrayList) ByteArrayRange.mergeIntersections(ranges, MergeOperation.UNION); return new RangeDecomposition(result.toArray(new ByteArrayRange[result.size()])); } // checks a single value and either: // eliminates it as out of bounds // adds it to our results as fully matching, or // adds it to our results as partial matching and queues up it's children // for further processing private void checkValue( final XElement value, final Short level, final Double[] queryMins, final Double[] queryMaxes, final ArrayList ranges, final ArrayDeque remaining) { if (value.isContained(queryMins, queryMaxes)) { // whole range matches, happy day final ByteArrayRange range = sequenceInterval(value.minsPerDimension, level, false); ranges.add(range); } else if (value.overlaps(queryMins, queryMaxes)) { // some portion of this range is excluded // add the partial match and queue up each sub-range for processing final ByteArrayRange range = sequenceInterval(value.minsPerDimension, level, true); ranges.add(range); for (final XElement child : value.children()) { remaining.add(child); } } } /** * Computes an interval of sequence codes for a given point - for polygons this is the 
lower-left * corner. * * @param minsPerDimension normalized min values [0,1] per dimension * @param length length of the sequence code that will used as the basis for this interval * @param partial true if the element partially intersects the query window, false if it is fully * contained * @return */ private ByteArrayRange sequenceInterval( final double[] minsPerDimension, final short length, final boolean partial) { final byte[] min = sequenceCode(minsPerDimension, length); // if a partial match, we just use the single sequence code as an // interval // if a full match, we have to match all sequence codes starting with // the single sequence code byte[] max; if (partial) { max = min; } else { // from lemma 3 in the XZ-Ordering paper max = ByteArrayUtils.longToByteArray( ByteArrayUtils.byteArrayToLong(min) + ((((long) (Math.pow(nthPowerOfTwo, (g - length) + 1))) - 1L) / (nthPowerOfTwo - 1))); } return new ByteArrayRange(min, max); } @Override public byte[] toBinary() { final List dimensionDefBinaries = new ArrayList<>(dimensionDefs.length); int bufferLength = VarintUtils.unsignedIntByteLength(dimensionDefs.length); for (final SFCDimensionDefinition sfcDimension : dimensionDefs) { final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(sfcDimension); bufferLength += (sfcDimensionBinary.length + VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length)); dimensionDefBinaries.add(sfcDimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(bufferLength); VarintUtils.writeUnsignedInt(dimensionDefs.length, buf); for (final byte[] dimensionDefBinary : dimensionDefBinaries) { VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf); buf.put(dimensionDefBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); dimensionDefs = new SFCDimensionDefinition[numDimensions]; for (int i = 0; i < numDimensions; i++) { final byte[] 
dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); dimensionDefs[i] = (SFCDimensionDefinition) PersistenceUtils.fromBinary(dim); } init(); } @Override public double[] getInsertionIdRangePerDimension() { final double normalizedSize = Math.pow(0.5, g); final double[] rangesPerDimension = new double[dimensionCount]; for (int i = 0; i < dimensionCount; i++) { rangesPerDimension[i] = dimensionDefs[i].denormalize(normalizedSize); } return rangesPerDimension; } @Override public BigInteger getEstimatedIdCount(final MultiDimensionalNumericData data) { // TODO Replace hard-coded value with real implementation? return BigInteger.ONE; } // TODO Backwords (sfc-space to user-space) conversion?? @Override public MultiDimensionalNumericData getRanges(final byte[] id) { // use max range per dimension for now // to avoid false negatives final NumericData[] dataPerDimension = new NumericData[dimensionCount]; int i = 0; for (final SFCDimensionDefinition dim : dimensionDefs) { dataPerDimension[i++] = dim.getFullRange(); } return new BasicNumericDataset(dataPerDimension); } @Override public long[] getCoordinates(final byte[] id) { return null; } @Override public long[] normalizeRange(final double minValue, final double maxValue, final int dimension) { // TODO: This should actually be calculated return new long[] {Long.MIN_VALUE, Long.MAX_VALUE}; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/zorder/ZOrderSFC.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.zorder; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.sfc.RangeDecomposition; import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition; import org.locationtech.geowave.core.index.sfc.SpaceFillingCurve; /** * Implementation of a ZOrder Space Filling Curve. Also called Morton, GeoHash, etc. 
*/ public class ZOrderSFC implements SpaceFillingCurve { private SFCDimensionDefinition[] dimensionDefs; private int cardinalityPerDimension; private double binsPerDimension; public ZOrderSFC() { super(); } /** * Use the SFCFactory.createSpaceFillingCurve method - don't call this constructor directly */ public ZOrderSFC(final SFCDimensionDefinition[] dimensionDefs) { init(dimensionDefs); } private void init(final SFCDimensionDefinition[] dimensionDefs) { this.dimensionDefs = dimensionDefs; cardinalityPerDimension = 1; for (final SFCDimensionDefinition dimensionDef : dimensionDefs) { if (dimensionDef.getBitsOfPrecision() > cardinalityPerDimension) { cardinalityPerDimension = dimensionDef.getBitsOfPrecision(); } } binsPerDimension = Math.pow(2, cardinalityPerDimension); } /** * {@inheritDoc} */ @Override public byte[] getId(final Double[] values) { final double[] normalizedValues = new double[values.length]; for (int d = 0; d < values.length; d++) { normalizedValues[d] = dimensionDefs[d].normalize(values[d]); } return ZOrderUtils.encode(normalizedValues, cardinalityPerDimension, values.length); } @Override public MultiDimensionalNumericData getRanges(final byte[] id) { return new BasicNumericDataset( ZOrderUtils.decodeRanges(id, cardinalityPerDimension, dimensionDefs)); } @Override public long[] getCoordinates(final byte[] id) { return ZOrderUtils.decodeIndices(id, cardinalityPerDimension, dimensionDefs.length); } @Override public double[] getInsertionIdRangePerDimension() { final double[] retVal = new double[dimensionDefs.length]; for (int i = 0; i < dimensionDefs.length; i++) { retVal[i] = dimensionDefs[i].getRange() / binsPerDimension; } return retVal; } @Override public BigInteger getEstimatedIdCount(final MultiDimensionalNumericData data) { final Double[] mins = data.getMinValuesPerDimension(); final Double[] maxes = data.getMaxValuesPerDimension(); BigInteger estimatedIdCount = BigInteger.valueOf(1); for (int d = 0; d < data.getDimensionCount(); d++) { final 
double binMin = dimensionDefs[d].normalize(mins[d]) * binsPerDimension; final double binMax = dimensionDefs[d].normalize(maxes[d]) * binsPerDimension; estimatedIdCount = estimatedIdCount.multiply(BigInteger.valueOf((long) (Math.abs(binMax - binMin) + 1))); } return estimatedIdCount; } /** * {@inheritDoc} */ @Override public RangeDecomposition decomposeRange( final MultiDimensionalNumericData query, final boolean overInclusiveOnEdge, final int maxFilteredIndexedRanges) { // TODO: Because the research and benchmarking show Hilbert to // outperform Z-Order // the optimization of full query decomposition is not implemented at // the moment for Z-Order final Double[] queryMins = query.getMinValuesPerDimension(); final Double[] queryMaxes = query.getMaxValuesPerDimension(); final double[] normalizedMins = new double[query.getDimensionCount()]; final double[] normalizedMaxes = new double[query.getDimensionCount()]; for (int d = 0; d < query.getDimensionCount(); d++) { normalizedMins[d] = dimensionDefs[d].normalize(queryMins[d]); normalizedMaxes[d] = dimensionDefs[d].normalize(queryMaxes[d]); } final byte[] minZorder = ZOrderUtils.encode(normalizedMins, cardinalityPerDimension, query.getDimensionCount()); final byte[] maxZorder = ZOrderUtils.encode(normalizedMaxes, cardinalityPerDimension, query.getDimensionCount()); return new RangeDecomposition(new ByteArrayRange[] {new ByteArrayRange(minZorder, maxZorder)}); } /** * {@inheritDoc} */ @Override public RangeDecomposition decomposeRangeFully(final MultiDimensionalNumericData query) { return decomposeRange(query, true, -1); } @Override public byte[] toBinary() { final List dimensionDefBinaries = new ArrayList<>(dimensionDefs.length); int bufferLength = VarintUtils.unsignedIntByteLength(dimensionDefs.length); for (final SFCDimensionDefinition sfcDimension : dimensionDefs) { final byte[] sfcDimensionBinary = PersistenceUtils.toBinary(sfcDimension); bufferLength += (sfcDimensionBinary.length + 
VarintUtils.unsignedIntByteLength(sfcDimensionBinary.length)); dimensionDefBinaries.add(sfcDimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(bufferLength); VarintUtils.writeUnsignedInt(dimensionDefs.length, buf); for (final byte[] dimensionDefBinary : dimensionDefBinaries) { VarintUtils.writeUnsignedInt(dimensionDefBinary.length, buf); buf.put(dimensionDefBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); dimensionDefs = new SFCDimensionDefinition[numDimensions]; for (int i = 0; i < numDimensions; i++) { final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); dimensionDefs[i] = (SFCDimensionDefinition) PersistenceUtils.fromBinary(dim); } init(dimensionDefs); } @Override public int hashCode() { final int prime = 31; int result = 1; final String className = getClass().getName(); result = (prime * result) + ((className == null) ? 0 : className.hashCode()); result = (prime * result) + Arrays.hashCode(dimensionDefs); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final ZOrderSFC other = (ZOrderSFC) obj; if (!Arrays.equals(dimensionDefs, other.dimensionDefs)) { return false; } return true; } @Override public long[] normalizeRange(final double minValue, final double maxValue, final int d) { return new long[] { (long) (dimensionDefs[d].normalize(minValue) * binsPerDimension), (long) (dimensionDefs[d].normalize(maxValue) * binsPerDimension)}; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/sfc/zorder/ZOrderUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc.zorder;

import java.util.Arrays;
import java.util.BitSet;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;

/**
 * Convenience methods used to decode/encode Z-Order space filling curve values (using a simple
 * bit-interleaving approach).
 */
public class ZOrderUtils {
  /**
   * Decodes a Z-Order value back into the per-dimension numeric range (cell bounds) it
   * represents, denormalized into user space via the dimension definitions.
   */
  public static NumericRange[] decodeRanges(
      final byte[] bytes,
      final int bitsPerDimension,
      final SFCDimensionDefinition[] dimensionDefinitions) {
    // BitSet.valueOf is little-endian, so swap the bit order of each byte first
    final byte[] littleEndianBytes = swapEndianFormat(bytes);
    final BitSet bitSet = BitSet.valueOf(littleEndianBytes);
    final NumericRange[] normalizedValues = new NumericRange[dimensionDefinitions.length];
    for (int d = 0; d < dimensionDefinitions.length; d++) {
      // de-interleave: every dimensionDefinitions.length-th bit belongs to dimension d
      final BitSet dimensionSet = new BitSet();
      int j = 0;
      for (int i = d;
          i < (bitsPerDimension * dimensionDefinitions.length);
          i += dimensionDefinitions.length) {
        dimensionSet.set(j++, bitSet.get(i));
      }
      normalizedValues[d] = decode(dimensionSet, 0, 1, dimensionDefinitions[d]);
    }
    return normalizedValues;
  }

  /**
   * Decodes a Z-Order value into per-dimension integer cell coordinates.
   */
  public static long[] decodeIndices(
      final byte[] bytes, final int bitsPerDimension, final int numDimensions) {
    final byte[] littleEndianBytes = swapEndianFormat(bytes);
    final BitSet bitSet = BitSet.valueOf(littleEndianBytes);
    final long[] coordinates = new long[numDimensions];
    final long rangePerDimension = (long) Math.pow(2, bitsPerDimension);
    for (int d = 0; d < numDimensions; d++) {
      // de-interleave the bits for this dimension
      final BitSet dimensionSet = new BitSet();
      int j = 0;
      for (int i = d; i < (bitsPerDimension * numDimensions); i += numDimensions) {
        dimensionSet.set(j++, bitSet.get(i));
      }
      coordinates[d] = decodeIndex(dimensionSet, rangePerDimension);
    }
    return coordinates;
  }

  // binary-search style descent: each bit halves [floor, ceiling) toward the cell index
  private static long decodeIndex(final BitSet bs, final long rangePerDimension) {
    long floor = 0;
    long ceiling = rangePerDimension;
    long mid = 0;
    for (int i = 0; i < bs.length(); i++) {
      mid = (floor + ceiling) / 2;
      if (bs.get(i)) {
        floor = mid;
      } else {
        ceiling = mid;
      }
    }
    return mid;
  }

  // same descent as decodeIndex, but over the normalized [0,1] interval; the resulting
  // [floor, ceiling] cell bounds are denormalized into user space
  private static NumericRange decode(
      final BitSet bs,
      double floor,
      double ceiling,
      final SFCDimensionDefinition dimensionDefinition) {
    double mid = 0;
    for (int i = 0; i < bs.length(); i++) {
      mid = (floor + ceiling) / 2;
      if (bs.get(i)) {
        floor = mid;
      } else {
        ceiling = mid;
      }
    }
    return new NumericRange(
        dimensionDefinition.denormalize(floor), dimensionDefinition.denormalize(ceiling));
  }

  /**
   * Encodes normalized [0,1] values into a Z-Order byte array by interleaving
   * bitsPerDimension bits from each dimension.
   */
  public static byte[] encode(
      final double[] normalizedValues, final int bitsPerDimension, final int numDimensions) {
    final BitSet[] bitSets = new BitSet[numDimensions];
    for (int d = 0; d < numDimensions; d++) {
      bitSets[d] = getBits(normalizedValues[d], 0, 1, bitsPerDimension);
    }
    final int usedBits = bitsPerDimension * numDimensions;
    final int usedBytes = (int) Math.ceil(usedBits / 8.0);
    final int bitsetLength = (usedBytes * 8);
    // left-pad so the interleaved bits are right-aligned within whole bytes
    final int bitOffset = bitsetLength - usedBits;
    // round up to a bitset divisible by 8
    final BitSet combinedBitSet = new BitSet(bitsetLength);
    int j = bitOffset;
    for (int i = 0; i < bitsPerDimension; i++) {
      for (int d = 0; d < numDimensions; d++) {
        combinedBitSet.set(j++, bitSets[d].get(i));
      }
    }
    final byte[] littleEndianBytes = combinedBitSet.toByteArray();
    final byte[] retVal = swapEndianFormat(littleEndianBytes);
    // BitSet.toByteArray drops trailing zero bytes; pad back to the expected length
    if (retVal.length < usedBytes) {
      return Arrays.copyOf(retVal, usedBytes);
    }
    return retVal;
  }

  /** Reverses the bit order of every byte in the array (big- to little-endian bit order). */
  public static byte[] swapEndianFormat(final byte[] b) {
    final byte[] endianSwappedBytes = new byte[b.length];
    for (int i = 0; i < b.length; i++) {
      endianSwappedBytes[i] = swapEndianFormat(b[i]);
    }
    return endianSwappedBytes;
  }

  // reverses the 8 bits of a single byte by shifting each bit to its mirrored position
  private static byte swapEndianFormat(final byte b) {
    int converted = 0x00;
    converted ^= (b & 0b1000_0000) >> 7;
    converted ^= (b & 0b0100_0000) >> 5;
    converted ^= (b & 0b0010_0000) >> 3;
    converted ^= (b & 0b0001_0000) >> 1;
    converted ^= (b & 0b0000_1000) << 1;
    converted ^= (b & 0b0000_0100) << 3;
    converted ^= (b & 0b0000_0010) << 5;
    converted ^= (b & 0b0000_0001) << 7;
    return (byte) (converted & 0xFF);
  }

  // converts a normalized value into its most-significant-first bit representation by
  // repeatedly halving [floor, ceiling]
  private static BitSet getBits(
      final double value, double floor, double ceiling, final int bitsPerDimension) {
    final BitSet buffer = new BitSet(bitsPerDimension);
    for (int i = 0; i < bitsPerDimension; i++) {
      final double mid = (floor + ceiling) / 2;
      if (value >= mid) {
        buffer.set(i);
        floor = mid;
      } else {
        ceiling = mid;
      }
    }
    return buffer;
  }
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/HashKeyIndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.simple;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.IndexMetaData;
import org.locationtech.geowave.core.index.PartitionIndexStrategy;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;

/**
 * Used to create determined, uniform row id prefix as one possible approach to prevent hot
 * spotting.
 *
 * Before using this class, one should consider balancing options for the specific data store.
 * Can one pre-split using a component of another index strategy (e.g. bin identifier)? How about
 * ingest first and then do major compaction?
 *
 * Consider that Accumulo 1.7 supports two balancers
 * org.apache.accumulo.server.master.balancer.RegexGroupBalancer and
 * org.apache.accumulo.server.master.balancer.GroupBalancer.
 *
 * This class should be used with a CompoundIndexStrategy. In addition, tablets should be
 * pre-split on the number of prefix IDs. Without splits, the splits are at the mercy of the Big
 * Table servers default. For example, Accumulo fills up one tablet before splitting, regardless of
 * the partitioning.
 *
 * The key set size does not need to be large. For example, using two times the number of tablet
 * servers (for growth) and presplitting, two keys per server. The default is 3.
 *
 * There is a cost to using this approach: queries must span all prefixes. The number of
 * prefixes should initially be at least the number of tablet servers.
 */
// NOTE(review): the implements clause appears to have lost its generic type arguments during
// extraction - confirm against PartitionIndexStrategy's declaration
public class HashKeyIndexStrategy implements PartitionIndexStrategy {
  private byte[][] keys;

  /** Default key set size is 3. */
  public HashKeyIndexStrategy() {
    this(3);
  }

  public HashKeyIndexStrategy(final int size) {
    init(size);
  }

  /**
   * Pre-computes the pool of partition keys. Up to 256 keys fit in a single byte; beyond that
   * each key is a 4-byte big-endian int.
   */
  private void init(final int size) {
    keys = new byte[size][];
    if (size > 256) {
      final ByteBuffer buf = ByteBuffer.allocate(4);
      for (int i = 0; i < size; i++) {
        buf.putInt(i);
        keys[i] = Arrays.copyOf(buf.array(), 4);
        buf.rewind();
      }
    } else {
      for (int i = 0; i < size; i++) {
        keys[i] = new byte[] {(byte) i};
      }
    }
  }

  @Override
  public String getId() {
    return StringUtils.intToString(hashCode());
  }

  /** Serializes only the key set size; the keys themselves are deterministically re-derived. */
  @Override
  public byte[] toBinary() {
    final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(keys.length));
    VarintUtils.writeUnsignedInt(keys.length, buf);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    init(VarintUtils.readUnsignedInt(buf));
  }

  public byte[][] getPartitionKeys() {
    return keys;
  }

  @Override
  public int getPartitionKeyLength() {
    if ((keys != null) && (keys.length > 0)) {
      return keys[0].length;
    }
    return 0;
  }

  @Override
  public List<IndexMetaData> createMetaData() {
    return Collections.emptyList();
  }

  /** Returns an insertion id selected round-robin from a predefined pool */
  @Override
  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
    // derive a deterministic hash from the data's bounds so the same data always maps to
    // the same partition
    final long hashCode;
    if (insertionData.isEmpty()) {
      hashCode = insertionData.hashCode();
    } else {
      hashCode =
          Arrays.hashCode(insertionData.getMaxValuesPerDimension())
              + (31 * Arrays.hashCode(insertionData.getMinValuesPerDimension()));
    }
    final int position = (int) (Math.abs(hashCode) % keys.length);
    return new byte[][] {keys[position]};
  }

  /** always return all keys */
  @Override
  public byte[][] getQueryPartitionKeys(
      final MultiDimensionalNumericData queryData, final IndexMetaData... hints) {
    return getPartitionKeys();
  }

  @Override
  public byte[][] getPredefinedSplits() {
    return getPartitionKeys();
  }
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/RoundRobinKeyIndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.simple;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.IndexMetaData;
import org.locationtech.geowave.core.index.PartitionIndexStrategy;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;

/**
 * Used to create determined, uniform row id prefix as one possible approach to prevent hot
 * spotting.
 *
 * Before using this class, one should consider balancing options for the specific data store.
 * Can one pre-split using a component of another index strategy (e.g. bin identifier)? How about
 * ingest first and then do major compaction?
 *
 * Consider that Accumulo 1.7 supports two balancers
 * org.apache.accumulo.server.master.balancer.RegexGroupBalancer and
 * org.apache.accumulo.server.master.balancer.GroupBalancer.
 *
 * This class should be used with a CompoundIndexStrategy. In addition, tablets should be
 * pre-split on the number of prefix IDs. Without splits, the splits are at the mercy of the Big
 * Table servers default. For example, Accumulo fills up one tablet before splitting, regardless of
 * the partitioning.
 *
 * The key set size does not need to be large. For example, using two times the number of tablet
 * servers (for growth) and presplitting, two keys per server. The default is 3.
 *
 * There is a cost to using this approach: queries must span all prefixes. The number of
 * prefixes should initially be at least the number of tablet servers.
 */
// NOTE(review): the implements clause appears to have lost its generic type arguments during
// extraction - confirm against PartitionIndexStrategy's declaration
public class RoundRobinKeyIndexStrategy implements PartitionIndexStrategy {
  private byte[][] keys;

  // NOTE(review): mutable public field; round-robin state is not thread-safe and is part of
  // the public interface, so it cannot be encapsulated without breaking callers
  public int position = 0;

  /** Default initial key set size is 3. */
  public RoundRobinKeyIndexStrategy() {
    init(3);
  }

  public RoundRobinKeyIndexStrategy(final int size) {
    init(size);
  }

  /**
   * Pre-computes the pool of partition keys. Up to 256 keys fit in a single byte; beyond that
   * each key is a 4-byte big-endian int.
   */
  private void init(final int size) {
    keys = new byte[size][];
    if (size > 256) {
      final ByteBuffer buf = ByteBuffer.allocate(4);
      for (int i = 0; i < size; i++) {
        buf.putInt(i);
        keys[i] = Arrays.copyOf(buf.array(), 4);
        buf.rewind();
      }
    } else {
      for (int i = 0; i < size; i++) {
        keys[i] = new byte[] {(byte) i};
      }
    }
  }

  @Override
  public String getId() {
    return StringUtils.intToString(hashCode());
  }

  /** Serializes only the key set size; the keys themselves are deterministically re-derived. */
  @Override
  public byte[] toBinary() {
    final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(keys.length));
    VarintUtils.writeUnsignedInt(keys.length, buf);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    init(VarintUtils.readUnsignedInt(buf));
  }

  public byte[][] getPartitionKeys() {
    return keys;
  }

  @Override
  public int getPartitionKeyLength() {
    if ((keys != null) && (keys.length > 0)) {
      return keys[0].length;
    }
    return 0;
  }

  @Override
  public List<IndexMetaData> createMetaData() {
    return Collections.emptyList();
  }

  /** Returns the next key in round-robin order (the counter advances before selection). */
  @Override
  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
    position = (position + 1) % keys.length;
    return new byte[][] {keys[position]};
  }

  /** always return all keys - queries must span every partition prefix */
  @Override
  public byte[][] getQueryPartitionKeys(
      final MultiDimensionalNumericData queryData, final IndexMetaData... hints) {
    return getPartitionKeys();
  }

  @Override
  public byte[][] getPredefinedSplits() {
    return getPartitionKeys();
  }
}

================================================
FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleByteIndexStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; /** * A simple 1-dimensional NumericIndexStrategy that represents an index of signed short values. The * strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index * strategy will not perform well for inserting ranges because there will be too much replication of * data. */ public class SimpleByteIndexStrategy extends SimpleNumericIndexStrategy { public SimpleByteIndexStrategy() { super(Lexicoders.BYTE); } @Override protected Byte cast(final double value) { return (byte) value; } @Override protected boolean isInteger() { return true; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleDoubleIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; public class SimpleDoubleIndexStrategy extends SimpleNumericIndexStrategy { public SimpleDoubleIndexStrategy() { super(Lexicoders.DOUBLE); } @Override protected Double cast(final double value) { return value; } @Override protected boolean isInteger() { return false; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleFloatIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; public class SimpleFloatIndexStrategy extends SimpleNumericIndexStrategy { public SimpleFloatIndexStrategy() { super(Lexicoders.FLOAT); } @Override protected Float cast(final double value) { return (float) value; } @Override protected boolean isInteger() { return false; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleIntegerIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; /** * A simple 1-dimensional NumericIndexStrategy that represents an index of signed integer values. * The strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index * strategy will not perform well for inserting ranges because there will be too much replication of * data. */ public class SimpleIntegerIndexStrategy extends SimpleNumericIndexStrategy { public SimpleIntegerIndexStrategy() { super(Lexicoders.INT); } @Override protected Integer cast(final double value) { return (int) value; } @Override protected boolean isInteger() { return true; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleLongIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; /** * A simple 1-dimensional NumericIndexStrategy that represents an index of signed long values. The * strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index * strategy will not perform well for inserting ranges because there will be too much replication of * data. */ public class SimpleLongIndexStrategy extends SimpleNumericIndexStrategy { public SimpleLongIndexStrategy() { super(Lexicoders.LONG); } public SimpleLongIndexStrategy(final NumericDimensionDefinition definition) { super(Lexicoders.LONG, new NumericDimensionDefinition[] {definition}); } @Override protected Long cast(final double value) { return (long) value; } @Override protected boolean isInteger() { return true; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleNumericIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Coordinate; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.lexicoder.NumberLexicoder; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A simple 1-dimensional NumericIndexStrategy that represents an index of signed integer values * (currently supports 16 bit, 32 bit, and 64 bit integers). The strategy doesn't use any binning. 
* The ids are simply the byte arrays of the value. This index strategy will not perform well for * inserting ranges because there will be too much replication of data. */ public abstract class SimpleNumericIndexStrategy implements NumericIndexStrategy { private static final Logger LOGGER = LoggerFactory.getLogger(SimpleNumericIndexStrategy.class); private final NumberLexicoder lexicoder; private final NumericDimensionDefinition[] definitions; protected SimpleNumericIndexStrategy(final NumberLexicoder lexicoder) { this( lexicoder, new NumericDimensionDefinition[] { new BasicDimensionDefinition( lexicoder.getMinimumValue().doubleValue(), lexicoder.getMaximumValue().doubleValue())}); } protected SimpleNumericIndexStrategy( final NumberLexicoder lexicoder, final NumericDimensionDefinition[] definitions) { this.lexicoder = lexicoder; this.definitions = definitions; } public NumberLexicoder getLexicoder() { return lexicoder; } /** * Cast a double into the type T * * @param value a double value * @return the value represented as a T */ protected abstract T cast(double value); /** * Checks whehter * * @return the value represented as a T */ protected abstract boolean isInteger(); /** * Always returns a single range since this is a 1-dimensional index. The sort-order of the bytes * is the same as the sort order of values, so an indexedRange can be represented by a single * contiguous ByteArrayRange. {@inheritDoc} */ @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... hints) { return getQueryRanges(indexedRange, -1, hints); } /** * Always returns a single range since this is a 1-dimensional index. The sort-order of the bytes * is the same as the sort order of values, so an indexedRange can be represented by a single * contiguous ByteArrayRange. 
{@inheritDoc} */ @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxEstimatedRangeDecomposition, final IndexMetaData... hints) { final T min = cast(indexedRange.getDataPerDimension()[0].getMin()); byte[] start = lexicoder.toByteArray(min); final T max = cast( isInteger() ? Math.ceil(indexedRange.getDataPerDimension()[0].getMax()) : indexedRange.getMaxValuesPerDimension()[0]); byte[] end = lexicoder.toByteArray(max); if (!indexedRange.getDataPerDimension()[0].isMinInclusive()) { start = ByteArrayUtils.getNextPrefix(start); } if (!indexedRange.getDataPerDimension()[0].isMaxInclusive()) { end = ByteArrayUtils.getPreviousPrefix(end); } final ByteArrayRange range = new ByteArrayRange(start, end); final SinglePartitionQueryRanges partitionRange = new SinglePartitionQueryRanges(Collections.singletonList(range)); return new QueryRanges(Collections.singletonList(partitionRange)); } /** * Returns all of the insertion ids for the range. Since this index strategy doensn't use binning, * it will return the ByteArrayId of every value in the range (i.e. if you are storing a range * using this index strategy, your data will be replicated for every integer value in the range). * *

{@inheritDoc} */ @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { return getInsertionIds(indexedData, -1); } /** * Returns all of the insertion ids for the range. Since this index strategy doensn't use binning, * it will return the ByteArrayId of every value in the range (i.e. if you are storing a range * using this index strategy, your data will be replicated for every integer value in the range). * *

{@inheritDoc} */ @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxEstimatedDuplicateIds) { if (indexedData.isEmpty()) { LOGGER.warn("Cannot index empty fields, skipping writing row to index '" + getId() + "'"); return new InsertionIds(); } final double min = indexedData.getMinValuesPerDimension()[0]; final double max = indexedData.getMaxValuesPerDimension()[0]; final List insertionIds = new ArrayList<>((int) (max - min) + 1); for (double i = min; i <= max; i++) { insertionIds.add(lexicoder.toByteArray(cast(i))); } return new InsertionIds(insertionIds); } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return definitions; } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { final double value = lexicoder.fromByteArray(sortKey).doubleValue(); final NumericData[] dataPerDimension = new NumericData[] {new NumericValue(value)}; return new BasicNumericDataset(dataPerDimension); } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { return new MultiDimensionalCoordinates( null, new Coordinate[] {new Coordinate(lexicoder.fromByteArray(sortKey).longValue(), null)}); } @Override public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... hints) { return null; } @Override public double[] getHighestPrecisionIdRangePerDimension() { return new double[] {1d}; } @Override public String getId() { return StringUtils.intToString(hashCode()); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(definitions); result = (prime * result) + ((lexicoder == null) ? 
0 : lexicoder.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SimpleNumericIndexStrategy other = (SimpleNumericIndexStrategy) obj; if (!Arrays.equals(definitions, other.definitions)) { return false; } if (lexicoder == null) { if (other.lexicoder != null) { return false; } } else if (!lexicoder.equals(other.lexicoder)) { return false; } return true; } @Override public List createMetaData() { return Collections.emptyList(); } @Override public int getPartitionKeyLength() { return 0; } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { return null; } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return null; } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/simple/SimpleShortIndexStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; /** * A simple 1-dimensional NumericIndexStrategy that represents an index of signed short values. The * strategy doesn't use any binning. The ids are simply the byte arrays of the value. This index * strategy will not perform well for inserting ranges because there will be too much replication of * data. */ public class SimpleShortIndexStrategy extends SimpleNumericIndexStrategy { public SimpleShortIndexStrategy() { super(Lexicoders.SHORT); } @Override protected Short cast(final double value) { return (short) value; } @Override protected boolean isInteger() { return true; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/BasicTextDataset.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; /** * The Basic Index Result class creates an object associated with a generic query. This class can be * used when the dimensions and/or axis are generic. */ public class BasicTextDataset implements MultiDimensionalTextData { private TextData[] dataPerDimension; /** Open ended/unconstrained */ public BasicTextDataset() { dataPerDimension = new TextData[0]; } /** * Constructor used to create a new Basic Text Dataset object. * * @param dataPerDimension an array of text data objects */ public BasicTextDataset(final TextData[] dataPerDimension) { this.dataPerDimension = dataPerDimension; } /** @return all of the maximum values (for each dimension) */ @Override public String[] getMaxValuesPerDimension() { final TextData[] ranges = getDataPerDimension(); final String[] maxPerDimension = new String[ranges.length]; for (int d = 0; d < ranges.length; d++) { maxPerDimension[d] = ranges[d].getMax(); } return maxPerDimension; } /** @return all of the minimum values (for each dimension) */ @Override public String[] getMinValuesPerDimension() { final TextData[] ranges = getDataPerDimension(); final String[] minPerDimension = new String[ranges.length]; for (int d = 0; d < ranges.length; d++) { minPerDimension[d] = ranges[d].getMin(); } return minPerDimension; } /** @return all of the centroid values (for each dimension) */ @Override public String[] getCentroidPerDimension() { final TextData[] ranges = getDataPerDimension(); 
final String[] centroid = new String[ranges.length]; for (int d = 0; d < ranges.length; d++) { centroid[d] = ranges[d].getCentroid(); } return centroid; } /** @return an array of NumericData objects */ @Override public TextData[] getDataPerDimension() { return dataPerDimension; } /** @return the number of dimensions associated with this data set */ @Override public int getDimensionCount() { return dataPerDimension.length; } @Override public boolean isEmpty() { if ((dataPerDimension == null) || (dataPerDimension.length == 0)) { return true; } return !Arrays.stream(dataPerDimension).noneMatch(d -> d == null); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(dataPerDimension); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final BasicTextDataset other = (BasicTextDataset) obj; if (!Arrays.equals(dataPerDimension, other.dataPerDimension)) { return false; } return true; } @Override public byte[] toBinary() { int totalBytes = VarintUtils.unsignedIntByteLength(dataPerDimension.length); final List serializedData = new ArrayList<>(); for (final TextData data : dataPerDimension) { final byte[] binary = PersistenceUtils.toBinary(data); totalBytes += (binary.length + VarintUtils.unsignedIntByteLength(binary.length)); serializedData.add(binary); } final ByteBuffer buf = ByteBuffer.allocate(totalBytes); VarintUtils.writeUnsignedInt(dataPerDimension.length, buf); for (final byte[] binary : serializedData) { VarintUtils.writeUnsignedInt(binary.length, buf); buf.put(binary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); dataPerDimension = new TextData[numDimensions]; for (int d = 0; d < numDimensions; d++) { final byte[] binary = 
ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); dataPerDimension[d] = (TextData) PersistenceUtils.fromBinary(binary); } } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/CaseSensitivity.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.text;

/** Whether a text index treats character case as significant when matching. */
public enum CaseSensitivity {
  CASE_SENSITIVE, CASE_INSENSITIVE
}

================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/EnumIndexStrategy.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.nio.ByteBuffer; import java.util.Arrays; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class EnumIndexStrategy implements CustomIndexStrategy { private static Logger LOGGER = LoggerFactory.getLogger(EnumIndexStrategy.class); private String[] exactMatchTerms; private TextIndexEntryConverter converter; public EnumIndexStrategy() {} public EnumIndexStrategy( final TextIndexEntryConverter converter, final String[] exactMatchTerms) { super(); this.converter = converter; Arrays.sort(exactMatchTerms); this.exactMatchTerms = exactMatchTerms; } @Override public Class getConstraintsClass() { return EnumSearch.class; } @Override public byte[] toBinary() { final byte[] converterBytes = PersistenceUtils.toBinary(converter); final byte[] termsBytes = StringUtils.stringsToBinary(exactMatchTerms); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(termsBytes.length) + termsBytes.length + converterBytes.length); VarintUtils.writeUnsignedInt(termsBytes.length, buf); buf.put(termsBytes); buf.put(converterBytes); return buf.array(); } @Override public 
void fromBinary(final byte[] bytes) { fromBinary(ByteBuffer.wrap(bytes)); } protected void fromBinary(final ByteBuffer buf) { final byte[] termsBytes = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(termsBytes); exactMatchTerms = StringUtils.stringsFromBinary(termsBytes); final byte[] converterBytes = new byte[buf.remaining()]; buf.get(converterBytes); converter = (TextIndexEntryConverter) PersistenceUtils.fromBinary(converterBytes); } @Override public InsertionIds getInsertionIds(final E entry) { final String str = entryToString(entry); if (str == null) { LOGGER.warn("Cannot index null enum, skipping entry"); return new InsertionIds(); } final int index = Arrays.binarySearch(exactMatchTerms, str); if (index < 0) { LOGGER.warn("Enumerated value not found for insertion '" + str + "'"); return new InsertionIds(); } return new InsertionIds( new SinglePartitionInsertionIds(null, VarintUtils.writeUnsignedInt(index))); } @Override public QueryRanges getQueryRanges(final EnumSearch constraints) { final int index = Arrays.binarySearch(exactMatchTerms, constraints.getSearchTerm()); final byte[] sortKey = VarintUtils.writeUnsignedInt(index); if (index < 0) { LOGGER.warn("Enumerated value not found for search '" + constraints.getSearchTerm() + "'"); // the sort key shouldn't match so let's pass through (alternatives to giving an unused sort // key such as null or empty queries result in all rows) } return new QueryRanges(new ByteArrayRange(sortKey, sortKey)); } protected String entryToString(final E entry) { return converter.apply(entry); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/EnumSearch.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.text;

import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.persist.Persistable;

/** Constraint holding a single exact-match term, used to query an {@link EnumIndexStrategy}. */
public class EnumSearch implements Persistable {
  // the exact term to match against the strategy's enumerated values
  private String searchTerm;

  /** No-arg constructor for deserialization. */
  public EnumSearch() {}

  public EnumSearch(final String searchTerm) {
    this.searchTerm = searchTerm;
  }

  public String getSearchTerm() {
    return searchTerm;
  }

  @Override
  public byte[] toBinary() {
    return StringUtils.stringToBinary(searchTerm);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    searchTerm = StringUtils.stringFromBinary(bytes);
  }
}

================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/ExplicitTextSearch.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.text;

import java.util.EnumSet;
import java.util.List;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import com.google.common.collect.Lists;

/**
 * Explicitly queries a set of text ranges.
*/ public class ExplicitTextSearch implements TextConstraints { private List indexData; public ExplicitTextSearch() {} public ExplicitTextSearch(final List indexData) { this.indexData = indexData; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(indexData); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(final byte[] bytes) { indexData = (List) PersistenceUtils.fromBinaryAsList(bytes); } @Override public QueryRanges getQueryRanges( final EnumSet supportedSearchTypes, final int nCharacterGrams) { final List ranges = Lists.newArrayListWithCapacity(indexData.size()); for (final MultiDimensionalTextData data : indexData) { ranges.add(TextIndexUtils.getQueryRanges(data)); } if (ranges.size() == 1) { return ranges.get(0); } return new QueryRanges(ranges); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/MultiDimensionalTextData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import org.locationtech.geowave.core.index.MultiDimensionalIndexData; /** Interface which defines the methods associated with a multi-dimensional text data range. 
 */
public interface MultiDimensionalTextData extends MultiDimensionalIndexData {
  /** @return an array of object QueryRange */
  @Override
  public TextData[] getDataPerDimension();
}

================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextConstraints.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.text;

import java.util.EnumSet;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * Provides QueryRanges for queries on text indices.
 */
public interface TextConstraints extends Persistable {
  /**
   * @param supportedSearchTypes the search types the target index supports
   * @param nCharacterGrams the n-gram size used by the target index
   * @return the ranges to scan for this constraint
   */
  QueryRanges getQueryRanges(
      final EnumSet supportedSearchTypes,
      final int nCharacterGrams);
}

================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextData.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.text;

import org.locationtech.geowave.core.index.IndexData;

/**
 * Interface used to define text data ranges.
 */
public interface TextData extends IndexData {
  /** @return whether comparisons for this datum are case sensitive */
  boolean isCaseSensitive();

  /** @return whether the text is stored/compared in reversed character order */
  boolean isReversed();
}

================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexEntryConverter.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.text;

import java.util.function.Function;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * Persistable function that converts an index entry to the text string to be indexed.
 */
public interface TextIndexEntryConverter extends Function, Persistable {
}

================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexStrategy.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.nio.ByteBuffer; import java.util.EnumSet; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; public class TextIndexStrategy implements CustomIndexStrategy { private EnumSet supportedSearchTypes; private EnumSet supportedCaseSensitivity; private TextIndexEntryConverter converter; private int nCharacterGrams; public TextIndexStrategy() {} public TextIndexStrategy(final TextIndexEntryConverter converter) { this(EnumSet.allOf(TextSearchType.class), EnumSet.allOf(CaseSensitivity.class), converter); } public TextIndexStrategy( final EnumSet supportedSearchTypes, final EnumSet caseSensitivity, final TextIndexEntryConverter converter) { this(supportedSearchTypes, caseSensitivity, 3, converter); } public TextIndexStrategy( final EnumSet supportedSearchTypes, final EnumSet supportedCaseSensitivity, final int nCharacterGrams, final TextIndexEntryConverter converter) { super(); this.supportedSearchTypes = supportedSearchTypes; this.supportedCaseSensitivity = supportedCaseSensitivity; this.nCharacterGrams = nCharacterGrams; this.converter = converter; } public TextIndexEntryConverter getEntryConverter() { return converter; } public boolean isSupported(final TextSearchType searchType) { return supportedSearchTypes.contains(searchType); } public boolean isSupported(final CaseSensitivity caseSensitivity) { return 
supportedCaseSensitivity.contains(caseSensitivity); } @Override public byte[] toBinary() { final int encodedType = encodeType(supportedSearchTypes); final int encodedCase = encodeCaseSensitivity(supportedCaseSensitivity); final byte[] converterBytes = PersistenceUtils.toBinary(converter); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(encodedType) + VarintUtils.unsignedIntByteLength(encodedCase) + VarintUtils.unsignedIntByteLength(nCharacterGrams) + converterBytes.length); VarintUtils.writeUnsignedInt(encodedType, buf); VarintUtils.writeUnsignedInt(encodedCase, buf); VarintUtils.writeUnsignedInt(nCharacterGrams, buf); buf.put(converterBytes); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { fromBinary(ByteBuffer.wrap(bytes)); } @SuppressWarnings("unchecked") protected void fromBinary(final ByteBuffer buf) { supportedSearchTypes = decodeType(VarintUtils.readUnsignedInt(buf)); supportedCaseSensitivity = decodeCaseSensitivity(VarintUtils.readUnsignedInt(buf)); nCharacterGrams = VarintUtils.readUnsignedInt(buf); final byte[] converterBytes = new byte[buf.remaining()]; buf.get(converterBytes); converter = (TextIndexEntryConverter) PersistenceUtils.fromBinary(converterBytes); } @Override public InsertionIds getInsertionIds(final E entry) { return TextIndexUtils.getInsertionIds( entryToString(entry), supportedSearchTypes, supportedCaseSensitivity, nCharacterGrams); } @Override public QueryRanges getQueryRanges(final TextConstraints constraints) { return constraints.getQueryRanges(supportedSearchTypes, nCharacterGrams); } public QueryRanges getQueryRanges(final MultiDimensionalTextData textData) { return TextIndexUtils.getQueryRanges(textData); } @SuppressWarnings({"rawtypes", "unchecked"}) @Override public PersistableBiPredicate getFilter(final TextConstraints constraints) { if (constraints instanceof TextSearch) { if (((TextSearch) constraints).getType().requiresEvaluate()) { return (PersistableBiPredicate) new 
TextSearchPredicate<>(converter); } } return CustomIndexStrategy.super.getFilter(constraints); } protected String entryToString(final E entry) { return converter.apply(entry); } private static int encodeType(final EnumSet set) { int ret = 0; for (final TextSearchType val : set) { ret |= 1 << val.ordinal(); } return ret; } private static EnumSet decodeType(int code) { final TextSearchType[] values = TextSearchType.values(); final EnumSet result = EnumSet.noneOf(TextSearchType.class); while (code != 0) { final int ordinal = Integer.numberOfTrailingZeros(code); code ^= Integer.lowestOneBit(code); result.add(values[ordinal]); } return result; } private static int encodeCaseSensitivity(final EnumSet set) { int ret = 0; for (final CaseSensitivity val : set) { ret |= 1 << val.ordinal(); } return ret; } private static EnumSet decodeCaseSensitivity(int code) { final CaseSensitivity[] values = CaseSensitivity.values(); final EnumSet result = EnumSet.noneOf(CaseSensitivity.class); while (code != 0) { final int ordinal = Integer.numberOfTrailingZeros(code); code ^= Integer.lowestOneBit(code); result.add(values[ordinal]); } return result; } @Override public Class getConstraintsClass() { return TextConstraints.class; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; public enum TextIndexType { FORWARD, REVERSE, NGRAM } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextIndexUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.function.BiPredicate; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TextIndexUtils { private static Logger LOGGER = LoggerFactory.getLogger(TextIndexUtils.class); protected static BiPredicate ALWAYS_TRUE = (term, value) -> true; private static final byte[] FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY = new byte[] {0}; private static final byte[] REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY = new byte[] {1}; private static final byte[] NGRAM_INDEX_CASE_SENSITIVE_PARTITION_KEY = new byte[] {2}; private static final byte[] FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY = new byte[] {3}; private static final byte[] REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY = new byte[] {4}; private static final byte[] NGRAM_INDEX_CASE_INSENSITIVE_PARTITION_KEY = new byte[] {5}; public static InsertionIds getInsertionIds( final String entry, final EnumSet supportedSearchTypes, final EnumSet supportedCaseSensitivities, final int nGramCharacters) { if ((entry == null) || 
entry.isEmpty()) { LOGGER.info("Cannot index null string, skipping entry"); return new InsertionIds(); } final Set indexTypes = supportedSearchTypes.stream().map(TextSearchType::getIndexType).collect(Collectors.toSet()); final List retVal = new ArrayList<>(indexTypes.size()); for (final TextIndexType indexType : indexTypes) { for (final CaseSensitivity caseSensitivity : supportedCaseSensitivities) { final boolean caseSensitive = CaseSensitivity.CASE_SENSITIVE.equals(caseSensitivity); switch (indexType) { case FORWARD: retVal.add(getForwardInsertionIds(entry, caseSensitive)); break; case REVERSE: retVal.add(getReverseInsertionIds(entry, caseSensitive)); break; case NGRAM: final SinglePartitionInsertionIds i = getNGramInsertionIds( entry, nGramCharacters, indexTypes.contains(TextIndexType.FORWARD), caseSensitive); if (i != null) { retVal.add(i); } break; } } } return new InsertionIds(retVal); } public static QueryRanges getQueryRanges( final String term, final TextSearchType searchType, final CaseSensitivity caseSensitivity, final EnumSet supportedSearchTypes, final int nGramCharacters) { final Set indexTypes = supportedSearchTypes.stream().map(TextSearchType::getIndexType).collect(Collectors.toSet()); final boolean caseSensitive = CaseSensitivity.CASE_SENSITIVE.equals(caseSensitivity); switch (searchType.getIndexType()) { case FORWARD: return getForwardQueryRanges(term, caseSensitive); case REVERSE: return getReverseQueryRanges(term, caseSensitive); case NGRAM: default: return getNGramQueryRanges( term, nGramCharacters, indexTypes.contains(TextIndexType.FORWARD), caseSensitive); } } public static QueryRanges getQueryRanges(final MultiDimensionalTextData textData) { final TextData data = textData.getDataPerDimension()[0]; if (data.isReversed()) { return getReverseQueryRanges( data.getMin(), data.getMax(), data.isMinInclusive(), data.isMaxInclusive(), data.isCaseSensitive()); } return getForwardQueryRanges( data.getMin(), data.getMax(), data.isMinInclusive(), 
data.isMaxInclusive(), data.isCaseSensitive()); } private static SinglePartitionInsertionIds getForwardInsertionIds( final String entry, final boolean caseSensitive) { return getForwardInsertionIds( caseSensitive ? entry : entry.toLowerCase(), caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY); } private static SinglePartitionInsertionIds getForwardInsertionIds( final String entry, final byte[] partitionKey) { return new SinglePartitionInsertionIds(partitionKey, StringUtils.stringToBinary(entry)); } private static SinglePartitionInsertionIds getReverseInsertionIds( final String entry, final boolean caseSensitive) { return getReverseInsertionIds( caseSensitive ? entry : entry.toLowerCase(), caseSensitive ? REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY : REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY); } private static SinglePartitionInsertionIds getReverseInsertionIds( final String entry, final byte[] partitionKey) { return new SinglePartitionInsertionIds( partitionKey, StringUtils.stringToBinary(new StringBuilder(entry).reverse().toString())); } private static SinglePartitionInsertionIds getNGramInsertionIds( final String entry, final int nGramCharacters, final boolean isForwardIndexed, final boolean caseSensitive) { return getNGramInsertionIds( caseSensitive ? entry : entry.toLowerCase(), nGramCharacters, isForwardIndexed, caseSensitive ? NGRAM_INDEX_CASE_SENSITIVE_PARTITION_KEY : NGRAM_INDEX_CASE_INSENSITIVE_PARTITION_KEY); } private static SinglePartitionInsertionIds getNGramInsertionIds( final String entry, final int nGramCharacters, final boolean isForwardIndexed, final byte[] partitionKey) { final int startIndex = (isForwardIndexed ? 
1 : 0); final int endIndex = entry.length() - nGramCharacters; final int numNGrams = (endIndex - startIndex) + 1; if (numNGrams >= 0) { final List sortKeys = new ArrayList<>(numNGrams); for (int i = startIndex; i <= endIndex; i++) { sortKeys.add(StringUtils.stringToBinary(entry.substring(i, i + nGramCharacters))); } return new SinglePartitionInsertionIds(partitionKey, sortKeys); } return null; } public static QueryRanges getForwardQueryRanges(final String term, final boolean caseSensitive) { final byte[] forwardTermBytes = StringUtils.stringToBinary(caseSensitive ? term : term.toLowerCase()); final List retVal = new ArrayList<>(1); retVal.add( new SinglePartitionQueryRanges( caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY, Collections.singletonList(new ByteArrayRange(forwardTermBytes, forwardTermBytes)))); return new QueryRanges(retVal); } public static QueryRanges getForwardQueryRanges( final String startTerm, final String endTerm, final boolean startInclusive, final boolean endInclusive, final boolean caseSensitive) { byte[] startBytes = StringUtils.stringToBinary(caseSensitive ? startTerm : startTerm.toLowerCase()); if (!startInclusive) { startBytes = ByteArrayUtils.getNextPrefix(startBytes); } byte[] endBytes = StringUtils.stringToBinary(caseSensitive ? endTerm : endTerm.toLowerCase()); if (!endInclusive) { endBytes = ByteArrayUtils.getPreviousPrefix(endBytes); } final List retVal = new ArrayList<>(1); retVal.add( new SinglePartitionQueryRanges( caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY, Collections.singletonList(new ByteArrayRange(startBytes, endBytes)))); return new QueryRanges(retVal); } public static QueryRanges getReverseQueryRanges(final String term, final boolean caseSensitive) { final byte[] reverseTermBytes = StringUtils.stringToBinary( new StringBuilder(caseSensitive ? 
term : term.toLowerCase()).reverse().toString()); final List retVal = new ArrayList<>(1); retVal.add( new SinglePartitionQueryRanges( caseSensitive ? REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY : REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY, Collections.singletonList(new ByteArrayRange(reverseTermBytes, reverseTermBytes)))); return new QueryRanges(retVal); } public static QueryRanges getReverseQueryRanges( final String startTerm, final String endTerm, final boolean startInclusive, final boolean endInclusive, final boolean caseSensitive) { byte[] startBytes = StringUtils.stringToBinary( new StringBuilder( caseSensitive ? startTerm : endTerm.toLowerCase()).reverse().toString()); if (!startInclusive) { startBytes = ByteArrayUtils.getNextPrefix(startBytes); } final byte[] endBytes = StringUtils.stringToBinary( new StringBuilder( caseSensitive ? endTerm : endTerm.toLowerCase()).reverse().toString()); if (!endInclusive) { startBytes = ByteArrayUtils.getPreviousPrefix(startBytes); } final List retVal = new ArrayList<>(1); retVal.add( new SinglePartitionQueryRanges( caseSensitive ? REVERSE_INDEX_CASE_SENSITIVE_PARTITION_KEY : REVERSE_INDEX_CASE_INSENSITIVE_PARTITION_KEY, Collections.singletonList(new ByteArrayRange(startBytes, endBytes)))); return new QueryRanges(retVal); } public static QueryRanges getNGramQueryRanges( final String initialTerm, final int nGramCharacters, final boolean isForwardIndexed, final boolean caseSensitive) { final String term = caseSensitive ? initialTerm : initialTerm.toLowerCase(); final boolean shouldTruncateNGram = term.length() > nGramCharacters; final byte[] nGramTermBytes = StringUtils.stringToBinary(shouldTruncateNGram ? term.substring(0, nGramCharacters) : term); final List retVal = new ArrayList<>(1 + (isForwardIndexed ? 1 : 0)); final SinglePartitionQueryRanges ngramRange = new SinglePartitionQueryRanges( caseSensitive ? 
NGRAM_INDEX_CASE_SENSITIVE_PARTITION_KEY : NGRAM_INDEX_CASE_INSENSITIVE_PARTITION_KEY, Collections.singletonList(new ByteArrayRange(nGramTermBytes, nGramTermBytes))); retVal.add(ngramRange); if (isForwardIndexed) { final byte[] forwardTermBytes = shouldTruncateNGram ? StringUtils.stringToBinary(term) : nGramTermBytes; retVal.add( new SinglePartitionQueryRanges( caseSensitive ? FORWARD_INDEX_CASE_SENSITIVE_PARTITION_KEY : FORWARD_INDEX_CASE_INSENSITIVE_PARTITION_KEY, Collections.singletonList(new ByteArrayRange(forwardTermBytes, forwardTermBytes)))); } return new QueryRanges(retVal); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextRange.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; /** Concrete implementation defining a text range. */ public class TextRange implements TextData { /** */ private static final long serialVersionUID = 1L; private String min; private String max; private boolean minInclusive; private boolean maxInclusive; private boolean caseSensitive; private boolean reversed; public TextRange() {} /** * Constructor used to create a IndexRange object * * @param min the minimum bounds of a unique index range * @param max the maximum bounds of a unique index range */ public TextRange(final String min, final String max) { this(min, max, true, true, true, false); } public TextRange( final String min, final String max, final boolean minInclusive, final boolean maxInclusive, final boolean caseSensitive, final boolean reversed) { this.min = min; this.max = max; this.minInclusive = minInclusive; this.maxInclusive = maxInclusive; this.caseSensitive = caseSensitive; this.reversed = reversed; } /** @return min the minimum bounds of a index range object */ @Override public String getMin() { return min; } /** @return max the maximum bounds of a index range object */ @Override public String getMax() { return max; } @Override public boolean isMinInclusive() { return minInclusive; } @Override public boolean isMaxInclusive() { return maxInclusive; } @Override public boolean isCaseSensitive() { return caseSensitive; } @Override public boolean isReversed() { return reversed; } /** @return centroid the center of a 
unique index range object */ @Override public String getCentroid() { final int length = Math.min(min.length(), max.length()); final StringBuilder sb = new StringBuilder(); for (int i = 0; i < length; i++) { sb.append((char) ((min.charAt(i) + max.charAt(i)) / 2)); } return sb.toString(); } /** Flag to determine if the object is a range */ @Override public boolean isRange() { return true; } @Override public String toString() { return "TextRange [min=" + min + ", max=" + max + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + min.hashCode(); result = (prime * result) + max.hashCode(); result = (prime * result) + (minInclusive ? 1 : 0); result = (prime * result) + (maxInclusive ? 1 : 0); result = (prime * result) + (caseSensitive ? 1 : 0); result = (prime * result) + (reversed ? 1 : 0); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } // changing this check will fail some unit tests. if (!TextRange.class.isAssignableFrom(obj.getClass())) { return false; } final TextRange other = (TextRange) obj; return min.equals(other.min) && max.equals(other.max) && (minInclusive == other.minInclusive) && (maxInclusive == other.maxInclusive) && (caseSensitive == other.caseSensitive) && (reversed == other.reversed); } @Override public byte[] toBinary() { final byte[] minBytes = StringUtils.stringToBinary(min); final byte[] maxBytes = StringUtils.stringToBinary(max); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(minBytes.length) + VarintUtils.unsignedIntByteLength(maxBytes.length) + minBytes.length + maxBytes.length + 4); VarintUtils.writeUnsignedInt(minBytes.length, buf); buf.put(minBytes); VarintUtils.writeUnsignedInt(maxBytes.length, buf); buf.put(maxBytes); buf.put(minInclusive ? (byte) 1 : (byte) 0); buf.put(maxInclusive ? (byte) 1 : (byte) 0); buf.put(caseSensitive ? 
(byte) 1 : (byte) 0); buf.put(reversed ? (byte) 1 : (byte) 0); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] minBytes = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(minBytes); final byte[] maxBytes = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(maxBytes); min = StringUtils.stringFromBinary(minBytes); max = StringUtils.stringFromBinary(maxBytes); minInclusive = buf.get() > 0; maxInclusive = buf.get() > 0; caseSensitive = buf.get() > 0; reversed = buf.get() > 0; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextSearch.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.nio.ByteBuffer; import java.util.EnumSet; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import com.google.common.primitives.Bytes; public class TextSearch implements TextConstraints { private TextSearchType type; private CaseSensitivity caseSensitivity; private String searchTerm; public TextSearch() {} public TextSearch( final TextSearchType type, final CaseSensitivity caseSensitivity, final String searchTerm) { this.type = type; this.caseSensitivity = caseSensitivity; this.searchTerm = searchTerm; } public TextSearchType getType() { return type; } public String getSearchTerm() { return searchTerm; } public CaseSensitivity getCaseSensitivity() { return caseSensitivity; } @Override public byte[] toBinary() { return Bytes.concat( VarintUtils.writeUnsignedInt(type.ordinal()), VarintUtils.writeUnsignedInt(caseSensitivity.ordinal()), StringUtils.stringToBinary(searchTerm)); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); type = TextSearchType.values()[VarintUtils.readUnsignedInt(buf)]; caseSensitivity = CaseSensitivity.values()[VarintUtils.readUnsignedInt(buf)]; final byte[] searchTermBytes = new byte[buf.remaining()]; buf.get(searchTermBytes); searchTerm = StringUtils.stringFromBinary(searchTermBytes); } @Override public QueryRanges getQueryRanges( final EnumSet supportedSearchTypes, final int nCharacterGrams) { return TextIndexUtils.getQueryRanges( searchTerm, type, caseSensitivity, 
supportedSearchTypes, nCharacterGrams); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextSearchPredicate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import org.locationtech.geowave.core.index.CustomIndexStrategy.PersistableBiPredicate; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; public class TextSearchPredicate implements PersistableBiPredicate { private TextIndexEntryConverter converter; private String cachedSearchTerm; private String cachedLowerCaseTerm; public TextSearchPredicate() {} public TextSearchPredicate(final TextIndexEntryConverter converter) { this.converter = converter; } @Override public boolean test(final E t, final TextSearch u) { final String value = converter.apply(t); final boolean caseSensitive = CaseSensitivity.CASE_SENSITIVE.equals(u.getCaseSensitivity()); return u.getType().evaluate( ((value != null) && !caseSensitive) ? value.toLowerCase() : value, caseSensitive ? 
u.getSearchTerm() : getLowerCaseTerm(u.getSearchTerm())); } @SuppressFBWarnings( value = {"ES_COMPARING_PARAMETER_STRING_WITH_EQ"}, justification = "this is actually intentional; comparing instance of a string") private String getLowerCaseTerm(final String term) { // because under normal conditions its always the same search term per instance of the // predicate, let's just make sure we perform toLowerCase one time instead of repeatedly for // each evaluation if ((cachedSearchTerm == null) || (cachedLowerCaseTerm == null)) { synchronized (this) { cachedSearchTerm = term; cachedLowerCaseTerm = term.toLowerCase(); } } // intentionally using == because this should be the same instance of the term else if (term == cachedSearchTerm) { return cachedLowerCaseTerm; } return term.toLowerCase(); } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(converter); } @Override public void fromBinary(final byte[] bytes) { converter = (TextIndexEntryConverter) PersistenceUtils.fromBinary(bytes); } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextSearchType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.util.function.BiPredicate; public enum TextSearchType { // for all but "contains" the Sort Keys of the query ranges should fully match expected search // results without the need for additional filtering via an "evaluate" BiPredicate EXACT_MATCH(TextIndexType.FORWARD, (value, term) -> (value != null) && value.equals(term)), BEGINS_WITH(TextIndexType.FORWARD), ENDS_WITH(TextIndexType.REVERSE), CONTAINS(TextIndexType.NGRAM, (value, term) -> (value != null) && value.contains(term)); private TextIndexType indexType; private BiPredicate evaluate; private boolean requiresEvaluate; private TextSearchType(final TextIndexType indexType) { this(indexType, TextIndexUtils.ALWAYS_TRUE, false); } private TextSearchType( final TextIndexType indexType, final BiPredicate evaluate) { this(indexType, evaluate, true); } private TextSearchType( final TextIndexType indexType, final BiPredicate evaluate, final boolean requiresEvaluate) { this.indexType = indexType; this.evaluate = evaluate; this.requiresEvaluate = requiresEvaluate; } public boolean evaluate(final String value, final String searchTerm) { return evaluate.test(value, searchTerm); } public boolean requiresEvaluate() { return requiresEvaluate; } public TextIndexType getIndexType() { return indexType; } } ================================================ FILE: core/index/src/main/java/org/locationtech/geowave/core/index/text/TextValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.text; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.StringUtils; /** * Concrete implementation defining a single text value. */ public class TextValue implements TextData { /** */ private static final long serialVersionUID = 1L; private String value; private boolean caseSensitive; private boolean reversed; public TextValue() {} /** * Constructor used to create a new TextValue object * * @param value the particular text value */ public TextValue(final String value, final boolean caseSensitive, final boolean reversed) { this.value = value; this.caseSensitive = caseSensitive; this.reversed = reversed; } /** @return value the value of a text value object */ @Override public String getMin() { return value; } /** @return value the value of a text value object */ @Override public String getMax() { return value; } @Override public boolean isMinInclusive() { return true; } @Override public boolean isMaxInclusive() { return true; } @Override public boolean isCaseSensitive() { return caseSensitive; } @Override public boolean isReversed() { return reversed; } /** @return value the value of a text value object */ @Override public String getCentroid() { return value; } /** Determines if this object is a range or not */ @Override public boolean isRange() { return false; } @Override public String toString() { return "TextRange [value=" + value + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + value.hashCode(); result = (prime * result) + (caseSensitive ? 1 : 0); result = (prime * result) + (reversed ? 
1 : 0); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TextValue other = (TextValue) obj; return value.equals(other.value); } @Override public byte[] toBinary() { final byte[] valueBytes = StringUtils.stringToBinary(value); final ByteBuffer buf = ByteBuffer.allocate(valueBytes.length + 2); buf.put(valueBytes); buf.put(caseSensitive ? (byte) 1 : (byte) 0); buf.put(reversed ? (byte) 1 : (byte) 0); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] valueBytes = new byte[buf.remaining() - 2]; buf.get(valueBytes); value = StringUtils.stringFromBinary(valueBytes); caseSensitive = buf.get() > 0; reversed = buf.get() > 0; } } ================================================ FILE: core/index/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.core.index.IndexPersistableRegistry ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/ByteArrayRangeTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Random; import java.util.UUID; import org.junit.Test; import org.locationtech.geowave.core.index.ByteArrayRange.MergeOperation; public class ByteArrayRangeTest { @Test public void testUnion() { final ByteArrayRange bar1 = new ByteArrayRange(new ByteArray("232").getBytes(), new ByteArray("332").getBytes()); final ByteArrayRange bar2 = new ByteArrayRange(new ByteArray("282").getBytes(), new ByteArray("300").getBytes()); final ByteArrayRange bar3 = new ByteArrayRange(new ByteArray("272").getBytes(), new ByteArray("340").getBytes()); final ByteArrayRange bar4 = new ByteArrayRange(new ByteArray("392").getBytes(), new ByteArray("410").getBytes()); Collection l1 = new ArrayList<>(Arrays.asList(bar4, bar3, bar1, bar2)); l1 = ByteArrayRange.mergeIntersections(l1, MergeOperation.UNION); Collection l2 = new ArrayList<>(Arrays.asList(bar1, bar4, bar2, bar3)); l2 = ByteArrayRange.mergeIntersections(l2, MergeOperation.UNION); assertEquals(2, l1.size()); assertEquals(l1, l2); assertEquals( new ByteArrayRange(new ByteArray("232").getBytes(), new ByteArray("340").getBytes()), ((ArrayList) l1).get(0)); assertEquals( new ByteArrayRange(new ByteArray("392").getBytes(), new ByteArray("410").getBytes()), ((ArrayList) l1).get(1)); } @Test public void testIntersection() { final ByteArrayRange bar1 = new ByteArrayRange(new ByteArray("232").getBytes(), new ByteArray("332").getBytes()); final ByteArrayRange bar2 = new ByteArrayRange(new 
ByteArray("282").getBytes(), new ByteArray("300").getBytes()); final ByteArrayRange bar3 = new ByteArrayRange(new ByteArray("272").getBytes(), new ByteArray("340").getBytes()); final ByteArrayRange bar4 = new ByteArrayRange(new ByteArray("392").getBytes(), new ByteArray("410").getBytes()); Collection l1 = new ArrayList<>(Arrays.asList(bar4, bar3, bar1, bar2)); l1 = ByteArrayRange.mergeIntersections(l1, MergeOperation.INTERSECTION); Collection l2 = new ArrayList<>(Arrays.asList(bar1, bar4, bar2, bar3)); l2 = ByteArrayRange.mergeIntersections(l2, MergeOperation.INTERSECTION); assertEquals(2, l1.size()); assertEquals(l1, l2); assertEquals( new ByteArrayRange(new ByteArray("282").getBytes(), new ByteArray("300").getBytes()), ((ArrayList) l1).get(0)); assertEquals( new ByteArrayRange(new ByteArray("392").getBytes(), new ByteArray("410").getBytes()), ((ArrayList) l1).get(1)); } final Random random = new Random(); public String increment(final String id) { int v = (int) (Math.abs(random.nextDouble()) * 10000); final StringBuffer buf = new StringBuffer(); int pos = id.length() - 1; int r = 0; while (v > 0) { final int m = (v - ((v >> 8) << 8)); final int c = id.charAt(pos); final int n = c + m + r; buf.append((char) (n % 255)); r = n / 255; v >>= 8; pos--; } while (pos >= 0) { buf.append(id.charAt(pos--)); } return buf.reverse().toString(); } @Test public void bigTest() { final List l2 = new ArrayList<>(); for (int i = 0; i < 3000; i++) { String seed = UUID.randomUUID().toString(); for (int j = 0; j < 500; j++) { l2.add( new ByteArrayRange( new ByteArray(seed).getBytes(), new ByteArray(increment(seed)).getBytes())); seed = increment(seed); } } ByteArrayRange.mergeIntersections(l2, MergeOperation.INTERSECTION); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/ByteArrayUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import org.apache.commons.lang3.tuple.Pair; import org.junit.Assert; import org.junit.Test; public class ByteArrayUtilsTest { @Test public void testSplit() { final ByteArray first = new ByteArray("first"); final ByteArray second = new ByteArray("second"); final byte[] combined = ByteArrayUtils.combineVariableLengthArrays(first.getBytes(), second.getBytes()); final Pair split = ByteArrayUtils.splitVariableLengthArrays(combined); Assert.assertArrayEquals(first.getBytes(), split.getLeft()); Assert.assertArrayEquals(second.getBytes(), split.getRight()); } @Test public void testVariableLengthEncodeDecode() { testVariableLengthValue(0); testVariableLengthValue(123456L); testVariableLengthValue(-42L); testVariableLengthValue(Byte.MAX_VALUE); testVariableLengthValue(Byte.MIN_VALUE); testVariableLengthValue(Integer.MIN_VALUE); testVariableLengthValue(Integer.MAX_VALUE); testVariableLengthValue(Long.MAX_VALUE); testVariableLengthValue(Long.MIN_VALUE); } @Test public void testReplace() { byte[] source = "test byte array".getBytes(); byte[] find = "e".getBytes(); byte[] replace = "xx".getBytes(); byte[] replaced = ByteArrayUtils.replace(source, find, replace); Assert.assertArrayEquals("txxst bytxx array".getBytes(), replaced); source = "test byte array".getBytes(); find = "test".getBytes(); replace = "".getBytes(); replaced = ByteArrayUtils.replace(source, find, replace); Assert.assertArrayEquals(" byte array".getBytes(), replaced); source = "test byte array".getBytes(); find = "array".getBytes(); replace = "".getBytes(); replaced = ByteArrayUtils.replace(source, find, replace); 
Assert.assertArrayEquals("test byte ".getBytes(), replaced); source = "test byte test".getBytes(); find = "test".getBytes(); replace = "____".getBytes(); replaced = ByteArrayUtils.replace(source, find, replace); Assert.assertArrayEquals("____ byte ____".getBytes(), replaced); source = "test byte array".getBytes(); find = "".getBytes(); replace = "____".getBytes(); replaced = ByteArrayUtils.replace(source, find, replace); Assert.assertArrayEquals("test byte array".getBytes(), replaced); source = "test byte array".getBytes(); find = null; replace = "____".getBytes(); replaced = ByteArrayUtils.replace(source, find, replace); Assert.assertArrayEquals("test byte array".getBytes(), replaced); source = "test byte array".getBytes(); find = "none".getBytes(); replace = "____".getBytes(); replaced = ByteArrayUtils.replace(source, find, replace); Assert.assertArrayEquals("test byte array".getBytes(), replaced); } private void testVariableLengthValue(final long value) { final byte[] encoded = ByteArrayUtils.variableLengthEncode(value); final long result = ByteArrayUtils.variableLengthDecode(encoded); Assert.assertEquals(value, result); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/CompoundIndexStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;
import org.locationtech.geowave.core.index.simple.HashKeyIndexStrategy;

/**
 * Tests a {@link CompoundIndexStrategy} composed of a hash-partition strategy and a
 * single-tier Hilbert SFC strategy, verifying it behaves as the cross product of the two.
 */
public class CompoundIndexStrategyTest {

  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =
      new NumericDimensionDefinition[] {
          new BasicDimensionDefinition(-180, 180),
          new BasicDimensionDefinition(-90, 90)};
  // NOTE(review): type arguments restored from the raw type — confirm against the
  // PartitionIndexStrategy declaration
  private static final PartitionIndexStrategy<MultiDimensionalNumericData, MultiDimensionalNumericData> simpleIndexStrategy =
      new HashKeyIndexStrategy(10);
  private static final NumericIndexStrategy sfcIndexStrategy =
      TieredSFCIndexFactory.createSingleTierStrategy(
          SPATIAL_DIMENSIONS,
          new int[] {16, 16},
          SFCType.HILBERT);
  private static final CompoundIndexStrategy compoundIndexStrategy =
      new CompoundIndexStrategy(simpleIndexStrategy, sfcIndexStrategy);
  private static final NumericRange dimension2Range = new NumericRange(50.0, 50.025);
  private static final NumericRange dimension3Range = new NumericRange(-20.5, -20.455);
  private static final MultiDimensionalNumericData sfcIndexedRange =
      new BasicNumericDataset(new NumericData[] {dimension2Range, dimension3Range});
  private static final MultiDimensionalNumericData compoundIndexedRange =
      new BasicNumericDataset(new NumericData[] {dimension2Range, dimension3Range});

  /** Serialize/deserialize/serialize must be byte-stable. */
  @Test
  public void testBinaryEncoding() {
    final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);
    final CompoundIndexStrategy deserializedStrategy =
        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);
    final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);
    Assert.assertArrayEquals(bytes, bytes2);
  }

  @Test
  public void testGetNumberOfDimensions() {
    final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();
    Assert.assertEquals(2, numDimensions);
  }

  /**
   * Without a max-ranges constraint, the compound ranges must equal the cross product of
   * partition keys and SFC ranges.
   */
  @Test
  public void testGetQueryRangesWithMaximumNumberOfRanges() {
    final byte[][] partitions = simpleIndexStrategy.getQueryPartitionKeys(sfcIndexedRange);
    final QueryRanges sfcIndexRanges = sfcIndexStrategy.getQueryRanges(sfcIndexedRange);
    final List<ByteArrayRange> ranges = new ArrayList<>();
    for (final byte[] r1 : partitions) {
      for (final ByteArrayRange r2 : sfcIndexRanges.getCompositeQueryRanges()) {
        final byte[] start = ByteArrayUtils.combineArrays(r1, r2.getStart());
        final byte[] end = ByteArrayUtils.combineArrays(r1, r2.getEnd());
        ranges.add(new ByteArrayRange(start, end));
      }
    }
    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);
    final Set<ByteArrayRange> compoundIndexRanges =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(compoundIndexedRange).getCompositeQueryRanges());
    // set equality via mutual containment
    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));
    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));
  }

  /** Same cross-product property when a max of 8 ranges is requested. */
  @Test
  public void testGetQueryRanges() {
    final byte[][] simpleIndexRanges = simpleIndexStrategy.getQueryPartitionKeys(sfcIndexedRange);
    final List<ByteArrayRange> sfcIndexRanges =
        sfcIndexStrategy.getQueryRanges(sfcIndexedRange, 8).getCompositeQueryRanges();
    final List<ByteArrayRange> ranges =
        new ArrayList<>(simpleIndexRanges.length * sfcIndexRanges.size());
    for (final byte[] r1 : simpleIndexRanges) {
      for (final ByteArrayRange r2 : sfcIndexRanges) {
        final byte[] start = ByteArrayUtils.combineArrays(r1, r2.getStart());
        final byte[] end = ByteArrayUtils.combineArrays(r1, r2.getEnd());
        ranges.add(new ByteArrayRange(start, end));
      }
    }
    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);
    final Set<ByteArrayRange> compoundIndexRanges =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(
                compoundIndexedRange,
                8).getCompositeQueryRanges());
    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));
    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));
  }

  /** Insertion ids must be the cross product of partition keys and SFC insertion ids. */
  @Test
  public void testGetInsertionIds() {
    final List<byte[]> ids = new ArrayList<>();
    final byte[][] ids1 = simpleIndexStrategy.getInsertionPartitionKeys(sfcIndexedRange);
    // the overall duplicate budget (8) is split evenly across partitions
    final int maxEstDuplicatesStrategy2 = 8 / ids1.length;
    final List<byte[]> ids2 =
        sfcIndexStrategy.getInsertionIds(
            sfcIndexedRange,
            maxEstDuplicatesStrategy2).getCompositeInsertionIds();
    for (final byte[] id1 : ids1) {
      for (final byte[] id2 : ids2) {
        ids.add(ByteArrayUtils.combineArrays(id1, id2));
      }
    }
    // wrap in ByteArray for value-based set comparison (byte[] uses identity equals)
    final Set<ByteArray> testIds =
        new HashSet<>(ids.stream().map(i -> new ByteArray(i)).collect(Collectors.toList()));
    final Set<ByteArray> compoundIndexIds =
        new HashSet<>(
            compoundIndexStrategy.getInsertionIds(
                compoundIndexedRange,
                8).getCompositeInsertionIds().stream().map(i -> new ByteArray(i)).collect(
                    Collectors.toList()));
    Assert.assertTrue(testIds.containsAll(compoundIndexIds));
    Assert.assertTrue(compoundIndexIds.containsAll(testIds));
  }

  /** The compound strategy must delegate per-dimension coordinates to the SFC strategy. */
  @Test
  public void testGetCoordinatesPerDimension() {
    final byte[] compoundIndexPartitionKey = new byte[] {16};
    final byte[] compoundIndexSortKey = new byte[] {-46, -93, -110, -31};
    final MultiDimensionalCoordinates sfcIndexCoordinatesPerDim =
        sfcIndexStrategy.getCoordinatesPerDimension(
            compoundIndexPartitionKey,
            compoundIndexSortKey);
    final MultiDimensionalCoordinates coordinatesPerDim =
        compoundIndexStrategy.getCoordinatesPerDimension(
            compoundIndexPartitionKey,
            compoundIndexSortKey);
    Assert.assertTrue(
        Long.compare(
            sfcIndexCoordinatesPerDim.getCoordinate(0).getCoordinate(),
            coordinatesPerDim.getCoordinate(0).getCoordinate()) == 0);
    Assert.assertTrue(
        Long.compare(
            sfcIndexCoordinatesPerDim.getCoordinate(1).getCoordinate(),
            coordinatesPerDim.getCoordinate(1).getCoordinate()) == 0);
  }

  /** The compound strategy must delegate range-for-id to the SFC strategy. */
  @Test
  public void testGetRangeForId() {
    final byte[] sfcIndexPartitionKey = new byte[] {16};
    final byte[] sfcIndexSortKey = new byte[] {-46, -93, -110, -31};
    final MultiDimensionalNumericData sfcIndexRange =
        sfcIndexStrategy.getRangeForId(sfcIndexPartitionKey, sfcIndexSortKey);
    final MultiDimensionalNumericData range =
        compoundIndexStrategy.getRangeForId(sfcIndexPartitionKey, sfcIndexSortKey);
    Assert.assertEquals(sfcIndexRange.getDimensionCount(), 2);
    Assert.assertEquals(range.getDimensionCount(), 2);
    Assert.assertTrue(
        Double.compare(
            sfcIndexRange.getMinValuesPerDimension()[0],
            range.getMinValuesPerDimension()[0]) == 0);
    Assert.assertTrue(
        Double.compare(
            sfcIndexRange.getMinValuesPerDimension()[1],
            range.getMinValuesPerDimension()[1]) == 0);
    Assert.assertTrue(
        Double.compare(
            sfcIndexRange.getMaxValuesPerDimension()[0],
            range.getMaxValuesPerDimension()[0]) == 0);
    Assert.assertTrue(
        Double.compare(
            sfcIndexRange.getMaxValuesPerDimension()[1],
            range.getMaxValuesPerDimension()[1]) == 0);
  }

  /**
   * Query ranges with index-metadata hints (fresh and persisted) must match the unhinted
   * ranges for the same query region.
   */
  @Test
  public void testHints() {
    final InsertionIds ids = compoundIndexStrategy.getInsertionIds(compoundIndexedRange, 8);
    final List<IndexMetaData> metaData = compoundIndexStrategy.createMetaData();
    for (final IndexMetaData imd : metaData) {
      imd.insertionIdsAdded(ids);
    }
    // (a dead cross-product computation that was never asserted has been removed)
    final Set<ByteArrayRange> compoundIndexRangesWithoutHints =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(compoundIndexedRange).getCompositeQueryRanges());
    final Set<ByteArrayRange> compoundIndexRangesWithHints =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(
                compoundIndexedRange,
                metaData.toArray(new IndexMetaData[metaData.size()])).getCompositeQueryRanges());
    Assert.assertTrue(compoundIndexRangesWithoutHints.containsAll(compoundIndexRangesWithHints));
    Assert.assertTrue(compoundIndexRangesWithHints.containsAll(compoundIndexRangesWithoutHints));
    // round-trip the metadata through serialization and query with the DESERIALIZED copy
    // (the original mistakenly passed the pre-serialization list here, so the round trip
    // was never exercised)
    final List<Persistable> newMetaData =
        PersistenceUtils.fromBinaryAsList(PersistenceUtils.toBinary(metaData));
    final Set<ByteArrayRange> compoundIndexRangesWithHints2 =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(
                compoundIndexedRange,
                newMetaData.toArray(
                    new IndexMetaData[newMetaData.size()])).getCompositeQueryRanges());
    Assert.assertTrue(compoundIndexRangesWithoutHints.containsAll(compoundIndexRangesWithHints2));
    Assert.assertTrue(compoundIndexRangesWithHints2.containsAll(compoundIndexRangesWithoutHints));
  }
}

================================================
FILE: core/index/src/test/java/org/locationtech/geowave/core/index/PersistenceUtilsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; public class PersistenceUtilsTest { public static class APersistable implements Persistable { @Override public byte[] toBinary() { return new byte[] {1, 2, 3}; } @Override public void fromBinary(final byte[] bytes) { Assert.assertTrue(Arrays.equals(bytes, new byte[] {1, 2, 3})); } } @Test public void test() { final APersistable persistable = new APersistable(); Assert.assertTrue( PersistenceUtils.fromBinaryAsList( PersistenceUtils.toBinary(new ArrayList())).isEmpty()); Assert.assertTrue( PersistenceUtils.fromBinaryAsList( PersistenceUtils.toBinary( Collections.singleton(persistable))).size() == 1); Assert.assertTrue( PersistenceUtils.fromBinaryAsList( PersistenceUtils.toBinary( Arrays.asList( new Persistable[] {persistable, persistable}))).size() == 2); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/StringUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import static org.junit.Assert.assertEquals; import org.junit.Test; public class StringUtilsTest { @Test public void testFull() { final String[] result = StringUtils.stringsFromBinary(StringUtils.stringsToBinary(new String[] {"12", "34"})); assertEquals(2, result.length); assertEquals("12", result[0]); assertEquals("34", result[1]); } @Test public void testEmpty() { final String[] result = StringUtils.stringsFromBinary(StringUtils.stringsToBinary(new String[] {})); assertEquals(0, result.length); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/TestIndexPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import org.locationtech.geowave.core.index.PersistenceUtilsTest.APersistable; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; public class TestIndexPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return new PersistableIdAndConstructor[] { new PersistableIdAndConstructor((short) 10100, APersistable::new),}; } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/VarintUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.util.Calendar; import java.util.Date; import org.junit.Assert; import org.junit.Test; public class VarintUtilsTest { @Test public void testVarintEncodeDecodeUnsignedIntReversed() { testEncodeDecodeUnsignedIntReversed(0); testEncodeDecodeUnsignedIntReversed(123456); testEncodeDecodeUnsignedIntReversed(Byte.MAX_VALUE); testEncodeDecodeUnsignedIntReversed(Integer.MAX_VALUE); final int length = VarintUtils.unsignedIntByteLength(15) + VarintUtils.unsignedIntByteLength(Byte.MAX_VALUE); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeUnsignedIntReversed(15, buffer); VarintUtils.writeUnsignedIntReversed(Byte.MAX_VALUE, buffer); buffer.position(buffer.limit() - 1); Assert.assertEquals(Byte.MAX_VALUE, VarintUtils.readUnsignedIntReversed(buffer)); Assert.assertEquals(15, VarintUtils.readUnsignedIntReversed(buffer)); } private void testEncodeDecodeUnsignedIntReversed(final int value) { final int length = VarintUtils.unsignedIntByteLength(value); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeUnsignedIntReversed(value, buffer); buffer.position(buffer.limit() - 1); final int decoded = VarintUtils.readUnsignedIntReversed(buffer); Assert.assertEquals(value, decoded); } @Test public void testVarintSignedUnsignedInt() { testSignedUnsignedIntValue(0); testSignedUnsignedIntValue(-123456); testSignedUnsignedIntValue(123456); testSignedUnsignedIntValue(Byte.MIN_VALUE); testSignedUnsignedIntValue(Byte.MAX_VALUE); testSignedUnsignedIntValue(Integer.MIN_VALUE); 
testSignedUnsignedIntValue(Integer.MAX_VALUE); } private void testSignedUnsignedIntValue(final int value) { final int unsigned = VarintUtils.signedToUnsignedInt(value); final int signed = VarintUtils.unsignedToSignedInt(unsigned); Assert.assertEquals(value, signed); } @Test public void testVarintEncodeDecodeUnsignedInt() { testEncodeDecodeUnsignedIntValue(0); testEncodeDecodeUnsignedIntValue(123456); testEncodeDecodeUnsignedIntValue(Byte.MAX_VALUE); testEncodeDecodeUnsignedIntValue(Integer.MAX_VALUE); // negative values are inefficient in this encoding, but should still // work. testEncodeDecodeUnsignedIntValue(-123456); testEncodeDecodeUnsignedIntValue(Byte.MIN_VALUE); testEncodeDecodeUnsignedIntValue(Integer.MIN_VALUE); } private void testEncodeDecodeUnsignedIntValue(final int value) { final int length = VarintUtils.unsignedIntByteLength(value); // should never use more than 5 bytes Assert.assertTrue(length <= 5); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeUnsignedInt(value, buffer); buffer.position(0); final int decoded = VarintUtils.readUnsignedInt(buffer); Assert.assertEquals(value, decoded); } @Test public void testVarintEncodeDecodeUnsignedShort() { testEncodeDecodeUnsignedShortValue((short) 0); testEncodeDecodeUnsignedShortValue((short) 12345); testEncodeDecodeUnsignedShortValue(Byte.MAX_VALUE); testEncodeDecodeUnsignedShortValue(Short.MAX_VALUE); // negative values are inefficient in this encoding, but should still // work. 
testEncodeDecodeUnsignedShortValue((short) -12345); testEncodeDecodeUnsignedShortValue(Byte.MIN_VALUE); testEncodeDecodeUnsignedShortValue(Short.MIN_VALUE); } private void testEncodeDecodeUnsignedShortValue(final short value) { final int length = VarintUtils.unsignedShortByteLength(value); // should never use more than 3 bytes Assert.assertTrue(length <= 3); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeUnsignedShort(value, buffer); buffer.position(0); final int decoded = VarintUtils.readUnsignedShort(buffer); Assert.assertEquals(value, decoded); } @Test public void testVarintEncodeDecodeSignedInt() { testEncodeDecodeSignedIntValue(0); testEncodeDecodeSignedIntValue(-123456); testEncodeDecodeSignedIntValue(123456); testEncodeDecodeSignedIntValue(Byte.MIN_VALUE); testEncodeDecodeSignedIntValue(Byte.MAX_VALUE); testEncodeDecodeSignedIntValue(Integer.MIN_VALUE); testEncodeDecodeSignedIntValue(Integer.MAX_VALUE); } private void testEncodeDecodeSignedIntValue(final int value) { final int length = VarintUtils.signedIntByteLength(value); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeSignedInt(value, buffer); buffer.position(0); final int decoded = VarintUtils.readSignedInt(buffer); Assert.assertEquals(value, decoded); } @Test public void testVarintSignedUnsignedLong() { testSignedUnsignedLongValue(0L); testSignedUnsignedLongValue(-123456L); testSignedUnsignedLongValue(123456L); testSignedUnsignedLongValue(Byte.MIN_VALUE); testSignedUnsignedLongValue(Byte.MAX_VALUE); testSignedUnsignedLongValue(Integer.MIN_VALUE); testSignedUnsignedLongValue(Integer.MAX_VALUE); testSignedUnsignedLongValue(Long.MIN_VALUE); testSignedUnsignedLongValue(Long.MAX_VALUE); } private void testSignedUnsignedLongValue(final long value) { final long unsigned = VarintUtils.signedToUnsignedLong(value); final long signed = VarintUtils.unsignedToSignedLong(unsigned); Assert.assertEquals(value, signed); } @Test public void 
testVarLongEncodeDecodeUnsignedLong() { testEncodeDecodeUnsignedLongValue(0L); testEncodeDecodeUnsignedLongValue(123456L); testEncodeDecodeUnsignedLongValue(Byte.MAX_VALUE); testEncodeDecodeUnsignedLongValue(Integer.MAX_VALUE); testEncodeDecodeUnsignedLongValue(Long.MAX_VALUE); // negative values are inefficient in this encoding, but should still // work. testEncodeDecodeUnsignedLongValue(-123456L); testEncodeDecodeUnsignedLongValue(Byte.MIN_VALUE); testEncodeDecodeUnsignedLongValue(Integer.MIN_VALUE); testEncodeDecodeUnsignedLongValue(Long.MIN_VALUE); } private void testEncodeDecodeUnsignedLongValue(final long value) { final int length = VarintUtils.unsignedLongByteLength(value); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeUnsignedLong(value, buffer); buffer.position(0); final long decoded = VarintUtils.readUnsignedLong(buffer); Assert.assertEquals(value, decoded); } @Test public void testVarLongEncodeDecodeSignedLong() { testEncodeDecodeSignedLongValue(0L); testEncodeDecodeSignedLongValue(-123456L); testEncodeDecodeSignedLongValue(123456L); testEncodeDecodeSignedLongValue(Byte.MIN_VALUE); testEncodeDecodeSignedLongValue(Byte.MAX_VALUE); testEncodeDecodeSignedLongValue(Integer.MIN_VALUE); testEncodeDecodeSignedLongValue(Integer.MAX_VALUE); testEncodeDecodeSignedLongValue(Long.MIN_VALUE); testEncodeDecodeSignedLongValue(Long.MAX_VALUE); } private void testEncodeDecodeSignedLongValue(final long value) { final int length = VarintUtils.signedLongByteLength(value); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeSignedLong(value, buffer); buffer.position(0); final long decoded = VarintUtils.readSignedLong(buffer); Assert.assertEquals(value, decoded); } @Test public void testEncodeDecodeTime() { final Calendar cal = Calendar.getInstance(); // Current time testEncodeDecodeTimeValue(new Date()); // Epoch time testEncodeDecodeTimeValue(new Date(0)); // GeoWave epoch time testEncodeDecodeTimeValue(new 
Date(VarintUtils.TIME_EPOCH)); // Distant past cal.set(15, Calendar.SEPTEMBER, 13, 5, 18, 36); testEncodeDecodeTimeValue(cal.getTime()); // Distant future cal.set(3802, Calendar.DECEMBER, 31, 23, 59, 59); testEncodeDecodeTimeValue(cal.getTime()); } private void testEncodeDecodeTimeValue(final Date value) { final int length = VarintUtils.timeByteLength(value.getTime()); final ByteBuffer buffer = ByteBuffer.allocate(length); VarintUtils.writeTime(value.getTime(), buffer); buffer.position(0); final Date decoded = new Date(VarintUtils.readTime(buffer)); Assert.assertEquals(value, decoded); } @Test public void testEncodeDecodeBigDecimal() { testEncodeDecodeBigDecimalValue(new BigDecimal(123)); testEncodeDecodeBigDecimalValue(new BigDecimal(-123)); testEncodeDecodeBigDecimalValue(new BigDecimal(256)); testEncodeDecodeBigDecimalValue(new BigDecimal(2_061_000_009)); testEncodeDecodeBigDecimalValue(new BigDecimal(-1_000_000_000)); testEncodeDecodeBigDecimalValue(new BigDecimal("3133731337313373133731337")); testEncodeDecodeBigDecimalValue(new BigDecimal("-3133731337313373133731337")); testEncodeDecodeBigDecimalValue( new BigDecimal("-3133731337313373133731337.3133731337313373133731337")); } private void testEncodeDecodeBigDecimalValue(final BigDecimal value) { byte[] encoded = VarintUtils.writeBigDecimal(value); BigDecimal roundtrip = VarintUtils.readBigDecimal(ByteBuffer.wrap(encoded)); Assert.assertNotNull(roundtrip); Assert.assertEquals(0, value.compareTo(roundtrip)); // append garbage after the BigDecimal to ensure that it is not read. 
byte[] garbage = new byte[] {0xc, 0xa, 0xf, 0xe, 32, 0xb, 0xa, 0xb, 0xe}; ByteBuffer appended = ByteBuffer.allocate(encoded.length + garbage.length); appended.put(encoded); appended.put(garbage); roundtrip = VarintUtils.readBigDecimal((ByteBuffer) appended.flip()); Assert.assertNotNull(roundtrip); Assert.assertEquals( value.toString() + " == " + roundtrip.toString(), 0, value.compareTo(roundtrip)); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/dimension/BasicDimensionDefinitionTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.dimension; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.NumericRange; public class BasicDimensionDefinitionTest { private final double MINIMUM = 20; private final double MAXIMUM = 100; private final double DELTA = 1e-15; @Test public void testNormalizeMidValue() { final double midValue = 60; final double normalizedValue = 0.5; Assert.assertEquals( normalizedValue, getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, midValue), DELTA); } @Test public void testNormalizeUpperValue() { final double lowerValue = 20; final double normalizedValue = 0.0; Assert.assertEquals( normalizedValue, getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, lowerValue), DELTA); } @Test public void testNormalizeLowerValue() { final double upperValue = 100; final double normalizedValue = 1.0; Assert.assertEquals( normalizedValue, getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, upperValue), DELTA); } @Test public void testNormalizeClampOutOfBoundsValue() { final double value = 1; final double normalizedValue = 0.0; Assert.assertEquals( normalizedValue, getNormalizedValueUsingBounds(MINIMUM, MAXIMUM, value), DELTA); } @Test public void testNormalizeRangesBinRangeCount() { final double minRange = 40; final double maxRange = 50; final int binCount = 1; final BinRange[] binRange = getNormalizedRangesUsingBounds(minRange, maxRange); Assert.assertEquals(binCount, binRange.length); } @Test public void testNormalizeClampOutOfBoundsRanges() { final double minRange = 1; final double maxRange = 150; 
final BinRange[] binRange = getNormalizedRangesUsingBounds(minRange, maxRange); Assert.assertEquals(MINIMUM, binRange[0].getNormalizedMin(), DELTA); Assert.assertEquals(MAXIMUM, binRange[0].getNormalizedMax(), DELTA); } private double getNormalizedValueUsingBounds( final double min, final double max, final double value) { return new BasicDimensionDefinition(min, max).normalize(value); } private BinRange[] getNormalizedRangesUsingBounds(final double minRange, final double maxRange) { return new BasicDimensionDefinition(MINIMUM, MAXIMUM).getNormalizedRanges( new NumericRange(minRange, maxRange)); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/AbstractLexicoderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.lexicoder;

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.junit.Assert;
import org.junit.Test;

/**
 * Base class for lexicoder tests: verifies the advertised min/max values and that the
 * lexicographic (byte) order of encoded values matches the natural order of the raw values.
 *
 * NOTE(review): the generic declaration was garbled in extraction and has been restored as
 * {@code <T extends Number & Comparable<T>>} — confirm against the NumberLexicoder signature.
 */
public abstract class AbstractLexicoderTest<T extends Number & Comparable<T>> {
  private final NumberLexicoder<T> lexicoder;
  private final T expectedMin;
  private final T expectedMax;
  private final T[] unsortedVals;
  // byte-wise comparator defining the expected sort order of the encodings
  private final Comparator<byte[]> comparator;

  public AbstractLexicoderTest(
      final NumberLexicoder<T> lexicoder,
      final T expectedMin,
      final T expectedMax,
      final T[] unsortedVals,
      final Comparator<byte[]> comparator) {
    super();
    this.lexicoder = lexicoder;
    this.expectedMin = expectedMin;
    this.expectedMax = expectedMax;
    this.unsortedVals = unsortedVals;
    this.comparator = comparator;
  }

  /** The lexicoder reports the expected representable min and max. */
  @Test
  public void testRanges() {
    Assert.assertTrue(lexicoder.getMinimumValue().equals(expectedMin));
    Assert.assertTrue(lexicoder.getMaximumValue().equals(expectedMax));
  }

  /** Encodings sorted byte-wise must decode back in the values' natural order. */
  @Test
  public void testSortOrder() {
    final List<T> list = Arrays.asList(unsortedVals);
    // TreeMap keyed on the encoded bytes, ordered by the byte comparator
    final Map<byte[], T> sortedByteArrayToRawTypeMappings = new TreeMap<>(comparator);
    for (final T d : list) {
      sortedByteArrayToRawTypeMappings.put(lexicoder.toByteArray(d), d);
    }
    Collections.sort(list);
    int idx = 0;
    final Set<byte[]> sortedByteArrays = sortedByteArrayToRawTypeMappings.keySet();
    for (final byte[] byteArray : sortedByteArrays) {
      final T value = sortedByteArrayToRawTypeMappings.get(byteArray);
      // the i-th smallest encoding must correspond to the i-th smallest value
      Assert.assertTrue(value.equals(list.get(idx++)));
    }
  }
}

================================================
FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/ByteLexicoderTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.UnsignedBytes; public class ByteLexicoderTest extends AbstractLexicoderTest { public ByteLexicoderTest() { super( Lexicoders.BYTE, Byte.MIN_VALUE, Byte.MAX_VALUE, new Byte[] { (byte) -10, Byte.MIN_VALUE, (byte) 11, (byte) -122, (byte) 122, (byte) -100, (byte) 100, Byte.MAX_VALUE, (byte) 0}, UnsignedBytes.lexicographicalComparator()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/DoubleLexicoderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.UnsignedBytes; public class DoubleLexicoderTest extends AbstractLexicoderTest { public DoubleLexicoderTest() { super( Lexicoders.DOUBLE, -Double.MAX_VALUE, Double.MAX_VALUE, new Double[] { -10d, -Double.MAX_VALUE, 11d, -14.2, 14.2, -100.002, 100.002, -11d, Double.MAX_VALUE, 0d}, UnsignedBytes.lexicographicalComparator()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/FloatLexicoderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.UnsignedBytes; public class FloatLexicoderTest extends AbstractLexicoderTest { public FloatLexicoderTest() { super( Lexicoders.FLOAT, -Float.MAX_VALUE, Float.MAX_VALUE, new Float[] { -10f, -Float.MAX_VALUE, 11f, -14.2f, 14.2f, -100.002f, 100.002f, -11f, Float.MAX_VALUE, 0f}, UnsignedBytes.lexicographicalComparator()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/IntegerLexicoderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.UnsignedBytes; public class IntegerLexicoderTest extends AbstractLexicoderTest { public IntegerLexicoderTest() { super( Lexicoders.INT, Integer.MIN_VALUE, Integer.MAX_VALUE, new Integer[] {-10, Integer.MIN_VALUE, 2678, Integer.MAX_VALUE, 0}, UnsignedBytes.lexicographicalComparator()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/LongLexicoderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.UnsignedBytes; public class LongLexicoderTest extends AbstractLexicoderTest { public LongLexicoderTest() { super( Lexicoders.LONG, Long.MIN_VALUE, Long.MAX_VALUE, new Long[] {-10l, Long.MIN_VALUE, 2678l, Long.MAX_VALUE, 0l}, UnsignedBytes.lexicographicalComparator()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/lexicoder/ShortLexicoderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.lexicoder; import com.google.common.primitives.UnsignedBytes; public class ShortLexicoderTest extends AbstractLexicoderTest { public ShortLexicoderTest() { super( Lexicoders.SHORT, Short.MIN_VALUE, Short.MAX_VALUE, new Short[] {(short) -10, Short.MIN_VALUE, (short) 2678, Short.MAX_VALUE, (short) 0}, UnsignedBytes.lexicographicalComparator()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/sfc/data/BasicNumericDatasetTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.data; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.numeric.NumericValue; public class BasicNumericDatasetTest { private final double DELTA = 1e-15; private final BasicNumericDataset basicNumericDatasetRanges = new BasicNumericDataset( new NumericData[] { new NumericRange(10, 50), new NumericRange(25, 95), new NumericRange(-50, 50)}); private final BasicNumericDataset basicNumericDatasetValues = new BasicNumericDataset( new NumericData[] {new NumericValue(25), new NumericValue(60), new NumericValue(0)}); @Test public void testNumericRangesMinValues() { final int expectedCount = 3; final Double[] expectedMinValues = new Double[] {10d, 25d, -50d}; final Double[] mins = basicNumericDatasetRanges.getMinValuesPerDimension(); Assert.assertEquals(expectedCount, basicNumericDatasetRanges.getDimensionCount()); for (int i = 0; i < basicNumericDatasetRanges.getDimensionCount(); i++) { Assert.assertEquals(expectedMinValues[i], mins[i], DELTA); } } @Test public void testNumericRangesMaxValues() { final int expectedCount = 3; final Double[] expectedMaxValues = new Double[] {50d, 95d, 50d}; final Double[] max = basicNumericDatasetRanges.getMaxValuesPerDimension(); Assert.assertEquals(expectedCount, basicNumericDatasetRanges.getDimensionCount()); for (int i = 0; i < basicNumericDatasetRanges.getDimensionCount(); i++) { 
Assert.assertEquals(expectedMaxValues[i], max[i], DELTA); } } @Test public void testNumericRangesCentroidValues() { final int expectedCount = 3; final Double[] expectedCentroidValues = new Double[] {30d, 60d, 0d}; final Double[] centroid = basicNumericDatasetRanges.getCentroidPerDimension(); Assert.assertEquals(expectedCount, basicNumericDatasetRanges.getDimensionCount()); for (int i = 0; i < basicNumericDatasetRanges.getDimensionCount(); i++) { Assert.assertEquals(expectedCentroidValues[i], centroid[i], DELTA); } } @Test public void testNumericValuesAllValues() { final int expectedCount = 3; final double[] expectedValues = new double[] {25, 60, 0}; final Double[] mins = basicNumericDatasetValues.getMinValuesPerDimension(); final Double[] max = basicNumericDatasetValues.getMaxValuesPerDimension(); final Double[] centroid = basicNumericDatasetValues.getCentroidPerDimension(); Assert.assertEquals(expectedCount, basicNumericDatasetValues.getDimensionCount()); for (int i = 0; i < basicNumericDatasetValues.getDimensionCount(); i++) { Assert.assertEquals(expectedValues[i], mins[i], DELTA); Assert.assertEquals(expectedValues[i], max[i], DELTA); Assert.assertEquals(expectedValues[i], centroid[i], DELTA); } } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/sfc/data/NumericRangeTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.data; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.index.numeric.NumericRange; public class NumericRangeTest { private final double MINIMUM = 20; private final double MAXIMUM = 100; private final double CENTROID = 60; private final double DELTA = 1e-15; @Test public void testNumericRangeValues() { final NumericRange numericRange = new NumericRange(MINIMUM, MAXIMUM); Assert.assertEquals(MINIMUM, numericRange.getMin(), DELTA); Assert.assertEquals(MAXIMUM, numericRange.getMax(), DELTA); Assert.assertEquals(CENTROID, numericRange.getCentroid(), DELTA); Assert.assertTrue(numericRange.isRange()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/sfc/data/NumericValueTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.sfc.data; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.core.index.numeric.NumericValue; public class NumericValueTest { private final double VALUE = 50; private final double DELTA = 1e-15; @Test public void testNumericValue() { final NumericValue numericValue = new NumericValue(VALUE); Assert.assertEquals(VALUE, numericValue.getMin(), DELTA); Assert.assertEquals(VALUE, numericValue.getMax(), DELTA); Assert.assertEquals(VALUE, numericValue.getCentroid(), DELTA); Assert.assertFalse(numericValue.isRange()); } } ================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/sfc/xz/XZOrderSFCTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc.xz;

import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.sfc.SFCDimensionDefinition;

/** Smoke tests for the XZ-order space-filling curve over a lon/lat dimension pair. */
public class XZOrderSFCTest {

  @Test
  public void testIndex() {
    final Double[] samplePoint = {42d, 43d, 57d, 59d};
    // TODO Meaningful examination of results?
    Assert.assertNotNull(createSFC().getId(samplePoint));
  }

  @Test
  public void testRangeDecomposition() {
    final NumericRange lonBounds = new NumericRange(19.0, 21.0);
    final NumericRange latBounds = new NumericRange(33.0, 34.0);
    final MultiDimensionalNumericData query =
        new BasicNumericDataset(new NumericData[] {lonBounds, latBounds});
    // TODO Meaningful examination of results?
    Assert.assertNotNull(createSFC().decomposeRangeFully(query));
  }

  /** Builds a two-dimensional (lon, lat) XZ-order SFC with 32 bits of precision per dimension. */
  private XZOrderSFC createSFC() {
    final SFCDimensionDefinition lon =
        new SFCDimensionDefinition(new BasicDimensionDefinition(-180.0, 180.0), 32);
    final SFCDimensionDefinition lat =
        new SFCDimensionDefinition(new BasicDimensionDefinition(-90.0, 90.0), 32);
    return new XZOrderSFC(new SFCDimensionDefinition[] {lon, lat});
  }
}
================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/sfc/zorder/ZOrderSFCTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.sfc.zorder;

/** Placeholder test class for the Z-order space-filling-curve implementation (no tests yet). */
public class ZOrderSFCTest {
  // TODO: add unit tests for ZOrder implementation
}
================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/simple/HashKeyIndexStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.simple;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.CompoundIndexStrategy;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinates;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.SinglePartitionInsertionIds;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;

/**
 * Tests a {@code HashKeyIndexStrategy} (3 partitions) compounded with a single-tier Hilbert SFC
 * over a lon/lat dimension pair: partition-key distribution, binary round-tripping, and query-range
 * decomposition.
 *
 * <p>NOTE(review): generic type arguments on collections were stripped during extraction and have
 * been restored below; confirm against upstream.
 */
public class HashKeyIndexStrategyTest {
  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =
      new NumericDimensionDefinition[] {
          new BasicDimensionDefinition(-180, 180),
          new BasicDimensionDefinition(-90, 90)};
  private static final NumericIndexStrategy sfcIndexStrategy =
      TieredSFCIndexFactory.createSingleTierStrategy(
          SPATIAL_DIMENSIONS,
          new int[] {16, 16},
          SFCType.HILBERT);
  // renamed from the typo'd "hashIdexStrategy"
  private static final HashKeyIndexStrategy hashIndexStrategy = new HashKeyIndexStrategy(3);
  private static final CompoundIndexStrategy compoundIndexStrategy =
      new CompoundIndexStrategy(hashIndexStrategy, sfcIndexStrategy);
  private static final NumericRange dimension1Range = new NumericRange(50.0, 50.025);
  private static final NumericRange dimension2Range = new NumericRange(-20.5, -20.455);
  private static final MultiDimensionalNumericData sfcIndexedRange =
      new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});

  /**
   * Inserts many small ranges and checks that partition keys are roughly evenly distributed: the
   * standard deviation of per-partition counts must stay below 18% of the mean.
   */
  @Test
  public void testDistribution() {
    final Map<ByteArray, Long> counts = new HashMap<>();
    int total = 0;
    for (double x = 90; x < 180; x += 0.05) {
      for (double y = 50; y < 90; y += 0.5) {
        final NumericRange dimension1Range = new NumericRange(x, x + 0.002);
        final NumericRange dimension2Range = new NumericRange(y - 0.002, y);
        final MultiDimensionalNumericData sfcIndexedRange =
            new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});
        for (final byte[] id : hashIndexStrategy.getInsertionPartitionKeys(sfcIndexedRange)) {
          final Long count = counts.get(new ByteArray(id));
          final long nextcount = count == null ? 1 : count + 1;
          counts.put(new ByteArray(id), nextcount);
          total++;
        }
      }
    }
    // BUGFIX: was "total / counts.size()" — integer division truncated the mean before it was
    // widened to double, skewing the standard-deviation threshold below.
    final double mean = (double) total / counts.size();
    double diff = 0.0;
    for (final Long count : counts.values()) {
      diff += Math.pow(mean - count, 2);
    }
    final double sd = Math.sqrt(diff / counts.size());
    assertTrue(sd < (mean * 0.18));
  }

  /** The compound strategy must survive a serialize/deserialize/serialize round trip unchanged. */
  @Test
  public void testBinaryEncoding() {
    final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);
    final CompoundIndexStrategy deserializedStrategy =
        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);
    final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);
    Assert.assertArrayEquals(bytes, bytes2);
  }

  @Test
  public void testGetNumberOfDimensions() {
    final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();
    Assert.assertEquals(2, numDimensions);
  }

  /**
   * Round-trips an indexed range through insertion ids back to per-dimension coordinates and
   * numeric bounds; exactly one partition is expected for a single insertion.
   */
  @Test
  public void testGetCoordinatesPerDimension() {
    final NumericRange dimension1Range = new NumericRange(20.01, 20.02);
    final NumericRange dimension2Range = new NumericRange(30.51, 30.59);
    final MultiDimensionalNumericData sfcIndexedRange =
        new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});
    final InsertionIds id = compoundIndexStrategy.getInsertionIds(sfcIndexedRange);
    for (final SinglePartitionInsertionIds partitionKey : id.getPartitionKeys()) {
      for (final byte[] sortKey : partitionKey.getSortKeys()) {
        final MultiDimensionalCoordinates coords =
            compoundIndexStrategy.getCoordinatesPerDimension(
                partitionKey.getPartitionKey(),
                sortKey);
        assertTrue(coords.getCoordinate(0).getCoordinate() > 0);
        assertTrue(coords.getCoordinate(1).getCoordinate() > 0);
      }
    }
    final Iterator<SinglePartitionInsertionIds> it = id.getPartitionKeys().iterator();
    assertTrue(it.hasNext());
    final SinglePartitionInsertionIds partitionId = it.next();
    assertFalse(it.hasNext());
    for (final byte[] sortKey : partitionId.getSortKeys()) {
      final MultiDimensionalNumericData nd =
          compoundIndexStrategy.getRangeForId(partitionId.getPartitionKey(), sortKey);
      assertEquals(20.02, nd.getMaxValuesPerDimension()[0], 0.01);
      assertEquals(30.59, nd.getMaxValuesPerDimension()[1], 0.1);
      assertEquals(20.01, nd.getMinValuesPerDimension()[0], 0.01);
      assertEquals(30.51, nd.getMinValuesPerDimension()[1], 0.1);
    }
  }

  /**
   * The compound query ranges must equal the SFC query ranges prefixed with every possible
   * partition byte (0..2 for a 3-partition hash strategy).
   */
  @Test
  public void testGetQueryRangesWithMaximumNumberOfRanges() {
    final List<ByteArrayRange> sfcIndexRanges =
        sfcIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges();
    final List<ByteArrayRange> ranges = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
      for (final ByteArrayRange r2 : sfcIndexRanges) {
        final byte[] start = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getStart());
        final byte[] end = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getEnd());
        ranges.add(new ByteArrayRange(start, end));
      }
    }
    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);
    final Set<ByteArrayRange> compoundIndexRanges =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges());
    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));
    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));
  }
}
================================================ FILE: core/index/src/test/java/org/locationtech/geowave/core/index/simple/RoundRobinKeyIndexStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.simple;

import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.CompoundIndexStrategy;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinates;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.SinglePartitionInsertionIds;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.sfc.SFCFactory.SFCType;
import org.locationtech.geowave.core.index.sfc.tiered.TieredSFCIndexFactory;

/**
 * Tests a {@code RoundRobinKeyIndexStrategy} compounded with a single-tier Hilbert SFC: binary
 * round-tripping, query-range decomposition, exact round-robin uniformity, and insertion-id
 * composition.
 *
 * <p>NOTE(review): generic type arguments on collections were stripped during extraction and have
 * been restored below; confirm against upstream.
 */
public class RoundRobinKeyIndexStrategyTest {
  private static final NumericDimensionDefinition[] SPATIAL_DIMENSIONS =
      new NumericDimensionDefinition[] {
          new BasicDimensionDefinition(-180, 180),
          new BasicDimensionDefinition(-90, 90)};
  private static final NumericIndexStrategy sfcIndexStrategy =
      TieredSFCIndexFactory.createSingleTierStrategy(
          SPATIAL_DIMENSIONS,
          new int[] {16, 16},
          SFCType.HILBERT);
  private static final CompoundIndexStrategy compoundIndexStrategy =
      new CompoundIndexStrategy(new RoundRobinKeyIndexStrategy(), sfcIndexStrategy);
  private static final NumericRange dimension1Range = new NumericRange(50.0, 50.025);
  private static final NumericRange dimension2Range = new NumericRange(-20.5, -20.455);
  private static final MultiDimensionalNumericData sfcIndexedRange =
      new BasicNumericDataset(new NumericData[] {dimension1Range, dimension2Range});

  /** The compound strategy must survive a serialize/deserialize/serialize round trip unchanged. */
  @Test
  public void testBinaryEncoding() {
    final byte[] bytes = PersistenceUtils.toBinary(compoundIndexStrategy);
    final CompoundIndexStrategy deserializedStrategy =
        (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes);
    final byte[] bytes2 = PersistenceUtils.toBinary(deserializedStrategy);
    Assert.assertArrayEquals(bytes, bytes2);
  }

  @Test
  public void testGetNumberOfDimensions() {
    final int numDimensions = compoundIndexStrategy.getNumberOfDimensions();
    Assert.assertEquals(2, numDimensions);
  }

  /**
   * The compound query ranges must equal the SFC query ranges prefixed with every possible
   * partition byte (0..2 by default).
   */
  @Test
  public void testGetQueryRangesWithMaximumNumberOfRanges() {
    final List<ByteArrayRange> sfcIndexRanges =
        sfcIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges();
    final List<ByteArrayRange> ranges = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
      for (final ByteArrayRange r2 : sfcIndexRanges) {
        final byte[] start = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getStart());
        final byte[] end = ByteArrayUtils.combineArrays(new byte[] {(byte) i}, r2.getEnd());
        ranges.add(new ByteArrayRange(start, end));
      }
    }
    final Set<ByteArrayRange> testRanges = new HashSet<>(ranges);
    final Set<ByteArrayRange> compoundIndexRanges =
        new HashSet<>(
            compoundIndexStrategy.getQueryRanges(sfcIndexedRange).getCompositeQueryRanges());
    Assert.assertTrue(testRanges.containsAll(compoundIndexRanges));
    Assert.assertTrue(compoundIndexRanges.containsAll(testRanges));
  }

  /**
   * Round-robin assignment is exactly uniform: 2048 insertions over 512 partitions must land
   * exactly 4 per partition.
   */
  @Test
  public void testUniformityAndLargeKeySet() {
    final RoundRobinKeyIndexStrategy strategy = new RoundRobinKeyIndexStrategy(512);
    final Map<ByteArray, Integer> countMap = new HashMap<>();
    for (int i = 0; i < 2048; i++) {
      final byte[][] ids = strategy.getInsertionPartitionKeys(sfcIndexedRange);
      assertEquals(1, ids.length);
      final ByteArray key = new ByteArray(ids[0]);
      if (countMap.containsKey(key)) {
        countMap.put(key, countMap.get(key) + 1);
      } else {
        countMap.put(key, 1);
      }
    }
    for (final Integer i : countMap.values()) {
      assertEquals(4, i.intValue());
    }
  }

  /**
   * Compound insertion ids must be the SFC insertion ids prefixed with a partition byte, and
   * per-dimension coordinates must agree between the SFC strategy and the compound strategy.
   */
  @Test
  public void testGetInsertionIds() {
    final List<ByteArray> ids = new ArrayList<>();
    final InsertionIds ids2 = sfcIndexStrategy.getInsertionIds(sfcIndexedRange, 1);
    final List<byte[]> compositeIds = ids2.getCompositeInsertionIds();
    for (int i = 0; i < 3; i++) {
      for (final byte[] id2 : compositeIds) {
        ids.add(new ByteArray(ByteArrayUtils.combineArrays(new byte[] {(byte) i}, id2)));
      }
    }
    final Set<ByteArray> testIds = new HashSet<>(ids);
    final Set<ByteArray> compoundIndexIds =
        compoundIndexStrategy.getInsertionIds(
            sfcIndexedRange,
            8).getCompositeInsertionIds().stream().map(i -> new ByteArray(i)).collect(
                Collectors.toSet());
    Assert.assertTrue(testIds.containsAll(compoundIndexIds));
    final SinglePartitionInsertionIds id2 = ids2.getPartitionKeys().iterator().next();
    final MultiDimensionalCoordinates sfcIndexCoordinatesPerDim =
        sfcIndexStrategy.getCoordinatesPerDimension(
            id2.getPartitionKey(),
            id2.getSortKeys().get(0));
    // the first 2 bytes are the partition keys
    final MultiDimensionalCoordinates coordinatesPerDim =
        compoundIndexStrategy.getCoordinatesPerDimension(
            Arrays.copyOfRange(ids.get(0).getBytes(), 0, 2),
            Arrays.copyOfRange(ids.get(0).getBytes(), 2, ids.get(0).getBytes().length));
    Assert.assertTrue(sfcIndexCoordinatesPerDim.equals(coordinatesPerDim));
  }
}
================================================ FILE:
core/index/src/test/java/org/locationtech/geowave/core/index/simple/SimpleNumericIndexStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.index.simple;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.numeric.NumericValue;
import com.google.common.primitives.UnsignedBytes;

/**
 * Parameterized tests that run against each of the simple single-dimension numeric index
 * strategies (short, int, long): query-range construction, insertion-id generation, and
 * lexicographic sort-order preservation of the encoded keys.
 *
 * <p>NOTE(review): generic type arguments were stripped during extraction; the strategy field is
 * restored as {@code SimpleNumericIndexStrategy<? extends Number>} — confirm against upstream.
 */
@RunWith(Parameterized.class)
public class SimpleNumericIndexStrategyTest {
  private final SimpleNumericIndexStrategy<? extends Number> strategy;

  public SimpleNumericIndexStrategyTest(
      final SimpleNumericIndexStrategy<? extends Number> strategy) {
    this.strategy = strategy;
  }

  /** One strategy instance per supported numeric width. */
  @Parameters
  public static Collection<Object[]> instancesToTest() {
    return Arrays.asList(
        new Object[] {new SimpleShortIndexStrategy()},
        new Object[] {new SimpleIntegerIndexStrategy()},
        new Object[] {new SimpleLongIndexStrategy()});
  }

  /** Widens any of the supported boxed integral types to long for uniform comparison. */
  private static long castToLong(final Number n) {
    if (n instanceof Short) {
      return (n.shortValue());
    } else if (n instanceof Integer) {
      return (n.intValue());
    } else if (n instanceof Long) {
      return n.longValue();
    } else {
      throw new UnsupportedOperationException("only supports casting Short, Integer, and Long");
    }
  }

  private static MultiDimensionalNumericData getIndexedRange(final long value) {
    return getIndexedRange(value, value);
  }

  /** Builds a single-dimension dataset; a degenerate range collapses to a point value. */
  private static MultiDimensionalNumericData getIndexedRange(final long min, final long max) {
    NumericData[] dataPerDimension;
    if (min == max) {
      dataPerDimension = new NumericData[] {new NumericValue(min)};
    } else {
      dataPerDimension = new NumericData[] {new NumericRange(min, max)};
    }
    return new BasicNumericDataset(dataPerDimension);
  }

  /** Encodes a single point value and returns its (sole) composite insertion id. */
  private byte[] getByteArray(final long value) {
    final MultiDimensionalNumericData insertionData = getIndexedRange(value);
    final List<byte[]> insertionIds =
        strategy.getInsertionIds(insertionData).getCompositeInsertionIds();
    final byte[] insertionId = insertionIds.iterator().next();
    return insertionId;
  }

  /** A point query must produce exactly one degenerate range whose start equals its end. */
  @Test
  public void testGetQueryRangesPoint() {
    final MultiDimensionalNumericData indexedRange = getIndexedRange(10l);
    final QueryRanges ranges = strategy.getQueryRanges(indexedRange);
    Assert.assertEquals(ranges.getCompositeQueryRanges().size(), 1);
    final ByteArrayRange range = ranges.getCompositeQueryRanges().get(0);
    final byte[] start = range.getStart();
    final byte[] end = range.getEnd();
    Assert.assertTrue(Arrays.equals(start, end));
    Assert.assertEquals(10L, castToLong(strategy.getLexicoder().fromByteArray(start)));
  }

  /** A range query must produce one range whose endpoints decode to the original bounds. */
  @Test
  public void testGetQueryRangesRange() {
    final long startValue = 10;
    final long endValue = 15;
    final MultiDimensionalNumericData indexedRange = getIndexedRange(startValue, endValue);
    final List<ByteArrayRange> ranges =
        strategy.getQueryRanges(indexedRange).getCompositeQueryRanges();
    Assert.assertEquals(ranges.size(), 1);
    final ByteArrayRange range = ranges.get(0);
    final byte[] start = range.getStart();
    final byte[] end = range.getEnd();
    Assert.assertEquals(castToLong(strategy.getLexicoder().fromByteArray(start)), startValue);
    Assert.assertEquals(castToLong(strategy.getLexicoder().fromByteArray(end)), endValue);
  }

  /**
   * Check that lexicographical sorting of the byte arrays yields the same sort order as sorting the
   * values
   */
  @Test
  public void testRangeSortOrder() {
    final List<Long> values = Arrays.asList(10l, 0l, 15l, -275l, 982l, 430l, -1l, 1l, 82l);
    final List<byte[]> byteArrays = new ArrayList<>(values.size());
    for (final long value : values) {
      final byte[] bytes = getByteArray(value);
      byteArrays.add(bytes);
    }
    Collections.sort(values);
    Collections.sort(byteArrays, UnsignedBytes.lexicographicalComparator());
    final List<Long> convertedValues = new ArrayList<>(values.size());
    for (final byte[] bytes : byteArrays) {
      final long value = castToLong(strategy.getLexicoder().fromByteArray(bytes));
      convertedValues.add(value);
    }
    Assert.assertTrue(values.equals(convertedValues));
  }

  /** A point insertion produces a single id that decodes back to the original value. */
  @Test
  public void testGetInsertionIdsPoint() {
    final long pointValue = 5926;
    final MultiDimensionalNumericData indexedData = getIndexedRange(pointValue);
    final List<byte[]> insertionIds =
        strategy.getInsertionIds(indexedData).getCompositeInsertionIds();
    Assert.assertEquals(insertionIds.size(), 1);
    final byte[] insertionId = insertionIds.get(0);
    Assert.assertEquals(castToLong(strategy.getLexicoder().fromByteArray(insertionId)), pointValue);
  }

  /** A range insertion produces one id per integral value in [start, end], in order. */
  @Test
  public void testGetInsertionIdsRange() {
    final long startValue = 9876;
    final long endValue = startValue + 15;
    final MultiDimensionalNumericData indexedData = getIndexedRange(startValue, endValue);
    final List<byte[]> insertionIds =
        strategy.getInsertionIds(indexedData).getCompositeInsertionIds();
    Assert.assertEquals(insertionIds.size(), (int) ((endValue - startValue) + 1));
    int i = 0;
    for (final byte[] insertionId : insertionIds) {
      Assert.assertEquals(
          castToLong(strategy.getLexicoder().fromByteArray(insertionId)),
          startValue + i++);
    }
  }
}
================================================ FILE: core/index/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.core.index.TestIndexPersistableRegistry ================================================ FILE: core/ingest/.gitignore
================================================ src/main/java/org/locationtech/geowave/core/ingest/avro/AvroWholeFile.java /bin/ ================================================ FILE: core/ingest/pom.xml ================================================ 4.0.0 geowave-core-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-core-ingest GeoWave Ingest Framework org.apache.avro avro org.apache.avro avro-mapred hadoop2 org.apache.hadoop hadoop-client compile jdk.tools jdk.tools junit junit guava com.google.guava org.locationtech.geowave geowave-core-cli ${project.version} org.locationtech.geowave geowave-core-store ${project.version} org.locationtech.geowave geowave-core-index ${project.version} org.locationtech.geowave geowave-core-mapreduce ${project.version} org.apache.kafka ${kafka.artifact} ${kafka.version} org.apache.zookeeper zookeeper com.sun.jdmk jmxtools com.sun.jmx jmxri net.jpountz.lz4 lz4 com.github.jsr203hadoop jsr203hadoop 1.0.1 org.apache.spark spark-core_2.12 org.apache.spark spark-sql_2.12 ${spark.version} io.findify s3mock_2.12 0.2.6 test org.apache.avro avro-maven-plugin ================================================ FILE: core/ingest/src/main/avro/wholefile.avsc ================================================ { "type" : "record", "name" : "AvroWholeFile", "namespace" : "org.locationtech.geowave.core.ingest.avro", "fields" : [{ "name" : "originalFile", "type" : "bytes", "doc" : "Original file data" }, { "name" : "originalFilePath", "type" : ["string", "null"], "doc" : "Original file path" } ], "doc:" : "Stores the original files from a local file system in an avro" } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/HdfsIngestHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. 
All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;
import org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;
import org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi;
import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * SPI handler that recognizes {@code hdfs://} input paths, installs the HDFS URL stream handler,
 * and resolves the path against the configured HDFS file system URL.
 */
public class HdfsIngestHandler implements IngestUrlHandlerSpi {
  private static final Logger LOGGER = LoggerFactory.getLogger(HdfsIngestHandler.class);

  public HdfsIngestHandler() {}

  /**
   * Resolves an {@code hdfs://} input path to an NIO {@link Path}.
   *
   * @param inputPath the raw user-supplied input path
   * @param configProperties GeoWave configuration (supplies the HDFS file system URL)
   * @return the resolved path, or {@code null} when the path is not HDFS, does not exist, or
   *         handler setup fails (failures are logged rather than thrown)
   */
  @Override
  public Path handlePath(final String inputPath, final Properties configProperties)
      throws IOException {
    // Not an HDFS URL — let another IngestUrlHandlerSpi implementation claim it.
    if (!inputPath.startsWith("hdfs://")) {
      return null;
    }
    try {
      // Install the HDFS URL stream handler (one-time, reflective; may throw Error).
      URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.HDFS);
    } catch (final Error | NoSuchFieldException | SecurityException | IllegalArgumentException
        | IllegalAccessException e) {
      LOGGER.error("Error in setStreamHandlerFactory for HDFS", e);
      return null;
    }
    final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties);
    // Strip the scheme so the remainder can be appended to the configured FS URL.
    final String hdfsInputPath = inputPath.replaceFirst("hdfs://", "/");
    try {
      final URI uri = new URI(hdfsFSUrl + hdfsInputPath);
      // HP Fortify "Path Traversal" false positive
      // What Fortify considers "user input" comes only
      // from users with OS-level access anyway
      final Path path = Paths.get(uri);
      if (!Files.exists(path)) {
        LOGGER.error("Input path {} does not exist", inputPath);
        return null;
      }
      return path;
    } catch (final URISyntaxException e) {
      LOGGER.error("Unable to ingest data, invalid HDFS path", e);
      return null;
    }
  }
}
================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/S3IngestHandler.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Properties;
import org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;
import org.locationtech.geowave.core.ingest.operations.ConfigAWSCommand;
import org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * SPI handler that recognizes {@code s3://} input paths, installs the S3 URL stream handler, and
 * mounts the configured S3 endpoint as an NIO file system to resolve the path.
 */
public class S3IngestHandler implements IngestUrlHandlerSpi {
  private static final Logger LOGGER = LoggerFactory.getLogger(S3IngestHandler.class);

  public S3IngestHandler() {}

  // Returns the resolved Path for an s3:// input, or null when the path is not S3 or
  // setup fails (errors are logged rather than thrown).
  @Override
  public Path handlePath(final String inputPath, final Properties configProperties)
      throws IOException {
    // If input path is S3
    if (inputPath.startsWith("s3://")) {
      try {
        // One-time reflective installation of the S3 URL stream handler.
        URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3);
      } catch (NoSuchFieldException | SecurityException | IllegalArgumentException
          | IllegalAccessException e1) {
        LOGGER.error("Error in setting up S3URLStreamHandler Factory", e1);
        return null;
      }
      if (configProperties == null) {
        LOGGER.error("Unable to load config properties");
        return null;
      }
      String s3EndpointUrl = configProperties.getProperty(ConfigAWSCommand.AWS_S3_ENDPOINT_URL);
      if (s3EndpointUrl == null) {
        // No endpoint configured; fall back to the public AWS endpoint.
        LOGGER.warn("S3 endpoint URL is empty. Config using \"geowave config aws \"");
        s3EndpointUrl = "s3.amazonaws.com";
      }
      if (!s3EndpointUrl.contains("://")) {
        // Endpoint stored without a scheme; default to the s3 scheme.
        s3EndpointUrl = "s3://" + s3EndpointUrl;
      }
      return URLIngestUtils.setupS3FileSystem(inputPath, s3EndpointUrl);
    }
    return null;
  }
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/URLIngestUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest; import java.io.IOException; import java.lang.reflect.Field; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLStreamHandlerFactory; import java.nio.file.FileSystem; import java.nio.file.FileSystemAlreadyExistsException; import java.nio.file.FileSystems; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.util.Collections; import org.locationtech.geowave.mapreduce.hdfs.HdfsUrlStreamHandlerFactory; import org.locationtech.geowave.mapreduce.s3.GeoWaveAmazonS3Factory; import org.locationtech.geowave.mapreduce.s3.S3URLStreamHandlerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.upplication.s3fs.S3FileSystemProvider; public class URLIngestUtils { private static final Logger LOGGER = LoggerFactory.getLogger(URLIngestUtils.class); public static enum URLTYPE { S3, HDFS } private static boolean hasS3Handler = false; private static boolean hasHdfsHandler = false; public static void setURLStreamHandlerFactory(final URLTYPE urlType) throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException { // One-time init for each type if ((urlType == URLTYPE.S3) && hasS3Handler) { return; } else if ((urlType == URLTYPE.HDFS) && hasHdfsHandler) { return; } final Field lockField = URL.class.getDeclaredField("streamHandlerLock"); // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe lockField.setAccessible(true); synchronized (lockField.get(null)) { // check again 
synchronized if ((urlType == URLTYPE.S3) && hasS3Handler) { return; } else if ((urlType == URLTYPE.HDFS) && hasHdfsHandler) { return; } final Field factoryField = URL.class.getDeclaredField("factory"); // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe factoryField.setAccessible(true); final URLStreamHandlerFactory urlStreamHandlerFactory = (URLStreamHandlerFactory) factoryField.get(null); if (urlStreamHandlerFactory == null) { if (urlType == URLTYPE.S3) { URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory()); hasS3Handler = true; } else { // HDFS URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory()); hasHdfsHandler = true; } } else { factoryField.set(null, null); if (urlType == URLTYPE.S3) { URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory(urlStreamHandlerFactory)); hasS3Handler = true; } else { // HDFS URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory(urlStreamHandlerFactory)); hasHdfsHandler = true; } } } } public static Path setupS3FileSystem(final String basePath, final String s3EndpointUrl) throws IOException { Path path = null; FileSystem fs = null; try { fs = FileSystems.newFileSystem( new URI(s3EndpointUrl + "/"), Collections.singletonMap( S3FileSystemProvider.AMAZON_S3_FACTORY_CLASS, GeoWaveAmazonS3Factory.class.getName()), Thread.currentThread().getContextClassLoader()); // HP Fortify "Path Traversal" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway } catch (final URISyntaxException e) { LOGGER.error("Unable to ingest data, Inavlid S3 path"); return null; } catch (final FileSystemAlreadyExistsException e) { LOGGER.info("File system " + s3EndpointUrl + "already exists"); try { fs = FileSystems.getFileSystem(new URI(s3EndpointUrl + "/")); } catch (final URISyntaxException e1) { LOGGER.error("Unable to ingest data, Inavlid S3 path"); return null; } } final 
String s3InputPath = basePath.replaceFirst("s3://", "/"); try { path = fs.getPath(s3InputPath); } catch (final InvalidPathException e) { LOGGER.error("Input valid input path " + s3InputPath); return null; } return path; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/AbstractStageWholeFileToAvro.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.avro;

import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import org.apache.avro.Schema;
import org.apache.commons.io.IOUtils;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterators;

/**
 * This class can be sub-classed as a general-purpose recipe for parallelizing ingestion of files by
 * directly staging the binary of the file to Avro.
 */
public abstract class AbstractStageWholeFileToAvro implements GeoWaveAvroFormatPlugin {
  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStageWholeFileToAvro.class);

  /** Returns the schema of {@link AvroWholeFile}, which wraps a file's raw bytes and path. */
  @Override
  public Schema getAvroSchema() {
    return AvroWholeFile.getClassSchema();
  }

  // Reads the entire file into memory and wraps it as a single AvroWholeFile record;
  // returns an empty iterator (not null) if the file cannot be read.
  @Override
  public CloseableIterator toAvroObjects(final URL f) {
    try {
      // TODO: consider a streaming mechanism in case a single file is too
      // large
      return new CloseableIterator.Wrapper<>(
          Iterators.singletonIterator(
              new AvroWholeFile(ByteBuffer.wrap(IOUtils.toByteArray(f)), f.getPath())));
    } catch (final IOException e) {
      LOGGER.warn("Unable to read file", e);
    }
    return new CloseableIterator.Empty<>();
  }
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GenericAvroSerializer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.avro;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Generic Avro serializer/deserializer, can convert Avro Java object to a byte array and a byte
 * array back to a usable Avro Java object.
 *
 * @param - Base Avro class extended by all generated class files
 */
public class GenericAvroSerializer {
  private static final Logger LOGGER = LoggerFactory.getLogger(GenericAvroSerializer.class);

  private static final EncoderFactory ef = EncoderFactory.get();
  private static final DecoderFactory df = DecoderFactory.get();

  // Writers/readers cached per fully-qualified schema name; access is serialized by
  // the synchronized static methods below.
  private static final Map writers = new HashMap<>();
  private static final Map readers = new HashMap<>();

  public GenericAvroSerializer() {}

  // Serializes an Avro record to its binary encoding; returns null on failure (logged).
  public static synchronized byte[] serialize(final T avroObject, final Schema avroSchema) {
    try {
      final ByteArrayOutputStream os = new ByteArrayOutputStream();
      final BinaryEncoder encoder = ef.binaryEncoder(os, null);
      final String schemaName = getSchemaName(avroSchema);
      // Lazily create and cache one writer per schema.
      if (!writers.containsKey(schemaName)) {
        writers.put(schemaName, new SpecificDatumWriter(avroSchema));
      }
      final SpecificDatumWriter writer = writers.get(schemaName);
      writer.write(avroObject, encoder);
      encoder.flush();
      return os.toByteArray();
    } catch (final IOException e) {
      LOGGER.error("Unable to serialize Avro record to byte[]: " + e.getMessage(), e);
      return null;
    }
  }

  // Deserializes Avro binary data back into a record; returns null on failure (logged).
  public static synchronized T deserialize(final byte[] avroData, final Schema avroSchema) {
    try {
      final BinaryDecoder decoder = df.binaryDecoder(avroData, null);
      final String schemaName = getSchemaName(avroSchema);
      // Lazily create and cache one reader per schema.
      if (!readers.containsKey(schemaName)) {
        readers.put(schemaName, new SpecificDatumReader(avroSchema));
      }
      final SpecificDatumReader reader = readers.get(schemaName);
      return reader.read(null, decoder);
    } catch (final IOException e) {
      LOGGER.error("Unable to deserialize byte[] to Avro object: " + e.getMessage(), e);
      return null;
    }
  }

  // Cache key: the schema's fully-qualified name (namespace.name).
  private static String getSchemaName(final Schema schema) {
    return schema.getNamespace() + "." + schema.getName();
  }
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GeoWaveAvroFormatPlugin.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.avro;

import org.locationtech.geowave.core.store.ingest.IndexProvider;
import org.locationtech.geowave.core.store.ingest.IngestPluginBase;
import org.locationtech.geowave.core.store.ingest.LocalPluginBase;

/**
 * This is the main plugin interface for reading from a local file system, and formatting
 * intermediate data (for example, to HDFS or to Kafka for further processing or ingest) from any
 * file that is supported to Avro.
 *
 * @param The type for the input data
 * @param The type that represents each data entry being ingested
 */
public interface GeoWaveAvroFormatPlugin extends GeoWaveAvroPluginBase, LocalPluginBase,
    IndexProvider {

  /**
   * An implementation of ingestion that ingests Avro Java objects into GeoWave
   *
   * @return The implementation for ingestion from Avro
   */
  public IngestPluginBase getIngestWithAvroPlugin();
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GeoWaveAvroPluginBase.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.avro;

import java.net.URL;
import org.locationtech.geowave.core.store.CloseableIterator;

/**
 * All plugins based off of staged intermediate data (either reading or writing) must implement this
 * interface. For handling intermediate data, the GeoWave ingestion framework has standardized on
 * Avro for java object serialization and an Avro schema must be provided for handling any
 * intermediate data.
 */
public interface GeoWaveAvroPluginBase extends GeoWaveAvroSchemaProvider {

  /**
   * Converts the supported file into an Avro encoded Java object.
   *
   * @param file The file to convert to Avro
   * @return The Avro encoded Java object
   */
  public CloseableIterator toAvroObjects(URL file);
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/avro/GeoWaveAvroSchemaProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.avro;

import org.apache.avro.Schema;

/** Supplies the Avro schema used by a plugin for its intermediate data representation. */
public interface GeoWaveAvroSchemaProvider {

  /**
   * Returns the Avro schema for the plugin
   *
   * @return the Avro schema for the intermediate data
   */
  public Schema getAvroSchema();
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/HdfsFile.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /** * Autogenerated by Avro * *

DO NOT EDIT DIRECTLY */ package org.locationtech.geowave.core.ingest.hdfs; @SuppressWarnings("all") @org.apache.avro.specific.AvroGenerated public class HdfsFile extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse( "{\"type\":\"record\",\"name\":\"HdfsFile\",\"namespace\":\"org.locationtech.geowave.core.ingest.hdfs\",\"fields\":[{\"name\":\"originalFile\",\"type\":\"bytes\",\"doc\":\"Original file data\"},{\"name\":\"originalFilePath\",\"type\":[\"string\",\"null\"],\"doc\":\"Original file path\"}],\"doc:\":\"Stores the original files from a local file system in HDFS\"}"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } /** Original file data */ @Deprecated public java.nio.ByteBuffer originalFile; /** Original file path */ @Deprecated public java.lang.CharSequence originalFilePath; /** * Default constructor. Note that this does not initialize fields to their default values from the * schema. If that is desired then one should use newBuilder(). */ public HdfsFile() {} /** All-args constructor. */ public HdfsFile( final java.nio.ByteBuffer originalFile, final java.lang.CharSequence originalFilePath) { this.originalFile = originalFile; this.originalFilePath = originalFilePath; } @Override public org.apache.avro.Schema getSchema() { return SCHEMA$; } // Used by DatumWriter. Applications should not call. @Override public java.lang.Object get(final int field$) { switch (field$) { case 0: return originalFile; case 1: return originalFilePath; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } // Used by DatumReader. Applications should not call. 
@Override @SuppressWarnings(value = "unchecked") public void put(final int field$, final java.lang.Object value$) { switch (field$) { case 0: originalFile = (java.nio.ByteBuffer) value$; break; case 1: originalFilePath = (java.lang.CharSequence) value$; break; default: throw new org.apache.avro.AvroRuntimeException("Bad index"); } } /** Gets the value of the 'originalFile' field. Original file data */ public java.nio.ByteBuffer getOriginalFile() { return originalFile; } /** * Sets the value of the 'originalFile' field. Original file data * @param value the value to set. */ public void setOriginalFile(final java.nio.ByteBuffer value) { originalFile = value; } /** Gets the value of the 'originalFilePath' field. Original file path */ public java.lang.CharSequence getOriginalFilePath() { return originalFilePath; } /** * Sets the value of the 'originalFilePath' field. Original file path * @param value the value to * set. */ public void setOriginalFilePath(final java.lang.CharSequence value) { originalFilePath = value; } /** Creates a new HdfsFile RecordBuilder */ public static org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder newBuilder() { return new org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder(); } /** Creates a new HdfsFile RecordBuilder by copying an existing Builder */ public static org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder newBuilder( final org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder other) { return new org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder(other); } /** Creates a new HdfsFile RecordBuilder by copying an existing HdfsFile instance */ public static org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder newBuilder( final org.locationtech.geowave.core.ingest.hdfs.HdfsFile other) { return new org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder(other); } /** RecordBuilder for HdfsFile instances. 
*/ public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase implements org.apache.avro.data.RecordBuilder { private java.nio.ByteBuffer originalFile; private java.lang.CharSequence originalFilePath; /** Creates a new Builder */ private Builder() { super(org.locationtech.geowave.core.ingest.hdfs.HdfsFile.SCHEMA$); } /** Creates a Builder by copying an existing Builder */ private Builder(final org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder other) { super(other); if (isValidValue(fields()[0], other.originalFile)) { originalFile = data().deepCopy(fields()[0].schema(), other.originalFile); fieldSetFlags()[0] = true; } if (isValidValue(fields()[1], other.originalFilePath)) { originalFilePath = data().deepCopy(fields()[1].schema(), other.originalFilePath); fieldSetFlags()[1] = true; } } /** Creates a Builder by copying an existing HdfsFile instance */ private Builder(final org.locationtech.geowave.core.ingest.hdfs.HdfsFile other) { super(org.locationtech.geowave.core.ingest.hdfs.HdfsFile.SCHEMA$); if (isValidValue(fields()[0], other.originalFile)) { originalFile = data().deepCopy(fields()[0].schema(), other.originalFile); fieldSetFlags()[0] = true; } if (isValidValue(fields()[1], other.originalFilePath)) { originalFilePath = data().deepCopy(fields()[1].schema(), other.originalFilePath); fieldSetFlags()[1] = true; } } /** Gets the value of the 'originalFile' field */ public java.nio.ByteBuffer getOriginalFile() { return originalFile; } /** Sets the value of the 'originalFile' field */ public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder setOriginalFile( final java.nio.ByteBuffer value) { validate(fields()[0], value); originalFile = value; fieldSetFlags()[0] = true; return this; } /** Checks whether the 'originalFile' field has been set */ public boolean hasOriginalFile() { return fieldSetFlags()[0]; } /** Clears the value of the 'originalFile' field */ public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder 
clearOriginalFile() { originalFile = null; fieldSetFlags()[0] = false; return this; } /** Gets the value of the 'originalFilePath' field */ public java.lang.CharSequence getOriginalFilePath() { return originalFilePath; } /** Sets the value of the 'originalFilePath' field */ public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder setOriginalFilePath( final java.lang.CharSequence value) { validate(fields()[1], value); originalFilePath = value; fieldSetFlags()[1] = true; return this; } /** Checks whether the 'originalFilePath' field has been set */ public boolean hasOriginalFilePath() { return fieldSetFlags()[1]; } /** Clears the value of the 'originalFilePath' field */ public org.locationtech.geowave.core.ingest.hdfs.HdfsFile.Builder clearOriginalFilePath() { originalFilePath = null; fieldSetFlags()[1] = false; return this; } @Override public HdfsFile build() { try { final HdfsFile record = new HdfsFile(); record.originalFile = fieldSetFlags()[0] ? originalFile : (java.nio.ByteBuffer) defaultValue(fields()[0]); record.originalFilePath = fieldSetFlags()[1] ? originalFilePath : (java.lang.CharSequence) defaultValue(fields()[1]); return record; } catch (final Exception e) { throw new org.apache.avro.AvroRuntimeException(e); } } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/StageRunData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.apache.avro.file.CodecFactory; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericDatumWriter; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A class to hold intermediate stage data that must be used throughout the life of the HDFS stage * process. */ public class StageRunData { private static final Logger LOGGER = LoggerFactory.getLogger(StageRunData.class); private final Map cachedWriters = new HashMap<>(); private final Path hdfsBaseDirectory; private final FileSystem fs; public StageRunData(final Path hdfsBaseDirectory, final FileSystem fs) { this.hdfsBaseDirectory = hdfsBaseDirectory; this.fs = fs; } public DataFileWriter getWriter(final String typeName, final GeoWaveAvroFormatPlugin plugin) { return getDataWriterCreateIfNull(typeName, plugin); } private synchronized DataFileWriter getDataWriterCreateIfNull( final String typeName, final GeoWaveAvroFormatPlugin plugin) { if (!cachedWriters.containsKey(typeName)) { FSDataOutputStream out = null; final DataFileWriter dfw = new DataFileWriter(new GenericDatumWriter()); cachedWriters.put(typeName, dfw); dfw.setCodec(CodecFactory.snappyCodec()); try { // TODO: we should probably clean up the type name to make it // HDFS path safe in case there are invalid characters // also, if a file already 
exists do we want to delete it or // append to it? out = fs.create(new Path(hdfsBaseDirectory, typeName)); dfw.create(plugin.getAvroSchema(), out); } catch (final IOException e) { LOGGER.error("Unable to create output stream", e); // cache a null value so we don't continually try to recreate cachedWriters.put(typeName, null); return null; } } return cachedWriters.get(typeName); } public synchronized void close() { for (final DataFileWriter dfw : cachedWriters.values()) { try { dfw.close(); } catch (final IOException e) { LOGGER.warn("Unable to close sequence file stream", e); } } cachedWriters.clear(); } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/StageToHdfsDriver.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.hdfs;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Map;
import org.apache.avro.file.DataFileWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.ingest.AbstractLocalFileDriver;
import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class actually executes the staging of data to HDFS based on the available type plugin
 * providers that are discovered through SPI.
 */
public class StageToHdfsDriver extends AbstractLocalFileDriver, StageRunData> {
  private static final Logger LOGGER = LoggerFactory.getLogger(StageToHdfsDriver.class);

  // Plugins keyed by type name, discovered via SPI by the caller.
  private final Map> ingestPlugins;
  private final String hdfsHostPort;
  private final String basePath;

  public StageToHdfsDriver(
      final Map> ingestPlugins,
      final String hdfsHostPort,
      final String basePath,
      final LocalInputCommandLineOptions inputOptions) {
    super(inputOptions);
    this.ingestPlugins = ingestPlugins;
    this.hdfsHostPort = hdfsHostPort;
    this.basePath = basePath;
  }

  // Converts one local file to Avro records and appends them to the type's staged
  // HDFS file; per-record append failures are logged and processing continues.
  @Override
  protected void processFile(
      final URL file,
      final String typeName,
      final GeoWaveAvroFormatPlugin plugin,
      final StageRunData runData) {
    final DataFileWriter writer = runData.getWriter(typeName, plugin);
    if (writer != null) {
      try (final CloseableIterator objs = plugin.toAvroObjects(file)) {
        while (objs.hasNext()) {
          final Object obj = objs.next();
          try {
            writer.append(obj);
          } catch (final IOException e) {
            LOGGER.error("Cannot append data to sequence file", e);
          }
        }
      }
    }
  }

  // Connects to HDFS, ensures the base directory exists, then stages every matching
  // input file; returns false on any I/O failure (failures are logged).
  public boolean runOperation(final String inputPath, final File configFile) {
    // first collect the stage to hdfs plugins
    final Map> stageToHdfsPlugins = ingestPlugins;
    final Configuration conf = new Configuration();
    conf.set("fs.defaultFS", hdfsHostPort);
    conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    final Path hdfsBaseDirectory = new Path(basePath);
    try {
      try (final FileSystem fs = FileSystem.get(conf)) {
        if (!fs.exists(hdfsBaseDirectory)) {
          fs.mkdirs(hdfsBaseDirectory);
        }
        try {
          final StageRunData runData = new StageRunData(hdfsBaseDirectory, fs);
          processInput(inputPath, configFile, stageToHdfsPlugins, runData);
          runData.close();
          return true;
        } catch (final IOException e) {
          LOGGER.error("Unexpected I/O exception when reading input files", e);
          return false;
        }
      }
    } catch (final IOException e) {
      LOGGER.error("Unable to create remote HDFS directory", e);
      return false;
    }
  }
}


================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/AbstractLocalIngestWithMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Collections; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.ingest.avro.AbstractStageWholeFileToAvro; import org.locationtech.geowave.core.ingest.avro.AvroWholeFile; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class can be sub-classed as a general-purpose recipe for parallelizing ingestion of files * either locally or by directly staging the binary of the file to HDFS and then ingesting it within * the map phase of a map-reduce job. 
*/ public abstract class AbstractLocalIngestWithMapper extends AbstractStageWholeFileToAvro implements LocalFileIngestPlugin, IngestFromHdfsPlugin, Persistable { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLocalIngestWithMapper.class); @Override public boolean isUseReducerPreferred() { return false; } @Override public IngestWithMapper ingestWithMapper() { return new InternalIngestWithMapper<>(this); } @Override public CloseableIterator> toGeoWaveData( final URL input, final String[] indexNames) { try (final InputStream inputStream = input.openStream()) { return toGeoWaveDataInternal(inputStream, indexNames); } catch (final IOException e) { LOGGER.warn("Cannot open file, unable to ingest", e); } return new CloseableIterator.Wrapper(Collections.emptyIterator()); } protected abstract CloseableIterator> toGeoWaveDataInternal( final InputStream file, final String[] indexNames); @Override public IngestWithReducer ingestWithReducer() { return null; } protected static class InternalIngestWithMapper implements IngestWithMapper { private AbstractLocalIngestWithMapper parentPlugin; public InternalIngestWithMapper() {} public InternalIngestWithMapper(final AbstractLocalIngestWithMapper parentPlugin) { this.parentPlugin = parentPlugin; } @Override public DataTypeAdapter[] getDataAdapters() { return parentPlugin.getDataAdapters(); } @Override public CloseableIterator> toGeoWaveData( final AvroWholeFile input, final String[] indexNames) { final InputStream inputStream = new ByteBufferBackedInputStream(input.getOriginalFile()); return parentPlugin.toGeoWaveDataInternal(inputStream, indexNames); } @Override public byte[] toBinary() { return PersistenceUtils.toClassId(parentPlugin); } @Override public void fromBinary(final byte[] bytes) { parentPlugin = (AbstractLocalIngestWithMapper) PersistenceUtils.fromClassId(bytes); } @Override public String[] getSupportedIndexTypes() { return parentPlugin.getSupportedIndexTypes(); } } } 
================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/AbstractMapReduceIngest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.util.ArrayList; import java.util.List; import org.apache.avro.mapreduce.AvroJob; import org.apache.avro.mapreduce.AvroKeyInputFormat; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.util.Tool; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.ingest.DataAdapterProvider; import org.locationtech.geowave.core.store.operations.MetadataType; import 
org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import com.clearspring.analytics.util.Lists; /** * This class can be sub-classed to run map-reduce jobs within the ingest framework using plugins * provided by types that are discovered through SPI. * * @param The type of map-reduce ingest plugin that can be persisted to the map-reduce job * configuration and used by the mapper and/or reducer to ingest data */ public abstract class AbstractMapReduceIngest> extends Configured implements Tool { public static final String INGEST_PLUGIN_KEY = "INGEST_PLUGIN"; public static final String GLOBAL_VISIBILITY_KEY = "GLOBAL_VISIBILITY"; public static final String INDEX_NAMES_KEY = "INDEX_NAMES"; private static String JOB_NAME = "%s ingest from %s to namespace %s (%s)"; protected final DataStorePluginOptions dataStoreOptions; protected final List indices; protected final VisibilityOptions visibilityOptions; protected final Path inputFile; protected final String formatPluginName; protected final IngestFromHdfsPlugin parentPlugin; protected final T ingestPlugin; public AbstractMapReduceIngest( final DataStorePluginOptions dataStoreOptions, final List indices, final VisibilityOptions visibilityOptions, final Path inputFile, final String formatPluginName, final IngestFromHdfsPlugin parentPlugin, final T ingestPlugin) { this.dataStoreOptions = dataStoreOptions; this.indices = indices; this.visibilityOptions = visibilityOptions; this.inputFile = inputFile; this.formatPluginName = formatPluginName; this.parentPlugin = parentPlugin; this.ingestPlugin = ingestPlugin; } public String getJobName() { return String.format( JOB_NAME, formatPluginName, inputFile.toString(), dataStoreOptions.getGeoWaveNamespace(), getIngestDescription()); } protected abstract String getIngestDescription(); public static String[] getIndexNames(final Configuration conf) { final String primaryIndexNamesStr = conf.get(AbstractMapReduceIngest.INDEX_NAMES_KEY); if ((primaryIndexNamesStr != null) && 
!primaryIndexNamesStr.isEmpty()) { return primaryIndexNamesStr.split(","); } return new String[0]; } @Override public int run(final String[] args) throws Exception { final Configuration conf = getConf(); conf.set( INGEST_PLUGIN_KEY, ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(ingestPlugin))); final VisibilityHandler visibilityHandler = visibilityOptions.getConfiguredVisibilityHandler(); if (visibilityHandler != null) { conf.set( GLOBAL_VISIBILITY_KEY, ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(visibilityHandler))); } final Job job = new Job(conf, getJobName()); final StringBuilder indexNames = new StringBuilder(); final List indexes = new ArrayList<>(); for (final Index primaryIndex : indices) { indexes.add(primaryIndex); if (primaryIndex != null) { // add index GeoWaveOutputFormat.addIndex(job.getConfiguration(), primaryIndex); if (indexNames.length() != 0) { indexNames.append(","); } indexNames.append(primaryIndex.getName()); } } job.getConfiguration().set(INDEX_NAMES_KEY, indexNames.toString()); job.setJarByClass(AbstractMapReduceIngest.class); job.setInputFormatClass(AvroKeyInputFormat.class); AvroJob.setInputKeySchema(job, parentPlugin.getAvroSchema()); FileInputFormat.setInputPaths(job, inputFile); setupMapper(job); setupReducer(job); // set geowave output format job.setOutputFormatClass(GeoWaveOutputFormat.class); GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), dataStoreOptions); final DataStore store = dataStoreOptions.createDataStore(); final PersistentAdapterStore adapterStore = dataStoreOptions.createAdapterStore(); final InternalAdapterStore internalAdapterStore = dataStoreOptions.createInternalAdapterStore(); final DataTypeAdapter[] dataAdapters = ingestPlugin.getDataAdapters(); final Index[] indices = indexes.toArray(new Index[indexes.size()]); if ((dataAdapters != null) && (dataAdapters.length > 0)) { for (final DataTypeAdapter dataAdapter : dataAdapters) { // from a controlled client, intialize the writer 
within the // context of the datastore before distributing ingest // however, after ingest we should cleanup any pre-created // metadata for which there is no data try { store.addType( dataAdapter, visibilityOptions.getConfiguredVisibilityHandler(), Lists.newArrayList(), indices); final short adapterId = internalAdapterStore.getAdapterId(dataAdapter.getTypeName()); final InternalDataAdapter internalAdapter = adapterStore.getAdapter(adapterId); GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), internalAdapter); } catch (IllegalArgumentException e) { // Skip any adapters that can't be mapped to the input indices } } } else { // if the adapter is unknown by the ingest format, at least add the // indices from the client for (final Index index : indices) { store.addIndex(index); } if (indices.length > 0) { for (final MetadataType type : MetadataType.values()) { // stats and index metadata writers are created elsewhere if (!MetadataType.INDEX.equals(type) && !MetadataType.STATISTIC_VALUES.equals(type)) { dataStoreOptions.createDataStoreOperations().createMetadataWriter(type).close(); } } } } // this is done primarily to ensure stats merging is enabled before the // distributed ingest if (dataStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) { dataStoreOptions.createDataStoreOperations().createMetadataWriter( MetadataType.STATISTIC_VALUES).close(); } job.setSpeculativeExecution(false); // add required indices final Index[] requiredIndices = parentPlugin.getRequiredIndices(); if (requiredIndices != null) { for (final Index requiredIndex : requiredIndices) { GeoWaveOutputFormat.addIndex(job.getConfiguration(), requiredIndex); } } final int retVal = job.waitForCompletion(true) ? 
0 : -1; // when it is complete, delete any empty adapters and index mappings // that were created from this driver but didn't actually have data // ingests if ((dataAdapters != null) && (dataAdapters.length > 0)) { AdapterIndexMappingStore adapterIndexMappingStore = null; for (final DataTypeAdapter dataAdapter : dataAdapters) { final String typeName = dataAdapter.getTypeName(); try (CloseableIterator it = store.query(QueryBuilder.newBuilder().addTypeName(typeName).limit(1).build())) { if (!it.hasNext()) { if (adapterIndexMappingStore == null) { adapterIndexMappingStore = dataStoreOptions.createAdapterIndexMappingStore(); } final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId != null) { internalAdapterStore.remove(adapterId); adapterStore.removeAdapter(adapterId); adapterIndexMappingStore.remove(adapterId); } } } } } return retVal; } protected abstract void setupMapper(Job job); protected abstract void setupReducer(Job job); } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/ByteBufferBackedInputStream.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * An {@link InputStream} view over a {@link ByteBuffer}. Reads consume the buffer's position; the
 * stream is exhausted when the buffer has no remaining bytes. Not thread-safe beyond the
 * synchronization {@link InputStream} itself mandates on mark/reset.
 */
public class ByteBufferBackedInputStream extends InputStream {
  private final ByteBuffer buf;

  public ByteBufferBackedInputStream(final ByteBuffer buf) {
    this.buf = buf;
  }

  /** @return the next byte as an unsigned value 0-255, or -1 at end of buffer */
  @Override
  public int read() throws IOException {
    if (!buf.hasRemaining()) {
      return -1;
    }
    return buf.get() & 0xFF;
  }

  @Override
  public int read(final byte[] bytes, final int off, final int len) throws IOException {
    if (!buf.hasRemaining()) {
      return -1;
    }
    final int count = Math.min(len, buf.remaining());
    buf.get(bytes, off, count);
    return count;
  }

  @Override
  public int read(final byte[] bytes) throws IOException {
    return read(bytes, 0, bytes.length);
  }

  @Override
  public int available() throws IOException {
    return buf.remaining();
  }

  /** Mark is supported by delegating to the buffer's own mark. */
  @Override
  public boolean markSupported() {
    return true;
  }

  @Override
  public synchronized void mark(final int readlimit) {
    // the read limit is irrelevant for an in-memory buffer
    buf.mark();
  }

  @Override
  public synchronized void reset() throws IOException {
    buf.reset();
  }

  @Override
  public long skip(final long len) throws IOException {
    // Contract fix: the previous implementation allocated a scratch byte[] and threw
    // BufferUnderflowException when asked to skip past the end (and blew up on negative
    // lengths). Per the InputStream contract, clamp to what remains and report the
    // number of bytes actually skipped.
    if (len <= 0) {
      return 0;
    }
    final int count = (int) Math.min(len, buf.remaining());
    buf.position(buf.position() + count);
    return count;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.ToolRunner; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.ingest.DataAdapterProvider; import org.locationtech.geowave.core.store.ingest.IngestUtils; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * This class actually executes the ingestion of intermediate data into GeoWave that had been staged * in HDFS. 
*/ public class IngestFromHdfsDriver { private static final Logger LOGGER = LoggerFactory.getLogger(IngestFromHdfsDriver.class); private static final int NUM_CONCURRENT_JOBS = 5; private static final int DAYS_TO_AWAIT_COMPLETION = 999; protected final DataStorePluginOptions storeOptions; protected final List indices; protected final VisibilityOptions ingestOptions; private final MapReduceCommandLineOptions mapReduceOptions; private final Map> ingestPlugins; private final String hdfsHostPort; private final String basePath; private static ExecutorService singletonExecutor; public IngestFromHdfsDriver( final DataStorePluginOptions storeOptions, final List indices, final VisibilityOptions ingestOptions, final MapReduceCommandLineOptions mapReduceOptions, final Map> ingestPlugins, final String hdfsHostPort, final String basePath) { this.storeOptions = storeOptions; this.indices = indices; this.ingestOptions = ingestOptions; this.mapReduceOptions = mapReduceOptions; this.ingestPlugins = ingestPlugins; this.hdfsHostPort = hdfsHostPort; this.basePath = basePath; } private static synchronized ExecutorService getSingletonExecutorService() { if ((singletonExecutor == null) || singletonExecutor.isShutdown()) { singletonExecutor = Executors.newFixedThreadPool(NUM_CONCURRENT_JOBS); } return singletonExecutor; } private boolean checkIndexesAgainstProvider( final String providerName, final DataAdapterProvider adapterProvider) { boolean valid = true; for (final Index index : indices) { if (!IngestUtils.isCompatible(adapterProvider, index)) { // HP Fortify "Log Forging" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway LOGGER.warn( "HDFS file ingest plugin for ingest type '" + providerName + "' is not supported by index '" + index.getName() + "'"); valid = false; } } return valid; } public boolean runOperation() { final Path hdfsBaseDirectory = new Path(basePath); try { final Configuration conf = new Configuration(); 
GeoWaveConfiguratorBase.setRemoteInvocationParams( hdfsHostPort, mapReduceOptions.getJobTrackerOrResourceManagerHostPort(), conf); mapReduceOptions.applyConfigurationProperties(conf); try (FileSystem fs = FileSystem.get(conf)) { if (!fs.exists(hdfsBaseDirectory)) { LOGGER.error("HDFS base directory {} does not exist", hdfsBaseDirectory); return false; } for (final Entry> pluginProvider : ingestPlugins.entrySet()) { // if an appropriate sequence file does not exist, continue // TODO: we should probably clean up the type name to make // it HDFS path safe in case there are invalid characters final Path inputFile = new Path(hdfsBaseDirectory, pluginProvider.getKey()); if (!fs.exists(inputFile)) { LOGGER.warn( "HDFS file '" + inputFile + "' does not exist for ingest type '" + pluginProvider.getKey() + "'"); continue; } final IngestFromHdfsPlugin ingestFromHdfsPlugin = pluginProvider.getValue(); IngestWithReducer ingestWithReducer = null; IngestWithMapper ingestWithMapper = null; // first find one preferred method of ingest from HDFS // (exclusively setting one or the other instance above) if (ingestFromHdfsPlugin.isUseReducerPreferred()) { ingestWithReducer = ingestFromHdfsPlugin.ingestWithReducer(); if (ingestWithReducer == null) { LOGGER.warn( "Plugin provider '" + pluginProvider.getKey() + "' prefers ingest with reducer but it is unimplemented"); } } if (ingestWithReducer == null) { // check for ingest with mapper ingestWithMapper = ingestFromHdfsPlugin.ingestWithMapper(); if ((ingestWithMapper == null) && !ingestFromHdfsPlugin.isUseReducerPreferred()) { ingestWithReducer = ingestFromHdfsPlugin.ingestWithReducer(); if (ingestWithReducer == null) { LOGGER.warn( "Plugin provider '" + pluginProvider.getKey() + "' does not does not support ingest from HDFS"); continue; } else { LOGGER.warn( "Plugin provider '" + pluginProvider.getKey() + "' prefers ingest with mapper but it is unimplemented"); } } } AbstractMapReduceIngest jobRunner = null; if (ingestWithReducer != null) 
{ if (!checkIndexesAgainstProvider(pluginProvider.getKey(), ingestWithReducer)) { continue; } jobRunner = new IngestWithReducerJobRunner( storeOptions, indices, ingestOptions, inputFile, pluginProvider.getKey(), ingestFromHdfsPlugin, ingestWithReducer); } else if (ingestWithMapper != null) { if (!checkIndexesAgainstProvider(pluginProvider.getKey(), ingestWithMapper)) { continue; } jobRunner = new IngestWithMapperJobRunner( storeOptions, indices, ingestOptions, inputFile, pluginProvider.getKey(), ingestFromHdfsPlugin, ingestWithMapper); } if (jobRunner != null) { try { runJob(conf, jobRunner); } catch (final Exception e) { LOGGER.warn("Error running ingest job", e); return false; } } } } } catch (final IOException e) { LOGGER.warn("Error in accessing HDFS file system", e); return false; } finally { final ExecutorService executorService = getSingletonExecutorService(); executorService.shutdown(); // do we want to just exit once our jobs are submitted or wait? // for now let's just wait a REALLY long time until all of the // submitted jobs complete try { executorService.awaitTermination(DAYS_TO_AWAIT_COMPLETION, TimeUnit.DAYS); } catch (final InterruptedException e) { LOGGER.error("Error waiting for submitted jobs to complete", e); } } // we really do not know if the service failed...bummer return true; } private void runJob(final Configuration conf, final AbstractMapReduceIngest jobRunner) throws Exception { final ExecutorService executorService = getSingletonExecutorService(); executorService.execute(new Runnable() { @Override public void run() { try { final int res = ToolRunner.run(conf, jobRunner, new String[0]); if (res != 0) { LOGGER.error( "Mapper ingest job '" + jobRunner.getJobName() + "' exited with error code: " + res); } } catch (final Exception e) { LOGGER.error("Error running mapper ingest job: " + jobRunner.getJobName(), e); } } }); } } ================================================ FILE: 
core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestFromHdfsPlugin.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroSchemaProvider; import org.locationtech.geowave.core.store.ingest.IndexProvider; /** * This is the main plugin interface for ingesting intermediate data into Geowave that has * previously been staged in HDFS. Although both of the available map-reduce ingestion techniques * can be implemented (one that simply uses the mapper only, and another that uses the reducer as * well), the framework will choose only one based on this plugin's preference, so it is unnecessary * to implement both (in this case returning null is expected if its not implemented). * * @param the type for intermediate data, it must match the type supported by the Avro schema * @param the type that represents each data entry being ingested */ public interface IngestFromHdfsPlugin extends IndexProvider, GeoWaveAvroSchemaProvider { /** * Returns a flag indicating to the ingestion framework whether it should try to use the * ingestWithMapper() implementation or the ingestWithReducer() implementation in the case that * both implementations are non-null. 
* * @return If true, the framework will use ingestWithReducer() and only fall back to * ingestWithMapper() if necessary, otherwise the behavior will be the reverse */ public boolean isUseReducerPreferred(); /** * An implementation of ingestion that can be persisted to a mapper within the map-reduce job * configuration to perform an ingest of data into GeoWave from intermediate data * * @return The implementation for ingestion with only a mapper */ public IngestWithMapper ingestWithMapper(); /** * An implementation of ingestion that can be persisted to a mapper and reducer within the * map-reduce job configuration to aggregate intermediate data by defined keys within a reducer * and perform an ingest of data into GeoWave from the key-value pairs emitted by the mapper. * * @return The implementation for ingestion with a mapper and reducer. It is important to provide * the correct concrete implementation of Key and Value classes within the appropriate * generics because the framework will use reflection to set the key and value classes for * map-reduce. */ public IngestWithReducer ingestWithReducer(); } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.io.IOException; import org.apache.avro.mapred.AvroKey; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; /** This class is the map-reduce mapper for ingestion with the mapper only. */ public class IngestMapper extends Mapper { private IngestWithMapper ingestWithMapper; private String[] indexNames; @Override protected void map( final AvroKey key, final NullWritable value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { try (CloseableIterator data = ingestWithMapper.toGeoWaveData(key.datum(), indexNames)) { while (data.hasNext()) { final GeoWaveData d = data.next(); context.write(new GeoWaveOutputKey<>(d), d.getValue()); } } } @Override protected void setup(final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { super.setup(context); try { final String ingestWithMapperStr = context.getConfiguration().get(AbstractMapReduceIngest.INGEST_PLUGIN_KEY); final byte[] ingestWithMapperBytes = ByteArrayUtils.byteArrayFromString(ingestWithMapperStr); ingestWithMapper = (IngestWithMapper) PersistenceUtils.fromBinary(ingestWithMapperBytes); indexNames = AbstractMapReduceIngest.getIndexNames(context.getConfiguration()); 
} catch (final Exception e) { throw new IllegalArgumentException(e); } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.AbstractLocalIngestWithMapper.InternalIngestWithMapper; public class IngestPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return new PersistableIdAndConstructor[] { new PersistableIdAndConstructor((short) 400, InternalIngestWithMapper::new),}; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.io.IOException; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; /** This is the map-reduce reducer for ingestion with both the mapper and reducer. */ public class IngestReducer extends Reducer, Writable, GeoWaveOutputKey, Object> { private IngestWithReducer ingestWithReducer; private String[] indexNames; @Override protected void reduce( final WritableComparable key, final Iterable values, final Context context) throws IOException, InterruptedException { try (CloseableIterator data = ingestWithReducer.toGeoWaveData(key, indexNames, values)) { while (data.hasNext()) { final GeoWaveData d = data.next(); context.write(new GeoWaveOutputKey<>(d), d.getValue()); } } } @Override protected void setup(final Context context) throws IOException, InterruptedException { super.setup(context); try { final String ingestWithReducerStr = context.getConfiguration().get(AbstractMapReduceIngest.INGEST_PLUGIN_KEY); final byte[] ingestWithReducerBytes = ByteArrayUtils.byteArrayFromString(ingestWithReducerStr); ingestWithReducer = (IngestWithReducer) PersistenceUtils.fromBinary(ingestWithReducerBytes); indexNames = 
AbstractMapReduceIngest.getIndexNames(context.getConfiguration()); } catch (final Exception e) { throw new IllegalArgumentException(e); } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.ingest.IngestPluginBase; /** * This interface is used by the IngestFromHdfsPlugin to implement ingestion within a mapper only. * The implementation will be directly persisted to a mapper and called to produce GeoWaveData to be * written. * * @param data type for intermediate data * @param data type that will be ingested into GeoWave */ public interface IngestWithMapper extends IngestPluginBase, Persistable { } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithMapperJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.util.List; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; /** This will run the mapper only ingest process. */ public class IngestWithMapperJobRunner extends AbstractMapReduceIngest> { public IngestWithMapperJobRunner( final DataStorePluginOptions storeOptions, final List indices, final VisibilityOptions ingestOptions, final Path inputFile, final String formatPluginName, final IngestFromHdfsPlugin plugin, final IngestWithMapper mapperIngest) { super(storeOptions, indices, ingestOptions, inputFile, formatPluginName, plugin, mapperIngest); } @Override protected void setupReducer(final Job job) { job.setNumReduceTasks(0); } @Override protected String getIngestDescription() { return "map only"; } @Override protected void setupMapper(final Job job) { job.setMapperClass(IngestMapper.class); // set mapper output info job.setMapOutputKeyClass(GeoWaveOutputKey.class); job.setMapOutputValueClass(Object.class); } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.ingest.DataAdapterProvider; import org.locationtech.geowave.core.store.ingest.GeoWaveData; /** * This interface is used by the IngestFromHdfsPlugin to implement ingestion with a mapper to * aggregate key value pairs and a reducer to ingest data into GeoWave. The implementation will be * directly persisted to the job configuration and called to generate key value pairs from * intermediate data in the mapper and to produce GeoWaveData to be written in the reducer. 
* * @param data type for intermediate data * @param K the type for the keys to be produced by the mapper from intermediate data, this should * be the concrete type that is used because through reflection it will be given as the key * class for map-reduce * @param V the type for the values to be produced by the mapper from intermediate data, this should * be the concrete type that is used because through reflection it will be given as the value * class for map-reduce * @param data type that will be ingested into GeoWave */ public interface IngestWithReducer, V extends Writable, O> extends DataAdapterProvider, Persistable { public CloseableIterator> toIntermediateMapReduceData(I input); public CloseableIterator> toGeoWaveData( K key, String[] indexNames, Iterable values); } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IngestWithReducerJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.util.List; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.util.GenericTypeResolver; /** * This class will run the ingestion process by using a mapper to aggregate key value pairs and a * reducer to ingest data into GeoWave. */ public class IngestWithReducerJobRunner extends AbstractMapReduceIngest> { public IngestWithReducerJobRunner( final DataStorePluginOptions storeOptions, final List indices, final VisibilityOptions ingestOptions, final Path inputFile, final String typeName, final IngestFromHdfsPlugin parentPlugin, final IngestWithReducer ingestPlugin) { super(storeOptions, indices, ingestOptions, inputFile, typeName, parentPlugin, ingestPlugin); } @Override protected String getIngestDescription() { return "with reducer"; } @Override protected void setupMapper(final Job job) { job.setMapperClass(IntermediateKeyValueMapper.class); final Class[] genericClasses = GenericTypeResolver.resolveTypeArguments(ingestPlugin.getClass(), IngestWithReducer.class); // set mapper output info job.setMapOutputKeyClass(genericClasses[1]); job.setMapOutputValueClass(genericClasses[2]); } @Override protected void setupReducer(final Job job) { job.setReducerClass(IngestReducer.class); if (job.getNumReduceTasks() <= 1) { // the default is one reducer, if its only one, set it to 8 as the // default 
job.setNumReduceTasks(8); } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/IntermediateKeyValueMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.io.IOException; import org.apache.avro.mapred.AvroKey; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.CloseableIterator; /** * This class is the mapper used when aggregating key value pairs from intermediate data to be * ingested into GeoWave using a reducer. */ public class IntermediateKeyValueMapper extends Mapper, Writable> { private IngestWithReducer ingestWithReducer; @Override protected void map( final AvroKey key, final NullWritable value, final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { try (CloseableIterator, Writable>> data = ingestWithReducer.toIntermediateMapReduceData(key.datum())) { while (data.hasNext()) { final KeyValueData, Writable> d = data.next(); context.write(d.getKey(), d.getValue()); } } } @Override protected void setup(final org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { super.setup(context); try { final String ingestWithReducerStr = context.getConfiguration().get(AbstractMapReduceIngest.INGEST_PLUGIN_KEY); final byte[] ingestWithReducerBytes = ByteArrayUtils.byteArrayFromString(ingestWithReducerStr); ingestWithReducer = (IngestWithReducer) PersistenceUtils.fromBinary(ingestWithReducerBytes); } catch (final Exception e) { throw new 
IllegalArgumentException(e); } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/KeyValueData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; /** * The Key-Value pair that will be emitted from a mapper and used by a reducer in the * IngestWithReducer flow. * * @param The type for the key to be emitted * @param The type for the value to be emitted */ public class KeyValueData, V extends Writable> { private final K key; private final V value; public KeyValueData(final K key, final V value) { this.key = key; this.value = value; } public K getKey() { return key; } public V getValue() { return value; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/hdfs/mapreduce/MapReduceCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.hdfs.mapreduce; import java.util.List; import org.apache.hadoop.conf.Configuration; import com.beust.jcommander.Parameter; import com.beust.jcommander.converters.IParameterSplitter; import com.google.common.collect.Lists; /** * This class encapsulates all of the options and parsed values specific to setting up the GeoWave * ingestion framework to run on hadoop map-reduce. Currently the only required parameter is the * host name and port for the hadoop job tracker. */ public class MapReduceCommandLineOptions { @Parameter( names = "--jobtracker", description = "Hadoop job tracker hostname and port in the format hostname:port") private String jobTrackerHostPort; @Parameter( names = "--resourceman", description = "Yarn resource manager hostname and port in the format hostname:port") private String resourceManager; @Parameter( names = "--conf", description = "Job configuration property in the format Name=Value", splitter = NoSplitter.class) private List configurationProperties; public MapReduceCommandLineOptions() {} public String getJobTrackerHostPort() { return jobTrackerHostPort; } public void setJobTrackerHostPort(final String jobTrackerHostPort) { this.jobTrackerHostPort = jobTrackerHostPort; } public String getResourceManager() { return resourceManager; } public void setResourceManager(final String resourceManager) { this.resourceManager = resourceManager; } public String getJobTrackerOrResourceManagerHostPort() { return jobTrackerHostPort == null ? 
resourceManager : jobTrackerHostPort; } public void setConfigurationProperties(final List configurationProperties) { this.configurationProperties = configurationProperties; } public void applyConfigurationProperties(final Configuration conf) { if (configurationProperties != null) { for (final String property : configurationProperties) { final String[] kvp = property.split("="); if (kvp.length != 2) { throw new IllegalArgumentException("Unable to use configuration property: " + property); } conf.set(kvp[0], kvp[1]); } } } public static class NoSplitter implements IParameterSplitter { @Override public List split(final String value) { return Lists.newArrayList(value); } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/IngestFromKafkaDriver.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.ByteArrayDeserializer; import org.locationtech.geowave.core.ingest.avro.GenericAvroSerializer; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.core.store.ingest.IndexProvider; import 
org.locationtech.geowave.core.store.ingest.IngestPluginBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** This class executes the ingestion of intermediate data from a Kafka topic into GeoWave. */ public class IngestFromKafkaDriver { private static final Logger LOGGER = LoggerFactory.getLogger(IngestFromKafkaDriver.class); private final DataStorePluginOptions storeOptions; private final List indices; private final Map> ingestPlugins; private final KafkaConsumerCommandLineOptions kafkaOptions; private final VisibilityHandler visibilityHandler; private final List> futures = new ArrayList<>(); public IngestFromKafkaDriver( final DataStorePluginOptions storeOptions, final List indices, final Map> ingestPlugins, final KafkaConsumerCommandLineOptions kafkaOptions, final VisibilityHandler visibilityHandler) { this.storeOptions = storeOptions; this.indices = indices; this.ingestPlugins = ingestPlugins; this.kafkaOptions = kafkaOptions; this.visibilityHandler = visibilityHandler; } public boolean runOperation() { final DataStore dataStore = storeOptions.createDataStore(); final List queue = new ArrayList<>(); addPluginsToQueue(ingestPlugins, queue); configureAndLaunchPlugins(dataStore, ingestPlugins, queue); int counter = 0; while (queue.size() > 0) { if (counter > 30) { for (final String pluginFormatName : queue) { LOGGER.error("Unable to start up Kafka consumer for plugin [" + pluginFormatName + "]"); } break; } try { Thread.sleep(1000); } catch (final InterruptedException e) { LOGGER.error("Thread interrupted", e); } counter++; } if (queue.size() == 0) { LOGGER.info("All format plugins are now listening on Kafka topics"); } else { LOGGER.warn("Unable to setup Kafka consumers for the following format plugins:"); for (final String formatPluginName : queue) { LOGGER.warn("\t[" + formatPluginName + "]"); } return false; } return true; } private void addPluginsToQueue( final Map> pluginProviders, final List queue) { queue.addAll(pluginProviders.keySet()); } 
private void configureAndLaunchPlugins( final DataStore dataStore, final Map> pluginProviders, final List queue) { try { for (final Entry> pluginProvider : pluginProviders.entrySet()) { final List> adapters = new ArrayList<>(); GeoWaveAvroFormatPlugin avroFormatPlugin = null; try { avroFormatPlugin = pluginProvider.getValue(); final IngestPluginBase ingestWithAvroPlugin = avroFormatPlugin.getIngestWithAvroPlugin(); final DataTypeAdapter[] dataAdapters = ingestWithAvroPlugin.getDataAdapters(); adapters.addAll(Arrays.asList(dataAdapters)); final KafkaIngestRunData runData = new KafkaIngestRunData(adapters, dataStore); futures.add( launchTopicConsumer(pluginProvider.getKey(), avroFormatPlugin, runData, queue)); } catch (final UnsupportedOperationException e) { LOGGER.warn( "Plugin provider '" + pluginProvider.getKey() + "' does not support ingest from Kafka", e); continue; } } } catch (final Exception e) { LOGGER.warn("Error in accessing Kafka stream", e); } } private Consumer buildKafkaConsumer() { final Properties kafkaProperties = kafkaOptions.getProperties(); final Consumer consumer = new KafkaConsumer<>( kafkaProperties, new ByteArrayDeserializer(), new ByteArrayDeserializer()); return consumer; } private Future launchTopicConsumer( final String formatPluginName, final GeoWaveAvroFormatPlugin avroFormatPlugin, final KafkaIngestRunData ingestRunData, final List queue) throws IllegalArgumentException { final ExecutorService executorService = Executors.newFixedThreadPool(queue.size()); return executorService.submit(new Runnable() { @Override public void run() { try { consumeFromTopic(formatPluginName, avroFormatPlugin, ingestRunData, queue); } catch (final Exception e) { LOGGER.error("Error consuming from Kafka topic [" + formatPluginName + "]", e); } } }); } public void consumeFromTopic( final String formatPluginName, final GeoWaveAvroFormatPlugin avroFormatPlugin, final KafkaIngestRunData ingestRunData, final List queue) { try (final Consumer consumer = 
buildKafkaConsumer()) { if (consumer == null) { throw new RuntimeException( "Kafka consumer connector is null, unable to create message streams"); } LOGGER.debug( "Kafka consumer setup for format [" + formatPluginName + "] against topic [" + formatPluginName + "]"); queue.remove(formatPluginName); consumer.subscribe(Collections.singletonList(formatPluginName)); final String timeoutMs = kafkaOptions.getConsumerTimeoutMs(); long millis = -1; if ((timeoutMs != null) && !timeoutMs.trim().isEmpty()) { try { millis = Long.parseLong(timeoutMs); } catch (final Exception e) { LOGGER.warn("Cannot parse consumer timeout", e); } } final Duration timeout = millis > 0 ? Duration.ofMillis(millis) : Duration.ofDays(1000); consumeMessages(formatPluginName, avroFormatPlugin, ingestRunData, consumer, timeout); } } protected void consumeMessages( final String formatPluginName, final GeoWaveAvroFormatPlugin avroFormatPlugin, final KafkaIngestRunData ingestRunData, final Consumer consumer, final Duration timeout) { int currentBatchId = 0; final int batchSize = kafkaOptions.getBatchSize(); try { final ConsumerRecords iterator = consumer.poll(timeout); for (final ConsumerRecord msg : iterator) { LOGGER.info("[" + formatPluginName + "] message received"); final T dataRecord = GenericAvroSerializer.deserialize(msg.value(), avroFormatPlugin.getAvroSchema()); if (dataRecord != null) { try { processMessage(dataRecord, ingestRunData, avroFormatPlugin); if (++currentBatchId > batchSize) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("Flushing %d items", currentBatchId)); } ingestRunData.flush(); currentBatchId = 0; } } catch (final Exception e) { LOGGER.error("Error processing message: " + e.getMessage(), e); } } } // Flush any outstanding items if (currentBatchId > 0) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("Flushing %d items", currentBatchId)); } ingestRunData.flush(); currentBatchId = 0; } if (kafkaOptions.isFlushAndReconnect()) { LOGGER.info( "Consumer timed 
out from Kafka topic [" + formatPluginName + "]... Reconnecting..."); consumeMessages(formatPluginName, avroFormatPlugin, ingestRunData, consumer, timeout); } else { LOGGER.info("Consumer timed out from Kafka topic [" + formatPluginName + "]... "); } } catch (final Exception e) { LOGGER.warn("Consuming from Kafka topic [" + formatPluginName + "] was interrupted... ", e); } } protected synchronized void processMessage( final T dataRecord, final KafkaIngestRunData ingestRunData, final GeoWaveAvroFormatPlugin plugin) throws IOException { final IngestPluginBase ingestPlugin = plugin.getIngestWithAvroPlugin(); final IndexProvider indexProvider = plugin; final Map writerMap = new HashMap<>(); final Map indexMap = new HashMap<>(); for (final Index index : indices) { indexMap.put(index.getName(), index); } final Index[] requiredIndices = indexProvider.getRequiredIndices(); if ((requiredIndices != null) && (requiredIndices.length > 0)) { for (final Index requiredIndex : requiredIndices) { indexMap.put(requiredIndex.getName(), requiredIndex); } } try (CloseableIterator geowaveDataIt = ingestPlugin.toGeoWaveData(dataRecord, indexMap.keySet().toArray(new String[0]))) { while (geowaveDataIt.hasNext()) { final GeoWaveData geowaveData = (GeoWaveData) geowaveDataIt.next(); final DataTypeAdapter adapter = ingestRunData.getDataAdapter(geowaveData); if (adapter == null) { LOGGER.warn("Adapter not found for " + geowaveData.getValue()); continue; } Writer indexWriter = writerMap.get(adapter.getTypeName()); if (indexWriter == null) { final List indexList = new ArrayList<>(); for (final String indexName : geowaveData.getIndexNames()) { final Index index = indexMap.get(indexName); if (index == null) { LOGGER.warn("Index '" + indexName + "' not found for " + geowaveData.getValue()); continue; } indexList.add(index); } indexWriter = ingestRunData.getIndexWriter( adapter, visibilityHandler, indexList.toArray(new Index[indexList.size()])); writerMap.put(adapter.getTypeName(), indexWriter); } 
indexWriter.write(geowaveData.getValue()); } } } public List> getFutures() { return futures; } /** * @return {@code true} if all futures are complete */ public boolean isComplete() { for (final Future future : futures) { if (!future.isDone()) { return false; } } return true; } /** * Wait for all kafka topics to complete, then return the result objects. * * @return the future results * @throws InterruptedException * @throws ExecutionException */ public List waitFutures() throws InterruptedException, ExecutionException { final List results = new ArrayList<>(); for (final Future future : futures) { results.add(future.get()); } return results; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaCommandLineArgument.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; public class KafkaCommandLineArgument { private final String argName; private final String argDescription; private final String kafkaParamName; private final boolean required; public KafkaCommandLineArgument( final String argName, final String argDescription, final String kafkaParamName, final boolean required) { this.argName = argName; this.argDescription = "See Kafka documention for '" + kafkaParamName + "'" + argDescription; this.kafkaParamName = kafkaParamName; this.required = required; } public String getArgName() { return argName; } public String getArgDescription() { return argDescription; } public String getKafkaParamName() { return kafkaParamName; } public boolean isRequired() { return required; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.util.Properties; import org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator; import org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap; import org.locationtech.geowave.core.cli.prefix.TranslationEntry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; public class KafkaCommandLineOptions { private static final Logger LOGGER = LoggerFactory.getLogger(KafkaCommandLineOptions.class); @Parameter( names = "--kafkaprops", required = false, description = "Properties file containing Kafka properties") private String kafkaPropertyFile; // After initProperties() private Properties kafkaProperties = null; public KafkaCommandLineOptions() {} public Properties getProperties() { initProperties(); return kafkaProperties; } public synchronized void initProperties() { if (kafkaProperties == null) { final Properties properties = new Properties(); if (kafkaPropertyFile != null) { if (!readAndVerifyProperties(kafkaPropertyFile, properties)) { throw new ParameterException("Unable to read properties file"); } } applyOverrides(properties); kafkaProperties = properties; } } /** * This function looks as 'this' and checks for @PropertyReference annotations, and overrides the * string values into the props list based on the propety name in the annotation value. 
*/ private void applyOverrides(final Properties properties) { // Get the parameters specified in this object. final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(this); final JCommanderTranslationMap map = translator.translate(); // Find objects with the PropertyReference annotation for (final TranslationEntry entry : map.getEntries().values()) { if (entry.hasValue()) { final PropertyReference ref = entry.getMember().getAnnotation(PropertyReference.class); if (ref != null) { final String propKey = ref.value(); final String propStringValue = entry.getParam().get(entry.getObject()).toString(); properties.setProperty(propKey, propStringValue); } } } } private static boolean readAndVerifyProperties( final String kafkaPropertiesPath, final Properties properties) { final File propFile = new File(kafkaPropertiesPath); if (!propFile.exists()) { LOGGER.error("File does not exist: " + kafkaPropertiesPath); return false; } try (final FileInputStream fileInputStream = new FileInputStream(propFile); final InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream, "UTF-8")) { properties.load(inputStreamReader); inputStreamReader.close(); } catch (final IOException e) { LOGGER.error("Unable to load Kafka properties file: ", e); return false; } return true; } /** * Find bugs complained, so I added synchronized. * * @param kafkaPropertyFile */ public synchronized void setKafkaPropertyFile(final String kafkaPropertyFile) { this.kafkaPropertyFile = kafkaPropertyFile; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaConsumerCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.kafka;

import org.apache.kafka.clients.CommonClientConfigs;
import com.beust.jcommander.Parameter;

/**
 * Consumer-side Kafka command line options. Fields annotated with {@code @PropertyReference} are
 * copied into the Kafka consumer {@code Properties} under the referenced Kafka property name (see
 * {@code KafkaCommandLineOptions#initProperties()}); the remaining fields (timeout, reconnect,
 * batch size) are consumed directly by the ingest driver.
 */
public class KafkaConsumerCommandLineOptions extends KafkaCommandLineOptions {
  // Maps to the Kafka consumer property "group.id"
  @PropertyReference("group.id")
  @Parameter(
      names = "--groupId",
      description = "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group.")
  private String groupId;

  // Maps to the Kafka consumer property "bootstrap.servers"
  @PropertyReference(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)
  @Parameter(
      names = "--bootstrapServers",
      description = "This is for bootstrapping and the consumer will only use it for getting metadata (topics, partitions and replicas). The socket connections for sending the actual data will be established based on the broker information returned in the metadata. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of brokers.")
  private String bootstrapServers;

  // Maps to the Kafka consumer property "auto.offset.reset"
  @PropertyReference("auto.offset.reset")
  @Parameter(
      names = "--autoOffsetReset",
      description = "What to do when there is no initial offset in ZooKeeper or if an offset is out of range:\n"
          + "\t* earliest: automatically reset the offset to the earliest offset\n"
          + "\t* latest: automatically reset the offset to the latest offset\n"
          + "\t* none: don't reset the offset\n"
          + "\t* anything else: throw exception to the consumer\n")
  private String autoOffsetReset;

  // Maps to the Kafka consumer property "max.partition.fetch.bytes"
  @PropertyReference("max.partition.fetch.bytes")
  @Parameter(
      names = "--maxPartitionFetchBytes",
      description = "The number of bytes of messages to attempt to fetch for each topic-partition in each fetch request. These bytes will be read into memory for each partition, so this helps control the memory used by the consumer. The fetch request size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch.")
  private String maxPartitionFetchBytes;

  // Not a Kafka property reference; parsed by the ingest driver to build the poll timeout
  @Parameter(
      names = "--consumerTimeoutMs",
      description = "By default, this value is -1 and a consumer blocks indefinitely if no new message is available for consumption. By setting the value to a positive integer, a timeout exception is thrown to the consumer if no message is available for consumption after the specified timeout value.")
  private String consumerTimeoutMs;

  // When true, the driver flushes and re-polls after a consumer timeout instead of stopping
  @Parameter(
      names = "--reconnectOnTimeout",
      description = "This flag will flush when the consumer timeout occurs (based on kafka property 'consumer.timeout.ms') and immediately reconnect")
  private boolean reconnectOnTimeout = false;

  // Number of ingested entries between writer flushes
  @Parameter(
      names = "--batchSize",
      description = "The data will automatically flush after this number of entries")
  private int batchSize = 10000;

  /** @return whether to flush and immediately re-poll after a consumer timeout */
  public boolean isFlushAndReconnect() {
    return reconnectOnTimeout;
  }

  public int getBatchSize() {
    return batchSize;
  }

  public String getBootstrapServers() {
    return bootstrapServers;
  }

  public void setBootstrapServers(final String bootstrapServers) {
    this.bootstrapServers = bootstrapServers;
  }

  public String getGroupId() {
    return groupId;
  }

  public void setGroupId(final String groupId) {
    this.groupId = groupId;
  }

  public String getAutoOffsetReset() {
    return autoOffsetReset;
  }

  public void setAutoOffsetReset(final String autoOffsetReset) {
    this.autoOffsetReset = autoOffsetReset;
  }

  public String getMaxPartitionFetchBytes() {
    return maxPartitionFetchBytes;
  }

  public void setMaxPartitionFetchBytes(final String maxPartitionFetchBytes) {
    this.maxPartitionFetchBytes = maxPartitionFetchBytes;
  }

  public String getConsumerTimeoutMs() {
    return consumerTimeoutMs;
  }

  public void setConsumerTimeoutMs(final String consumerTimeoutMs) {
    this.consumerTimeoutMs = consumerTimeoutMs;
  }

  public boolean isReconnectOnTimeout() {
    return reconnectOnTimeout;
  }

  public void setReconnectOnTimeout(final boolean reconnectOnTimeout) {
    this.reconnectOnTimeout = reconnectOnTimeout;
  }

  public void setBatchSize(final int batchSize) {
    this.batchSize = batchSize;
  }
}

================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaIngestRunData.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import java.io.Closeable; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.core.store.memory.MemoryAdapterStore; import com.clearspring.analytics.util.Lists; /** * A class to hold intermediate run data that must be used throughout the life of an ingest process. */ public class KafkaIngestRunData implements Closeable { private final Map adapterIdToWriterCache = new HashMap<>(); private final TransientAdapterStore adapterCache; private final DataStore dataStore; public KafkaIngestRunData(final List> adapters, final DataStore dataStore) { this.dataStore = dataStore; adapterCache = new MemoryAdapterStore(adapters.toArray(new DataTypeAdapter[adapters.size()])); } public DataTypeAdapter getDataAdapter(final GeoWaveData data) { return data.getAdapter(adapterCache); } public synchronized Writer getIndexWriter( final DataTypeAdapter adapter, final VisibilityHandler visibilityHandler, final Index... 
requiredIndices) { Writer indexWriter = adapterIdToWriterCache.get(adapter.getTypeName()); if (indexWriter == null) { dataStore.addType(adapter, visibilityHandler, Lists.newArrayList(), requiredIndices); indexWriter = dataStore.createWriter(adapter.getTypeName(), visibilityHandler); adapterIdToWriterCache.put(adapter.getTypeName(), indexWriter); } return indexWriter; } @Override public void close() throws IOException { synchronized (this) { for (final Writer indexWriter : adapterIdToWriterCache.values()) { indexWriter.close(); } adapterIdToWriterCache.clear(); } } public void flush() { synchronized (this) { for (final Writer indexWriter : adapterIdToWriterCache.values()) { indexWriter.flush(); } } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/KafkaProducerCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import com.beust.jcommander.Parameter; public class KafkaProducerCommandLineOptions extends KafkaCommandLineOptions { @PropertyReference("bootstrap.servers") @Parameter( names = "--bootstrapServers", description = "This is for bootstrapping and the producer will only use it for getting metadata (topics, partitions and replicas). The socket connections for sending the actual data will be established based on the broker information returned in the metadata. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of brokers.") private String bootstrapServers; @PropertyReference("retry.backoff.ms") @Parameter( names = "--retryBackoffMs", description = "The amount of time to wait before attempting to retry a failed produce request to a given topic partition. This avoids repeated sending-and-failing in a tight loop.") private String retryBackoffMs; public String getBootstrapServers() { return bootstrapServers; } public void setBootstrapServers(final String bootstrapServers) { this.bootstrapServers = bootstrapServers; } public String getRetryBackoffMs() { return retryBackoffMs; } public void setRetryBackoffMs(final String retryBackoffMs) { this.retryBackoffMs = retryBackoffMs; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/PropertyReference.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * This is just a hack to get access to the property name that we need to overwrite in the kafka * config property file. */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.FIELD}) public @interface PropertyReference { String value(); } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/StageKafkaData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import java.util.HashMap; import java.util.Map; import java.util.Properties; import org.apache.avro.specific.SpecificRecordBase; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.common.serialization.ByteArraySerializer; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A class to hold intermediate stage data that must be used throughout the life of the Kafka stage * process. */ public class StageKafkaData { private static final Logger LOGGER = LoggerFactory.getLogger(StageKafkaData.class); private final Map> cachedProducers = new HashMap<>(); private final Properties properties; public StageKafkaData(final Properties properties) { this.properties = properties; } public Producer getProducer( final String typeName, final GeoWaveAvroFormatPlugin plugin) { return getProducerCreateIfNull(typeName, plugin); } private synchronized Producer getProducerCreateIfNull( final String typeName, final GeoWaveAvroFormatPlugin plugin) { if (!cachedProducers.containsKey(typeName)) { final Producer producer = new KafkaProducer<>(properties, new ByteArraySerializer(), new ByteArraySerializer()); cachedProducers.put(typeName, producer); } return cachedProducers.get(typeName); } public synchronized void close() { for (final Producer producer : cachedProducers.values()) { try { producer.close(); } catch (final Exception e) { LOGGER.warn("Unable to close kafka producer", e); } } cachedProducers.clear(); } } 
================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/kafka/StageToKafkaDriver.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.kafka; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.Map; import org.apache.avro.specific.SpecificRecordBase; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.locationtech.geowave.core.ingest.avro.GenericAvroSerializer; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.ingest.AbstractLocalFileDriver; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class actually executes the staging of data to a Kafka topic based on the available type * plugin providers that are discovered through SPI. 
*/ public class StageToKafkaDriver extends AbstractLocalFileDriver, StageKafkaData> { private static final Logger LOGGER = LoggerFactory.getLogger(StageToKafkaDriver.class); private final Map> ingestPlugins; private final KafkaProducerCommandLineOptions kafkaOptions; public StageToKafkaDriver( final KafkaProducerCommandLineOptions kafkaOptions, final Map> ingestPlugins, final LocalInputCommandLineOptions localOptions) { super(localOptions); this.kafkaOptions = kafkaOptions; this.ingestPlugins = ingestPlugins; } @Override protected void processFile( final URL file, final String typeName, final GeoWaveAvroFormatPlugin plugin, final StageKafkaData runData) { try { final Producer producer = runData.getProducer(typeName, plugin); try (final CloseableIterator avroRecords = plugin.toAvroObjects(file)) { while (avroRecords.hasNext()) { final Object avroRecord = avroRecords.next(); final ProducerRecord data = new ProducerRecord<>( typeName, GenericAvroSerializer.serialize(avroRecord, plugin.getAvroSchema())); producer.send(data); } } } catch (final Exception e) { LOGGER.info( "Unable to send file [" + file.getPath() + "] to Kafka topic: " + e.getMessage(), e); } } public boolean runOperation(final String inputPath, final File configFile) { final Map> stageToKafkaPlugins = ingestPlugins; try { final StageKafkaData runData = new StageKafkaData<>(kafkaOptions.getProperties()); processInput(inputPath, configFile, stageToKafkaPlugins, runData); runData.close(); return true; } catch (final IOException e) { LOGGER.error("Unable to process input", e); return false; } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/local/LocalFileIngestCLIDriver.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.local; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.ingest.AbstractLocalFileIngestDriver; import org.locationtech.geowave.core.store.ingest.DataAdapterProvider; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This extends the local file driver to directly ingest data into GeoWave utilizing the * LocalFileIngestPlugin's that are discovered by the system. 
*/ public class LocalFileIngestCLIDriver extends AbstractLocalFileIngestDriver { private static final Logger LOGGER = LoggerFactory.getLogger(LocalFileIngestCLIDriver.class); protected DataStorePluginOptions storeOptions; protected List indices; protected VisibilityOptions visibilityOptions; protected Map> ingestPlugins; protected int threads; public LocalFileIngestCLIDriver( final DataStorePluginOptions storeOptions, final List indices, final Map> ingestPlugins, final VisibilityOptions visibilityOptions, final LocalInputCommandLineOptions inputOptions, final int threads) { super(inputOptions); this.storeOptions = storeOptions; this.indices = indices; this.visibilityOptions = visibilityOptions; this.ingestPlugins = ingestPlugins; this.threads = threads; } @Override protected Map getIndices() throws IOException { final Map specifiedPrimaryIndexes = new HashMap<>(); for (final Index primaryIndex : indices) { specifiedPrimaryIndexes.put(primaryIndex.getName(), primaryIndex); } return specifiedPrimaryIndexes; } @Override protected boolean isSupported( final String providerName, final DataAdapterProvider adapterProvider) { return checkIndexesAgainstProvider(providerName, adapterProvider, indices); } @Override protected int getNumThreads() { return threads; } @Override protected VisibilityHandler getVisibilityHandler() { return visibilityOptions.getConfiguredVisibilityHandler(); } @Override protected Map> getIngestPlugins() { return ingestPlugins; } @Override protected DataStore getDataStore() { return storeOptions.createDataStore(); } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/AddTypeCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.operations;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.VisibilityOptions;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.cli.type.TypeSection;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.ingest.AbstractLocalFileIngestDriver;
import org.locationtech.geowave.core.store.ingest.DataAdapterProvider;
import org.locationtech.geowave.core.store.ingest.IngestUtils;
import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;
import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;
import org.locationtech.geowave.core.store.ingest.LocalPluginBase;
import org.locationtech.geowave.core.store.ingest.LocalPluginFileVisitor.PluginVisitor;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;
import com.beust.jcommander.internal.Maps;
import com.clearspring.analytics.util.Lists;

/**
 * CLI command ("geowave type add") that scans a local file or directory using the configured
 * ingest-format plugins, collects every {@code DataTypeAdapter} the plugins would create, and
 * registers each discovered type with the target data store — without ingesting any data.
 *
 * <p>NOTE(review): generic type arguments throughout this file (e.g. "List parameters",
 * "Map> adapters") appear to have been stripped by text extraction; the code tokens are preserved
 * verbatim here — restore the type arguments from the upstream source before compiling.
 */
@GeowaveOperation(name = "add", parentOperation = TypeSection.class)
@Parameters(commandDescription = "Add a type with a given name to the data store")
public class AddTypeCommand extends ServiceEnabledCommand {

  private static final Logger LOGGER = LoggerFactory.getLogger(AddTypeCommand.class);

  // Three positional arguments: file-or-directory, store name, comma-delimited index list.
  // (The description placeholders were lost to angle-bracket stripping during extraction.)
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  @ParametersDelegate
  private VisibilityOptions visibilityOptions = new VisibilityOptions();

  @ParametersDelegate
  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();

  // This helper is used to load the list of format SPI plugins that will be used
  @ParametersDelegate
  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();

  // Both resolved from the positional arguments inside computeResults().
  private DataStorePluginOptions inputStoreOptions = null;

  private List inputIndices = null;

  @Override
  public boolean prepare(final OperationParams params) {
    // Based on the selected formats, select the format plugins
    pluginFormats.selectPlugin(localInputOptions.getFormats());
    return true;
  }

  /** Prep the driver & run the operation. */
  @Override
  public void execute(final OperationParams params) {
    computeResults(params);
  }

  @Override
  public boolean runAsync() {
    return true;
  }

  public List getParameters() {
    return parameters;
  }

  /** Convenience setter assembling the three positional arguments programmatically. */
  public void setParameters(
      final String fileOrDirectory,
      final String storeName,
      final String commaDelimitedIndexes) {
    parameters = new ArrayList<>();
    parameters.add(fileOrDirectory);
    parameters.add(storeName);
    parameters.add(commaDelimitedIndexes);
  }

  public VisibilityOptions getVisibilityOptions() {
    return visibilityOptions;
  }

  public void setVisibilityOptions(final VisibilityOptions visibilityOptions) {
    this.visibilityOptions = visibilityOptions;
  }

  public LocalInputCommandLineOptions getLocalInputOptions() {
    return localInputOptions;
  }

  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {
    this.localInputOptions = localInputOptions;
  }

  public IngestFormatPluginOptions getPluginFormats() {
    return pluginFormats;
  }

  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {
    this.pluginFormats = pluginFormats;
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  public List getInputIndices() {
    return inputIndices;
  }

  /**
   * Loads the store and indices from the positional arguments, discovers all adapters under the
   * input path, and registers each one with the data store.
   *
   * @throws ParameterException when arguments are missing or no types were discovered
   * @throws RuntimeException wrapping any IOException raised during discovery
   */
  @Override
  public Void computeResults(final OperationParams params) {
    // Ensure we have all the required arguments
    if (parameters.size() != 3) {
      throw new ParameterException("Requires arguments: ");
    }
    final String inputPath = parameters.get(0);
    final String inputStoreName = parameters.get(1);
    final String indexList = parameters.get(2);
    // Config file
    final File configFile = getGeoWaveConfigFile(params);
    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());
    final IndexStore indexStore = inputStoreOptions.createIndexStore();
    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);
    try {
      final List> adapters = getAllDataAdapters(inputPath, configFile);
      if (adapters.size() == 0) {
        throw new ParameterException("No types could be found with the given options.");
      }
      final DataStore dataStore = inputStoreOptions.createDataStore();
      final Index[] indices = inputIndices.toArray(new Index[inputIndices.size()]);
      // Register every discovered adapter against the configured indices.
      adapters.forEach(adapter -> {
        dataStore.addType(
            adapter,
            visibilityOptions.getConfiguredVisibilityHandler(),
            Lists.newArrayList(),
            indices);
        params.getConsole().println("Added type: " + adapter.getTypeName());
      });
    } catch (IOException e) {
      throw new RuntimeException("Failed to get data types from specified directory.", e);
    }
    return null;
  }

  /**
   * Walks the input path (local file, directory, or ingest URL) with every supported plugin and
   * returns the de-duplicated list of adapters that an ingest would create. De-duplication is by
   * type name via the shared adapters map.
   */
  public List> getAllDataAdapters(final String inputPath, final File configFile)
      throws IOException {
    final Map> ingestPlugins = pluginFormats.createLocalIngestPlugins();
    final Map> localFileIngestPlugins = new HashMap<>();
    final Map> adapters = Maps.newHashMap();
    // Filter plugins down to those compatible with the configured indices, seeding the adapter
    // map with each plugin's statically-known adapters.
    for (final Entry> pluginEntry : ingestPlugins.entrySet()) {
      if (!isSupported(pluginEntry.getKey(), pluginEntry.getValue())) {
        continue;
      }
      localFileIngestPlugins.put(pluginEntry.getKey(), pluginEntry.getValue());
      Arrays.stream(pluginEntry.getValue().getDataAdapters()).forEach(adapter -> {
        adapters.put(adapter.getTypeName(), adapter);
      });
    }
    Properties configProperties = null;
    if ((configFile != null) && configFile.exists()) {
      configProperties = ConfigOptions.loadProperties(configFile);
    }
    // handleIngestUrl returns null for plain local paths; fall back to the filesystem.
    Path path = IngestUtils.handleIngestUrl(inputPath, configProperties);
    if (path == null) {
      final File f = new File(inputPath);
      if (!f.exists()) {
        LOGGER.error("Input file '" + f.getAbsolutePath() + "' does not exist");
        throw new IllegalArgumentException(inputPath + " does not exist");
      }
      path = Paths.get(inputPath);
    }
    for (final LocalPluginBase localPlugin : localFileIngestPlugins.values()) {
      localPlugin.init(path.toUri().toURL());
    }
    // Recurse the directory tree, letting each plugin contribute adapters per file.
    final DataAdapterFileVisitor fileURLs =
        new DataAdapterFileVisitor(
            localFileIngestPlugins,
            localInputOptions.getExtensions(),
            adapters);
    Files.walkFileTree(path, fileURLs);
    return Lists.newArrayList(adapters.values());
  }

  /**
   * This class is used by the local file driver to recurse a directory of files and find all
   * DataAdapters that would be created by the ingest. Results are accumulated into the shared
   * adapters map keyed by type name.
   */
  public static class DataAdapterFileVisitor implements FileVisitor {

    private static final Logger LOGGER = LoggerFactory.getLogger(DataAdapterFileVisitor.class);

    private final List>> pluginVisitors;
    private final Map> adapters;

    public DataAdapterFileVisitor(
        final Map> localPlugins,
        final String[] userExtensions,
        final Map> adapters) {
      pluginVisitors = new ArrayList<>(localPlugins.size());
      for (final Entry> localPluginBase : localPlugins.entrySet()) {
        pluginVisitors.add(
            new PluginVisitor<>(
                localPluginBase.getValue(),
                localPluginBase.getKey(),
                userExtensions));
      }
      this.adapters = adapters;
    }

    @Override
    public FileVisitResult postVisitDirectory(final Path path, final IOException e)
        throws IOException {
      return FileVisitResult.CONTINUE;
    }

    @Override
    public FileVisitResult preVisitDirectory(final Path path, final BasicFileAttributes bfa)
        throws IOException {
      return FileVisitResult.CONTINUE;
    }

    /** Collects adapters from every plugin that claims to support this file. */
    @Override
    public FileVisitResult visitFile(final Path path, final BasicFileAttributes bfa)
        throws IOException {
      final URL file = path.toUri().toURL();
      for (final PluginVisitor> visitor : pluginVisitors) {
        if (visitor.supportsFile(file)) {
          Arrays.stream(visitor.getLocalPluginBase().getDataAdapters(file)).forEach(adapter -> {
            adapters.put(adapter.getTypeName(), adapter);
          });
        }
      }
      return FileVisitResult.CONTINUE;
    }

    /** Unreadable paths are logged and skipped rather than aborting the walk. */
    @Override
    public FileVisitResult visitFileFailed(final Path path, final IOException bfa)
        throws IOException {
      LOGGER.error("Cannot visit path: " + path);
      return FileVisitResult.CONTINUE;
    }
  }

  // A provider is supported when it can serve every configured index's dimensionality.
  private boolean isSupported(
      final String providerName,
      final DataAdapterProvider adapterProvider) {
    return AbstractLocalFileIngestDriver.checkIndexesAgainstProvider(
        providerName,
        adapterProvider,
        inputIndices);
  }
}
================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/ConfigAWSCommand.java
================================================
/**
 *
Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.ConfigSection; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "aws", parentOperation = ConfigSection.class) @Parameters(commandDescription = "Create a local configuration for AWS S3") public class ConfigAWSCommand extends DefaultOperation implements Command { private static final Logger LOGGER = LoggerFactory.getLogger(ConfigAWSCommand.class); public static final String AWS_S3_ENDPOINT_PREFIX = "s3.endpoint"; public static final String AWS_S3_ENDPOINT_URL = AWS_S3_ENDPOINT_PREFIX + ".url"; @Parameter(description = " (for example s3.amazonaws.com)") private List parameters = new ArrayList<>(); private String url = null; @Override public boolean prepare(final OperationParams params) { boolean retval = true; retval |= super.prepare(params); return retval; } @Override public void execute(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } url = parameters.get(0); 
final Properties existingProps = getGeoWaveConfigProperties(params); // all switches are optional if (url != null) { existingProps.setProperty(AWS_S3_ENDPOINT_URL, url); } // Write properties file ConfigOptions.writeProperties( getGeoWaveConfigFile(params), existingProps, this.getClass(), AWS_S3_ENDPOINT_PREFIX, params.getConsole()); } public static String getS3Url(final Properties configProperties) { String s3EndpointUrl = configProperties.getProperty(ConfigAWSCommand.AWS_S3_ENDPOINT_URL); if (s3EndpointUrl == null) { LOGGER.warn( "S3 endpoint URL is empty. Config using \"geowave config aws \""); s3EndpointUrl = "s3.amazonaws.com"; } if (!s3EndpointUrl.contains("://")) { s3EndpointUrl = "s3://" + s3EndpointUrl; } return s3EndpointUrl; } public void setS3UrlParameter(final String s3EndpointUrl) { parameters = new ArrayList<>(); parameters.add(s3EndpointUrl); } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/IngestOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class IngestOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { IngestSection.class, AddTypeCommand.class, KafkaToGeoWaveCommand.class, ListIngestPluginsCommand.class, LocalToGeoWaveCommand.class, LocalToHdfsCommand.class, LocalToKafkaCommand.class, LocalToMapReduceToGeoWaveCommand.class, MapReduceToGeoWaveCommand.class, ConfigAWSCommand.class, SparkToGeoWaveCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/IngestSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "ingest", parentOperation = GeoWaveTopLevelSection.class) @Parameters( commandDescription = "Commands that ingest data directly into GeoWave or stage data to be ingested into GeoWave") public class IngestSection extends DefaultOperation { } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/KafkaToGeoWaveCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.ingest.operations;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;
import org.locationtech.geowave.core.ingest.kafka.IngestFromKafkaDriver;
import org.locationtech.geowave.core.ingest.kafka.KafkaConsumerCommandLineOptions;
import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.VisibilityOptions;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/**
 * CLI command ("geowave ingest kafkaToGW") that subscribes to a Kafka topic via
 * {@link IngestFromKafkaDriver} and ingests consumed Avro records into a GeoWave store.
 *
 * <p>NOTE(review): generic type arguments throughout this file (e.g. "List parameters",
 * "Map> ingestPlugins") appear to have been stripped by text extraction; the code tokens are
 * preserved verbatim here — restore the type arguments from the upstream source before compiling.
 */
@GeowaveOperation(name = "kafkaToGW", parentOperation = IngestSection.class)
@Parameters(commandDescription = "Subscribe to a Kafka topic and ingest into GeoWave")
public class KafkaToGeoWaveCommand extends ServiceEnabledCommand {

  // Two positional arguments: store name and comma-separated index list. (The description
  // placeholders were lost to angle-bracket stripping during extraction.)
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  @ParametersDelegate
  private VisibilityOptions visibilityOptions = new VisibilityOptions();

  @ParametersDelegate
  private KafkaConsumerCommandLineOptions kafkaOptions = new KafkaConsumerCommandLineOptions();

  @ParametersDelegate
  private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions();

  // This helper is used to load the list of format SPI plugins that will be used
  @ParametersDelegate
  private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions();

  // Resolved from the positional arguments inside computeResults().
  private DataStorePluginOptions inputStoreOptions = null;

  private List inputIndices = null;

  // Exposed for callers (e.g. tests/services) that need the running driver.
  protected IngestFromKafkaDriver driver = null;

  @Override
  public boolean prepare(final OperationParams params) {
    // TODO: localInputOptions has 'extensions' which doesn't mean
    // anything for Kafka to Geowave
    // Based on the selected formats, select the format plugins
    pluginFormats.selectPlugin(localInputOptions.getFormats());
    return true;
  }

  /**
   * Validates the positional arguments, then preps the driver & runs the operation.
   *
   * @throws Exception on ingest failure
   */
  @Override
  public void execute(final OperationParams params) throws Exception {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      throw new ParameterException("Requires arguments: ");
    }
    computeResults(params);
  }

  @Override
  public boolean runAsync() {
    return true;
  }

  public IngestFromKafkaDriver getDriver() {
    return driver;
  }

  public List getParameters() {
    return parameters;
  }

  /** Convenience setter assembling the two positional arguments programmatically. */
  public void setParameters(final String storeName, final String commaSeparatedIndexes) {
    parameters = new ArrayList<>();
    parameters.add(storeName);
    parameters.add(commaSeparatedIndexes);
  }

  public VisibilityOptions getVisibilityOptions() {
    return visibilityOptions;
  }

  public void setVisibilityOptions(final VisibilityOptions visibilityOptions) {
    this.visibilityOptions = visibilityOptions;
  }

  public KafkaConsumerCommandLineOptions getKafkaOptions() {
    return kafkaOptions;
  }

  public void setKafkaOptions(final KafkaConsumerCommandLineOptions kafkaOptions) {
    this.kafkaOptions = kafkaOptions;
  }

  public LocalInputCommandLineOptions getLocalInputOptions() {
    return localInputOptions;
  }

  public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) {
    this.localInputOptions = localInputOptions;
  }

  public IngestFormatPluginOptions getPluginFormats() {
    return pluginFormats;
  }

  public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) {
    this.pluginFormats = pluginFormats;
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  public List getInputIndices() {
    return inputIndices;
  }

  /**
   * Loads the store and indices, builds the Avro ingest plugins, and runs the Kafka consumption
   * driver, raising a RuntimeException when the driver reports failure.
   */
  @Override
  public Void computeResults(final OperationParams params) throws Exception {
    final String inputStoreName = parameters.get(0);
    final String indexList = parameters.get(1);
    inputStoreOptions =
        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());
    final IndexStore indexStore = inputStoreOptions.createIndexStore();
    inputIndices = DataStoreUtils.loadIndices(indexStore, indexList);
    // Ingest Plugins
    final Map> ingestPlugins = pluginFormats.createAvroPlugins();
    // Driver
    driver =
        new IngestFromKafkaDriver(
            inputStoreOptions,
            inputIndices,
            ingestPlugins,
            kafkaOptions,
            visibilityOptions.getConfiguredVisibilityHandler());
    // Execute
    if (!driver.runOperation()) {
      throw new RuntimeException("Ingest failed to execute");
    }
    return null;
  }
}
================================================
FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/ListIngestPluginsCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.util.Map.Entry; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi; import org.locationtech.geowave.core.ingest.spi.IngestFormatPluginRegistry; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "listplugins", parentOperation = IngestSection.class) @Parameters(commandDescription = "List supported ingest formats") public class ListIngestPluginsCommand extends ServiceEnabledCommand { @Override public void execute(final OperationParams params) { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) { final StringBuilder builder = new StringBuilder(); builder.append("Available ingest formats currently registered as plugins:\n"); for (final Entry> pluginProviderEntry : IngestFormatPluginRegistry.getPluginProviderRegistry().entrySet()) { final IngestFormatPluginProviderSpi pluginProvider = pluginProviderEntry.getValue(); final String desc = pluginProvider.getIngestFormatDescription() == null ? 
"no description" : pluginProvider.getIngestFormatDescription(); builder.append(String.format("%n %s:%n %s%n", pluginProviderEntry.getKey(), desc)); } return builder.toString(); } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToGeoWaveCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.ingest.local.LocalFileIngestCLIDriver; import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.locationtech.geowave.core.store.util.DataStoreUtils; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "localToGW", parentOperation = IngestSection.class) @Parameters( commandDescription = "Ingest supported files in local file system directly, from S3 or from HDFS") public class LocalToGeoWaveCommand extends ServiceEnabledCommand { @Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private VisibilityOptions visibilityOptions = new VisibilityOptions(); 
@ParametersDelegate private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions(); // This helper is used to load the list of format SPI plugins that will be // used @ParametersDelegate private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions(); @Parameter( names = {"-t", "--threads"}, description = "number of threads to use for ingest, default to 1 (optional)") private int threads = 1; private DataStorePluginOptions inputStoreOptions = null; private List inputIndices = null; @Override public boolean prepare(final OperationParams params) { // Based on the selected formats, select the format plugins pluginFormats.selectPlugin(localInputOptions.getFormats()); return true; } /** Prep the driver & run the operation. */ @Override public void execute(final OperationParams params) { computeResults(params); } @Override public boolean runAsync() { return true; } public List getParameters() { return parameters; } public void setParameters( final String fileOrDirectory, final String storeName, final String commaDelimitedIndexes) { parameters = new ArrayList<>(); parameters.add(fileOrDirectory); parameters.add(storeName); parameters.add(commaDelimitedIndexes); } public VisibilityOptions getVisibilityOptions() { return visibilityOptions; } public void setVisibilityOptions(final VisibilityOptions visibilityOptions) { this.visibilityOptions = visibilityOptions; } public LocalInputCommandLineOptions getLocalInputOptions() { return localInputOptions; } public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) { this.localInputOptions = localInputOptions; } public IngestFormatPluginOptions getPluginFormats() { return pluginFormats; } public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) { this.pluginFormats = pluginFormats; } public int getThreads() { return threads; } public void setThreads(final int threads) { this.threads = threads; } public DataStorePluginOptions 
getInputStoreOptions() { return inputStoreOptions; } public List getInputIndices() { return inputIndices; } @Override public Void computeResults(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 3) { throw new ParameterException( "Requires arguments: "); } final String inputPath = parameters.get(0); final String inputStoreName = parameters.get(1); final String indexList = parameters.get(2); // Config file final File configFile = getGeoWaveConfigFile(params); inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); final IndexStore indexStore = inputStoreOptions.createIndexStore(); inputIndices = DataStoreUtils.loadIndices(indexStore, indexList); // Ingest Plugins final Map> ingestPlugins = pluginFormats.createLocalIngestPlugins(); // Driver final LocalFileIngestCLIDriver driver = new LocalFileIngestCLIDriver( inputStoreOptions, inputIndices, ingestPlugins, visibilityOptions, localInputOptions, threads); // Execute if (!driver.runOperation(inputPath, configFile)) { throw new RuntimeException("Ingest failed to execute"); } return null; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToHdfsCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.ingest.hdfs.StageToHdfsDriver; import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "localToHdfs", parentOperation = IngestSection.class) @Parameters(commandDescription = "Stage supported files in local file system to HDFS") public class LocalToHdfsCommand extends ServiceEnabledCommand { @Parameter(description = " ") private List parameters = new ArrayList<>(); // This helper is used to load the list of format SPI plugins that will be // used @ParametersDelegate private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions(); @ParametersDelegate private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions(); @Override public 
boolean prepare(final OperationParams params) { // Based on the selected formats, select the format plugins pluginFormats.selectPlugin(localInputOptions.getFormats()); return true; } /** * Prep the driver & run the operation. * * @throws Exception */ @Override public void execute(final OperationParams params) throws Exception { computeResults(params); } @Override public boolean runAsync() { return true; } public List getParameters() { return parameters; } public void setParameters(final String fileOrDirectory, final String hdfsPath) { parameters = new ArrayList<>(); parameters.add(fileOrDirectory); parameters.add(hdfsPath); } public IngestFormatPluginOptions getPluginFormats() { return pluginFormats; } public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) { this.pluginFormats = pluginFormats; } public LocalInputCommandLineOptions getLocalInputOptions() { return localInputOptions; } public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) { this.localInputOptions = localInputOptions; } @Override public Void computeResults(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException( "Requires arguments: "); } // Config file final File configFile = getGeoWaveConfigFile(params); final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties); final String inputPath = parameters.get(0); final String basePath = parameters.get(1); // Ingest Plugins final Map> ingestPlugins = pluginFormats.createAvroPlugins(); // Driver final StageToHdfsDriver driver = new StageToHdfsDriver(ingestPlugins, hdfsHostPort, basePath, localInputOptions); // Execute if (!driver.runOperation(inputPath, configFile)) { throw new RuntimeException("Ingest failed to execute"); } return null; } } ================================================ FILE: 
core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToKafkaCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.ingest.kafka.KafkaProducerCommandLineOptions; import org.locationtech.geowave.core.ingest.kafka.StageToKafkaDriver; import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "localToKafka", parentOperation = IngestSection.class) @Parameters(commandDescription = "Stage supported files in local file system to a Kafka topic") public class LocalToKafkaCommand extends ServiceEnabledCommand { @Parameter(description = "") private List parameters = new ArrayList<>(); @ParametersDelegate private KafkaProducerCommandLineOptions kafkaOptions = new KafkaProducerCommandLineOptions(); @ParametersDelegate private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions(); // This helper is used to load the list of format SPI plugins that will be // used @ParametersDelegate private IngestFormatPluginOptions pluginFormats = new 
IngestFormatPluginOptions(); @Override public boolean prepare(final OperationParams params) { // Based on the selected formats, select the format plugins pluginFormats.selectPlugin(localInputOptions.getFormats()); return true; } /** * Prep the driver & run the operation. * * @throws Exception */ @Override public void execute(final OperationParams params) throws Exception { computeResults(params); } @Override public boolean runAsync() { return true; } public List getParameters() { return parameters; } public void setParameters(final String fileOrDirectory) { parameters = new ArrayList<>(); parameters.add(fileOrDirectory); } public KafkaProducerCommandLineOptions getKafkaOptions() { return kafkaOptions; } public void setKafkaOptions(final KafkaProducerCommandLineOptions kafkaOptions) { this.kafkaOptions = kafkaOptions; } public LocalInputCommandLineOptions getLocalInputOptions() { return localInputOptions; } public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) { this.localInputOptions = localInputOptions; } public IngestFormatPluginOptions getPluginFormats() { return pluginFormats; } public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) { this.pluginFormats = pluginFormats; } @Override public Void computeResults(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires arguments: "); } final String inputPath = parameters.get(0); // Ingest Plugins final Map> ingestPlugins = pluginFormats.createLocalIngestPlugins(); // Driver final StageToKafkaDriver driver = new StageToKafkaDriver(kafkaOptions, ingestPlugins, localInputOptions); // Config file final File configFile = getGeoWaveConfigFile(params); // Execute if (!driver.runOperation(inputPath, configFile)) { throw new RuntimeException("Ingest failed to execute"); } return null; } } ================================================ FILE: 
core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/LocalToMapReduceToGeoWaveCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.ingest.hdfs.StageToHdfsDriver; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsDriver; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.MapReduceCommandLineOptions; import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import 
com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "localToMrGW", parentOperation = IngestSection.class) @Parameters( commandDescription = "Copy supported files from local file system to HDFS and ingest from HDFS") public class LocalToMapReduceToGeoWaveCommand extends ServiceEnabledCommand { @Parameter( description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private VisibilityOptions ingestOptions = new VisibilityOptions(); @ParametersDelegate private MapReduceCommandLineOptions mapReduceOptions = new MapReduceCommandLineOptions(); @ParametersDelegate private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions(); // This helper is used to load the list of format SPI plugins that will be // used @ParametersDelegate private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions(); private DataStorePluginOptions inputStoreOptions = null; private List inputIndices = null; @Override public boolean prepare(final OperationParams params) { // Based on the selected formats, select the format plugins pluginFormats.selectPlugin(localInputOptions.getFormats()); return true; } /** * Prep the driver & run the operation. 
* * @throws Exception */ @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 4) { throw new ParameterException( "Requires arguments: "); } computeResults(params); } @Override public boolean runAsync() { return true; } public List getParameters() { return parameters; } public void setParameters( final String fileOrDirectory, final String pathToBaseDirectory, final String storeName, final String indexList) { parameters = new ArrayList<>(); parameters.add(fileOrDirectory); parameters.add(pathToBaseDirectory); parameters.add(storeName); parameters.add(indexList); } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public List getInputIndices() { return inputIndices; } public VisibilityOptions getIngestOptions() { return ingestOptions; } public void setIngestOptions(final VisibilityOptions ingestOptions) { this.ingestOptions = ingestOptions; } public MapReduceCommandLineOptions getMapReduceOptions() { return mapReduceOptions; } public void setMapReduceOptions(final MapReduceCommandLineOptions mapReduceOptions) { this.mapReduceOptions = mapReduceOptions; } public LocalInputCommandLineOptions getLocalInputOptions() { return localInputOptions; } public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) { this.localInputOptions = localInputOptions; } public IngestFormatPluginOptions getPluginFormats() { return pluginFormats; } public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) { this.pluginFormats = pluginFormats; } @Override public Void computeResults(final OperationParams params) throws Exception { if (mapReduceOptions.getJobTrackerOrResourceManagerHostPort() == null) { throw new ParameterException( "Requires job tracker or resource manager option (try geowave help ...)"); } final String inputPath = parameters.get(0); final String basePath = parameters.get(1); final String inputStoreName = 
parameters.get(2); final String indexList = parameters.get(3); // Config file final File configFile = getGeoWaveConfigFile(params); final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties); inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); final IndexStore indexStore = inputStoreOptions.createIndexStore(); inputIndices = DataStoreUtils.loadIndices(indexStore, indexList); // Ingest Plugins final Map> avroIngestPlugins = pluginFormats.createAvroPlugins(); // Ingest Plugins final Map> hdfsIngestPlugins = pluginFormats.createHdfsIngestPlugins(); { // Driver final StageToHdfsDriver driver = new StageToHdfsDriver(avroIngestPlugins, hdfsHostPort, basePath, localInputOptions); // Execute if (!driver.runOperation(inputPath, configFile)) { throw new RuntimeException("Ingest failed to execute"); } } { // Driver final IngestFromHdfsDriver driver = new IngestFromHdfsDriver( inputStoreOptions, inputIndices, ingestOptions, mapReduceOptions, hdfsIngestPlugins, hdfsHostPort, basePath); // Execute if (!driver.runOperation()) { throw new RuntimeException("Ingest failed to execute"); } } return null; }; } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/MapReduceToGeoWaveCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsDriver; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.MapReduceCommandLineOptions; import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "mrToGW", parentOperation = IngestSection.class) @Parameters(commandDescription = "Ingest supported files that 
already exist in HDFS") public class MapReduceToGeoWaveCommand extends ServiceEnabledCommand { @Parameter( description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private VisibilityOptions ingestOptions = new VisibilityOptions(); @ParametersDelegate private MapReduceCommandLineOptions mapReduceOptions = new MapReduceCommandLineOptions(); @ParametersDelegate private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions(); // This helper is used to load the list of format SPI plugins that will be // used @ParametersDelegate private IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions(); private DataStorePluginOptions inputStoreOptions = null; private List inputIndices = null; @Override public boolean prepare(final OperationParams params) { // TODO: localInputOptions has 'extensions' which doesn't mean // anything for MapReduce to GeoWave. // Based on the selected formats, select the format plugins pluginFormats.selectPlugin(localInputOptions.getFormats()); return true; } /** * Prep the driver & run the operation. 
* * @throws Exception */ @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 3) { throw new ParameterException( "Requires arguments: "); } computeResults(params); } @Override public boolean runAsync() { return true; } public List getParameters() { return parameters; } public void setParameters( final String hdfsPath, final String storeName, final String commaSeparatedIndexes) { parameters = new ArrayList<>(); parameters.add(hdfsPath); parameters.add(storeName); parameters.add(commaSeparatedIndexes); } public VisibilityOptions getIngestOptions() { return ingestOptions; } public void setIngestOptions(final VisibilityOptions ingestOptions) { this.ingestOptions = ingestOptions; } public MapReduceCommandLineOptions getMapReduceOptions() { return mapReduceOptions; } public void setMapReduceOptions(final MapReduceCommandLineOptions mapReduceOptions) { this.mapReduceOptions = mapReduceOptions; } public LocalInputCommandLineOptions getLocalInputOptions() { return localInputOptions; } public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) { this.localInputOptions = localInputOptions; } public IngestFormatPluginOptions getPluginFormats() { return pluginFormats; } public void setPluginFormats(final IngestFormatPluginOptions pluginFormats) { this.pluginFormats = pluginFormats; } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public List getInputIndices() { return inputIndices; } @Override public Void computeResults(final OperationParams params) throws Exception { if (mapReduceOptions.getJobTrackerOrResourceManagerHostPort() == null) { throw new ParameterException( "Requires job tracker or resource manager option (try geowave help ...)"); } final String basePath = parameters.get(0); final String inputStoreName = parameters.get(1); final String indexList = parameters.get(2); // Config file final File configFile = 
getGeoWaveConfigFile(params); final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties); inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); final IndexStore indexStore = inputStoreOptions.createIndexStore(); inputIndices = DataStoreUtils.loadIndices(indexStore, indexList); // Ingest Plugins final Map> ingestPlugins = pluginFormats.createHdfsIngestPlugins(); // Driver final IngestFromHdfsDriver driver = new IngestFromHdfsDriver( inputStoreOptions, inputIndices, ingestOptions, mapReduceOptions, ingestPlugins, hdfsHostPort, basePath); // Execute if (!driver.runOperation()) { throw new RuntimeException("Ingest failed to execute"); } return null; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/SparkToGeoWaveCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.ingest.spark.SparkCommandLineOptions; import org.locationtech.geowave.core.ingest.spark.SparkIngestDriver; import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "sparkToGW", parentOperation = IngestSection.class) @Parameters(commandDescription = "Ingest supported files that already exist in HDFS or S3") public class SparkToGeoWaveCommand extends ServiceEnabledCommand { @Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private VisibilityOptions ingestOptions = new VisibilityOptions(); @ParametersDelegate private SparkCommandLineOptions sparkOptions = new SparkCommandLineOptions(); @ParametersDelegate private LocalInputCommandLineOptions localInputOptions = new LocalInputCommandLineOptions(); @Override public boolean prepare(final OperationParams params) { return true; } /** * Prep the driver & run the operation. 
* * @throws Exception */ @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 3) { throw new ParameterException( "Requires arguments: "); } computeResults(params); } public List getParameters() { return parameters; } public void setParameters( final String inputPath, final String storeName, final String commaSeparatedIndexes) { parameters = new ArrayList<>(); parameters.add(inputPath); parameters.add(storeName); parameters.add(commaSeparatedIndexes); } public VisibilityOptions getIngestOptions() { return ingestOptions; } public void setIngestOptions(final VisibilityOptions ingestOptions) { this.ingestOptions = ingestOptions; } public SparkCommandLineOptions getSparkOptions() { return sparkOptions; } public void setSparkOptions(final SparkCommandLineOptions sparkOptions) { this.sparkOptions = sparkOptions; } public LocalInputCommandLineOptions getLocalInputOptions() { return localInputOptions; } public void setLocalInputOptions(final LocalInputCommandLineOptions localInputOptions) { this.localInputOptions = localInputOptions; } @Override public Void computeResults(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 3) { throw new ParameterException( "Requires arguments: "); } final String inputPath = parameters.get(0); final String inputStoreName = parameters.get(1); final String indexList = parameters.get(2); // Config file final File configFile = getGeoWaveConfigFile(params); // Driver final SparkIngestDriver driver = new SparkIngestDriver(); // Execute if (!driver.runOperation( configFile, localInputOptions, inputStoreName, indexList, ingestOptions, sparkOptions, inputPath, params.getConsole())) { throw new RuntimeException("Ingest failed to execute"); } return null; } } ================================================ FILE: 
core/ingest/src/main/java/org/locationtech/geowave/core/ingest/operations/options/IngestFormatPluginOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.operations.options; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.core.cli.api.DefaultPluginOptions; import org.locationtech.geowave.core.cli.api.PluginOptions; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin; import org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi; import org.locationtech.geowave.core.ingest.spi.IngestFormatPluginRegistry; import org.locationtech.geowave.core.store.ingest.IngestFormatOptions; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.ParameterException; import com.beust.jcommander.ParametersDelegate; /** * This convenience class has methods for loading a list of plugins based on command line options * set by the user. */ public class IngestFormatPluginOptions extends DefaultPluginOptions implements PluginOptions { private static final Logger LOGGER = LoggerFactory.getLogger(IngestFormatPluginOptions.class); private String formats; private Map> plugins = new HashMap<>(); @ParametersDelegate private HashMap options = new HashMap<>(); @Override public void selectPlugin(final String qualifier) { // This is specified as so: format1,format2,... 
formats = qualifier; if ((qualifier != null) && (qualifier.length() > 0)) { for (final String name : qualifier.split(",")) { addFormat(name.trim()); } } else { // Add all for (final String formatName : IngestFormatPluginRegistry.getPluginProviderRegistry().keySet()) { addFormat(formatName); } } } private void addFormat(final String formatName) { final IngestFormatPluginProviderSpi formatPlugin = IngestFormatPluginRegistry.getPluginProviderRegistry().get(formatName); if (formatPlugin == null) { throw new ParameterException("Unknown format type specified: " + formatName); } plugins.put(formatName, formatPlugin); IngestFormatOptions optionObject = formatPlugin.createOptionsInstances(); if (optionObject == null) { optionObject = new IngestFormatOptions() {}; } options.put(formatName, optionObject); } @Override public String getType() { return formats; } public Map> createLocalIngestPlugins() { final Map> ingestPlugins = new HashMap<>(); for (final Entry> entry : plugins.entrySet()) { final IngestFormatPluginProviderSpi formatPlugin = entry.getValue(); final IngestFormatOptions formatOptions = options.get(entry.getKey()); LocalFileIngestPlugin plugin = null; try { plugin = formatPlugin.createLocalFileIngestPlugin(formatOptions); if (plugin == null) { throw new UnsupportedOperationException(); } } catch (final UnsupportedOperationException e) { LOGGER.warn( "Plugin provider for ingest type '" + formatPlugin.getIngestFormatName() + "' does not support local file ingest", e); continue; } ingestPlugins.put(formatPlugin.getIngestFormatName(), plugin); } return ingestPlugins; } public Map> createHdfsIngestPlugins() { final Map> ingestPlugins = new HashMap<>(); for (final Entry> entry : plugins.entrySet()) { final IngestFormatPluginProviderSpi formatPlugin = entry.getValue(); final IngestFormatOptions formatOptions = options.get(entry.getKey()); IngestFromHdfsPlugin plugin = null; try { plugin = formatPlugin.createIngestFromHdfsPlugin(formatOptions); if (plugin == null) { 
throw new UnsupportedOperationException(); } } catch (final UnsupportedOperationException e) { LOGGER.warn( "Plugin provider for ingest type '" + formatPlugin.getIngestFormatName() + "' does not support hdfs ingest", e); continue; } ingestPlugins.put(formatPlugin.getIngestFormatName(), plugin); } return ingestPlugins; } public Map> createAvroPlugins() { final Map> ingestPlugins = new HashMap<>(); for (final Entry> entry : plugins.entrySet()) { final IngestFormatPluginProviderSpi formatPlugin = entry.getValue(); final IngestFormatOptions formatOptions = options.get(entry.getKey()); GeoWaveAvroFormatPlugin plugin = null; try { plugin = formatPlugin.createAvroFormatPlugin(formatOptions); if (plugin == null) { throw new UnsupportedOperationException(); } } catch (final UnsupportedOperationException e) { LOGGER.warn( "Plugin provider for ingest type '" + formatPlugin.getIngestFormatName() + "' does not support avro ingest", e); continue; } ingestPlugins.put(formatPlugin.getIngestFormatName(), plugin); } return ingestPlugins; } public Map> getPlugins() { return plugins; } public void setPlugins(final Map> plugins) { this.plugins = plugins; } public Map getOptions() { return options; } public void setOptions(final HashMap options) { this.options = options; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spark/SparkCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.spark; import com.beust.jcommander.Parameter; public class SparkCommandLineOptions { @Parameter(names = {"-n", "--name"}, description = "The spark application name") private String appName = "Spark Ingest"; @Parameter(names = {"-ho", "--host"}, description = "The spark driver host") private String host = "localhost"; @Parameter(names = {"-m", "--master"}, description = "The spark master designation") private String master = "local"; @Parameter(names = {"-e", "--numexecutors"}, description = "Number of executors") private int numExecutors = -1; @Parameter(names = {"-c", "--numcores"}, description = "Number of cores") private int numCores = -1; public SparkCommandLineOptions() {} public String getAppName() { return appName; } public void setAppName(final String appName) { this.appName = appName; } public String getHost() { return host; } public void setHost(final String host) { this.host = host; } public String getMaster() { return master; } public void setMaster(final String master) { this.master = master; } public int getNumExecutors() { return numExecutors; } public void setNumExecutors(final int numExecutors) { this.numExecutors = numExecutors; } public int getNumCores() { return numCores; } public void setNumCores(final int numCores) { this.numCores = numCores; } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spark/SparkIngestDriver.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.spark; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLStreamHandlerFactory; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.SparkSession; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.ingest.URLIngestUtils; import org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE; import org.locationtech.geowave.core.ingest.local.LocalFileIngestCLIDriver; import org.locationtech.geowave.core.ingest.operations.ConfigAWSCommand; import org.locationtech.geowave.core.ingest.operations.options.IngestFormatPluginOptions; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; 
import org.locationtech.geowave.core.store.cli.VisibilityOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.ingest.IngestUtils; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.geowave.core.store.ingest.LocalIngestRunData; import org.locationtech.geowave.core.store.ingest.LocalInputCommandLineOptions; import org.locationtech.geowave.core.store.ingest.LocalPluginFileVisitor.PluginVisitor; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import org.locationtech.geowave.mapreduce.s3.GeoWaveAmazonS3Factory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.JCommander; import com.beust.jcommander.internal.Console; import com.google.common.collect.Lists; import com.upplication.s3fs.S3FileSystem; import com.upplication.s3fs.S3FileSystemProvider; public class SparkIngestDriver implements Serializable { /** * */ private static final long serialVersionUID = 1L; private static final Logger LOGGER = LoggerFactory.getLogger(SparkIngestDriver.class); public SparkIngestDriver() {} public boolean runOperation( final File configFile, final LocalInputCommandLineOptions localInput, final String inputStoreName, final String indexList, final VisibilityOptions ingestOptions, final SparkCommandLineOptions sparkOptions, final String basePath, final Console console) throws IOException { final Properties configProperties = ConfigOptions.loadProperties(configFile); JavaSparkContext jsc = null; SparkSession session = null; int numExecutors; int numCores; int numPartitions; Path inputPath; String s3EndpointUrl = null; final boolean isS3 = basePath.startsWith("s3://"); final boolean isHDFS = !isS3 && (basePath.startsWith("hdfs://") || basePath.startsWith("file:/")); // If input path is S3 if 
(isS3) { s3EndpointUrl = ConfigAWSCommand.getS3Url(configProperties); inputPath = URLIngestUtils.setupS3FileSystem(basePath, s3EndpointUrl); } // If input path is HDFS else if (isHDFS) { final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties); inputPath = setUpHDFSFilesystem(basePath, hdfsFSUrl, basePath.startsWith("file:/")); } else { LOGGER.warn("Spark ingest support only S3 or HDFS as input location"); return false; } if ((inputPath == null) || (!Files.exists(inputPath))) { LOGGER.error("Error in accessing Input path " + basePath); return false; } final List inputFileList = new ArrayList<>(); Files.walkFileTree(inputPath, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { inputFileList.add(file); return FileVisitResult.CONTINUE; } }); final int numInputFiles = inputFileList.size(); if (sparkOptions.getNumExecutors() < 1) { numExecutors = (int) Math.ceil((double) numInputFiles / 8); } else { numExecutors = sparkOptions.getNumExecutors(); } if (sparkOptions.getNumCores() < 1) { numCores = 4; } else { numCores = sparkOptions.getNumCores(); } numPartitions = numExecutors * numCores * 2; if (session == null) { String jar = ""; try { jar = SparkIngestDriver.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); } catch (final URISyntaxException e) { LOGGER.error("Unable to set jar location in spark configuration", e); } session = SparkSession.builder().appName(sparkOptions.getAppName()).master( sparkOptions.getMaster()).config("spark.driver.host", sparkOptions.getHost()).config( "spark.jars", jar).config("spark.executor.instances", Integer.toString(numExecutors)).config( "spark.executor.cores", Integer.toString(numCores)).getOrCreate(); jsc = JavaSparkContext.fromSparkContext(session.sparkContext()); } final JavaRDD fileRDD = jsc.parallelize(Lists.transform(inputFileList, path -> path.toUri()), numPartitions); if (isS3) { final String 
s3FinalEndpointUrl = s3EndpointUrl; fileRDD.foreachPartition(uri -> { final S3FileSystem fs = initializeS3FS(s3FinalEndpointUrl); final List inputFiles = new ArrayList<>(); while (uri.hasNext()) { final Path inputFile = fs.getPath(uri.next().toString().replaceFirst(s3FinalEndpointUrl, "")); inputFiles.add(inputFile.toUri()); } processInput( configFile, localInput, inputStoreName, indexList, ingestOptions, configProperties, inputFiles.iterator(), console); }); } else if (isHDFS) { try { setHdfsURLStreamHandlerFactory(); } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { // TODO Auto-generated catch block e.printStackTrace(); } fileRDD.foreachPartition(uri -> { processInput( configFile, localInput, inputStoreName, indexList, ingestOptions, configProperties, uri, new JCommander().getConsole()); }); } close(session); return true; } public void processInput( final File configFile, final LocalInputCommandLineOptions localInput, final String inputStoreName, final String indexList, final VisibilityOptions visibilityOptions, final Properties configProperties, final Iterator inputFiles, final Console console) throws IOException { // Based on the selected formats, select the format plugins final IngestFormatPluginOptions pluginFormats = new IngestFormatPluginOptions(); // Based on the selected formats, select the format plugins pluginFormats.selectPlugin(localInput.getFormats()); DataStorePluginOptions inputStoreOptions = null; List indices = null; // Ingest Plugins final Map> ingestPlugins = pluginFormats.createLocalIngestPlugins(); inputStoreOptions = CLIUtils.loadStore(configProperties, inputStoreName, configFile, console); final IndexStore indexStore = inputStoreOptions.createIndexStore(); indices = DataStoreUtils.loadIndices(indexStore, indexList); // first collect the local file ingest plugins final Map> localFileIngestPlugins = new HashMap<>(); final List> adapters = new ArrayList<>(); for (final Entry> 
pluginEntry : ingestPlugins.entrySet()) { if (!IngestUtils.checkIndexesAgainstProvider( pluginEntry.getKey(), pluginEntry.getValue(), indices)) { continue; } localFileIngestPlugins.put(pluginEntry.getKey(), pluginEntry.getValue()); adapters.addAll(Arrays.asList(pluginEntry.getValue().getDataAdapters())); } final LocalFileIngestCLIDriver localIngestDriver = new LocalFileIngestCLIDriver( inputStoreOptions, indices, localFileIngestPlugins, visibilityOptions, localInput, 1); localIngestDriver.startExecutor(); final DataStore dataStore = inputStoreOptions.createDataStore(); try (LocalIngestRunData runData = new LocalIngestRunData( adapters, dataStore, visibilityOptions.getConfiguredVisibilityHandler())) { final List>> pluginVisitors = new ArrayList<>(localFileIngestPlugins.size()); for (final Entry> localPlugin : localFileIngestPlugins.entrySet()) { pluginVisitors.add( new PluginVisitor>( localPlugin.getValue(), localPlugin.getKey(), localInput.getExtensions())); } while (inputFiles.hasNext()) { final URL file = inputFiles.next().toURL(); for (final PluginVisitor> visitor : pluginVisitors) { if (visitor.supportsFile(file)) { localIngestDriver.processFile( file, visitor.getTypeName(), visitor.getLocalPluginBase(), runData); } } } } catch (final MalformedURLException e) { LOGGER.error("Error in converting input path to URL for " + inputFiles, e); throw new MalformedURLException("Error in converting input path to URL for " + inputFiles); } catch (final Exception e) { LOGGER.error("Error processing in processing input", e); throw new RuntimeException("Error processing in processing input", e); } finally { localIngestDriver.shutdownExecutor(); } } public void close(SparkSession session) { if (session != null) { session.close(); session = null; } } public Path setUpHDFSFilesystem( final String basePath, final String hdfsFSUrl, final boolean isLocalPath) { final String hdfsInputPath = basePath.replaceFirst("hdfs://", "/"); Path path = null; try { URI uri = null; if 
(isLocalPath) { uri = new URI(hdfsInputPath); } else { uri = new URI(hdfsFSUrl + hdfsInputPath); } path = Paths.get(uri); // HP Fortify "Path Traversal" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway } catch (final URISyntaxException e) { LOGGER.error("Unable to ingest data, Inavlid HDFS Path", e); return null; } return path; } public S3FileSystem initializeS3FS(final String s3EndpointUrl) throws URISyntaxException { try { URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3); } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e1) { LOGGER.error("Error in setting up S3URLStreamHandler Factory", e1); throw new RuntimeException("Error in setting up S3URLStreamHandler Factory", e1); } return (S3FileSystem) new S3FileSystemProvider().getFileSystem( new URI(s3EndpointUrl), Collections.singletonMap( S3FileSystemProvider.AMAZON_S3_FACTORY_CLASS, GeoWaveAmazonS3Factory.class.getName())); } public static void setHdfsURLStreamHandlerFactory() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException { final Field factoryField = URL.class.getDeclaredField("factory"); factoryField.setAccessible(true); // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe final URLStreamHandlerFactory urlStreamHandlerFactory = (URLStreamHandlerFactory) factoryField.get(null); if (urlStreamHandlerFactory == null) { URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory()); } else { try { factoryField.setAccessible(true); // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe factoryField.set(null, new FsUrlStreamHandlerFactory()); } catch (final IllegalAccessException e1) { LOGGER.error("Could not access URLStreamHandler factory field on URL class: {}", e1); 
throw new RuntimeException( "Could not access URLStreamHandler factory field on URL class: {}", e1); } } } } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spi/IngestFormatPluginProviderSpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.spi; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin; import org.locationtech.geowave.core.store.ingest.IngestFormatOptions; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; /** * This interface can be injected and automatically discovered using SPI to provide a new ingest * format to the GeoWave ingestion framework. It is not required that a new ingest format implement * all of the plugins. However, each plugin directly corresponds to a user selected operation and * only the plugins that are supported will result in usable operations. * * @param The type for intermediate data * @param The type for the resulting data that is ingested into GeoWave */ public interface IngestFormatPluginProviderSpi { /** * This plugin will be used by the ingestion framework to read data from HDFS in the form of the * intermediate data format, and translate the intermediate data into the data entries that will * be written in GeoWave. * * @return The plugin for ingesting data from HDFS * @throws UnsupportedOperationException If ingesting intermediate data from HDFS is not supported */ public IngestFromHdfsPlugin createIngestFromHdfsPlugin(IngestFormatOptions options) throws UnsupportedOperationException; /** * This plugin will be used by the ingestion framework to read data from a local file system, and * translate supported files into the data entries that will be written directly in GeoWave. 
* * @return The plugin for ingesting data from a local file system directly into GeoWave * @throws UnsupportedOperationException If ingesting data directly from a local file system is * not supported */ public LocalFileIngestPlugin createLocalFileIngestPlugin(IngestFormatOptions options) throws UnsupportedOperationException; /** * This will represent the name for the format that is registered with the ingest framework and * presented as a data format option via the commandline. For consistency, this name is preferably * lower-case and without spaces, and should uniquely identify the data format as much as * possible. * * @return The name that will be associated with this format */ public String getIngestFormatName(); /** * This is a means for a plugin to provide custom command-line options. If this is null, there * will be no custom options added. * * @return The ingest format's option provider or null for no custom options */ public IngestFormatOptions createOptionsInstances(); /** * This is a user-friendly full description of the data format that this plugin provider supports. * It will be presented to the command-line user as help when the registered data formats are * listed. * * @return The user-friendly full description for this data format */ public String getIngestFormatDescription(); /** * This plugin will be used by the ingestion framework to stage intermediate data from a local * filesystem (for example to HDFS for map reduce ingest or to kafka for kafka ingest). 
* * @return The plugin for staging to avro if it is supported * @throws UnsupportedOperationException If staging data is not supported (generally this implies * that ingesting using map-reduce or kafka will not be supported) */ public GeoWaveAvroFormatPlugin createAvroFormatPlugin(IngestFormatOptions options) throws UnsupportedOperationException; } ================================================ FILE: core/ingest/src/main/java/org/locationtech/geowave/core/ingest/spi/IngestFormatPluginRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.ingest.spi; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.config.ConfigUtils; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPluginRegistrySpi; public class IngestFormatPluginRegistry implements LocalFileIngestPluginRegistrySpi { private static Map> pluginProviderRegistry = null; public IngestFormatPluginRegistry() {} @SuppressWarnings("rawtypes") private static void initPluginProviderRegistry() { pluginProviderRegistry = new HashMap<>(); final Iterator pluginProviders = new SPIServiceRegistry(IngestFormatPluginRegistry.class).load( IngestFormatPluginProviderSpi.class); while (pluginProviders.hasNext()) { final IngestFormatPluginProviderSpi pluginProvider = pluginProviders.next(); pluginProviderRegistry.put( ConfigUtils.cleanOptionName(pluginProvider.getIngestFormatName()), pluginProvider); } } public static Map> getPluginProviderRegistry() { if (pluginProviderRegistry == null) { initPluginProviderRegistry(); } return pluginProviderRegistry; } @Override public Map> getDefaultLocalIngestPlugins() { return getPluginProviderRegistry().entrySet().stream().collect( Collectors.toMap( Entry::getKey, e -> e.getValue().createLocalFileIngestPlugin(e.getValue().createOptionsInstances()))); } } ================================================ FILE: 
core/ingest/src/main/resources/META-INF/services/java.nio.file.spi.FileSystemProvider ================================================ com.upplication.s3fs.S3FileSystemProvider hdfs.jsr203.HadoopFileSystemProvider ================================================ FILE: core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.core.ingest.operations.IngestOperationProvider ================================================ FILE: core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestPersistableRegistry ================================================ FILE: core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.ingest.IngestUrlHandlerSpi ================================================ org.locationtech.geowave.core.ingest.S3IngestHandler org.locationtech.geowave.core.ingest.HdfsIngestHandler ================================================ FILE: core/ingest/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.ingest.LocalFileIngestPluginRegistrySpi ================================================ org.locationtech.geowave.core.ingest.spi.IngestFormatPluginRegistry ================================================ FILE: core/ingest/src/test/java/org/locationtech/geowave/ingest/s3/DefaultGeoWaveAWSCredentialsProviderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.ingest.s3;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.ingest.URLIngestUtils;
import org.locationtech.geowave.core.ingest.URLIngestUtils.URLTYPE;
import org.locationtech.geowave.core.ingest.spark.SparkIngestDriver;
import com.upplication.s3fs.S3FileSystem;
import io.findify.s3mock.S3Mock;

/**
 * Verifies that the Spark ingest S3 filesystem can be initialized and a bucket listed without real
 * AWS credentials, using an embedded S3 mock server.
 */
public class DefaultGeoWaveAWSCredentialsProviderTest {

  /** End-to-end check of anonymous access against an embedded S3 mock listening on port 8001. */
  @Test
  public void testAnonymousAccess()
      throws NoSuchFieldException, SecurityException, IllegalArgumentException,
      IllegalAccessException, URISyntaxException, IOException {
    final File temp = File.createTempFile("temp", Long.toString(System.nanoTime()));
    // NOTE(review): createTempFile creates a plain file, so mkdirs() returns false here; this
    // looks harmless because withInMemoryBackend() below is what actually serves the data
    temp.mkdirs();
    final S3Mock mockS3 =
        new S3Mock.Builder().withPort(8001).withFileBackend(
            temp.getAbsolutePath()).withInMemoryBackend().build();
    mockS3.start();
    // Register the s3:// URL stream handler before any S3 paths are resolved
    URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3);
    final SparkIngestDriver sparkDriver = new SparkIngestDriver();
    final S3FileSystem s3 = sparkDriver.initializeS3FS("s3://s3.amazonaws.com");
    // Point the client at the local mock and seed a single object
    s3.getClient().setEndpoint("http://127.0.0.1:8001");
    s3.getClient().createBucket("testbucket");
    s3.getClient().putObject("testbucket", "test", "content");
    // Listing the bucket root should see exactly the one object written above
    try (Stream s =
        Files.list(URLIngestUtils.setupS3FileSystem("s3://testbucket/", "s3://s3.amazonaws.com"))) {
      Assert.assertEquals(1, s.count());
    }
    mockS3.shutdown();
  }
}

================================================
FILE: core/mapreduce/pom.xml
================================================ 4.0.0 geowave-core-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-core-mapreduce GeoWave MapReduce org.locationtech.geowave geowave-core-cli ${project.version} org.locationtech.geowave geowave-core-store ${project.version} com.upplication s3fs 1.5.3 org.apache.hadoop hadoop-client jdk.tools jdk.tools junit junit guava com.google.guava javax.servlet * io.netty netty io.netty netty-all ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/AbstractGeoWaveJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.Tool; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class can run a basic job to query GeoWave. It manages datastore connection params, * adapters, indices, query, min splits and max splits. 
*/ public abstract class AbstractGeoWaveJobRunner extends Configured implements Tool { protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWaveJobRunner.class); protected DataStorePluginOptions dataStoreOptions; protected QueryConstraints constraints = null; protected CommonQueryOptions commonOptions; protected DataTypeQueryOptions dataTypeOptions; protected IndexQueryOptions indexOptions; protected Integer minInputSplits = null; protected Integer maxInputSplits = null; public AbstractGeoWaveJobRunner(final DataStorePluginOptions dataStoreOptions) { this.dataStoreOptions = dataStoreOptions; } /** Main method to execute the MapReduce analytic. */ public int runJob() throws Exception { final Job job = Job.getInstance(super.getConf()); // must use the assembled job configuration final Configuration conf = job.getConfiguration(); GeoWaveInputFormat.setStoreOptions(conf, dataStoreOptions); GeoWaveOutputFormat.setStoreOptions(conf, dataStoreOptions); job.setJarByClass(this.getClass()); configure(job); if (commonOptions != null) { GeoWaveInputFormat.setCommonQueryOptions(conf, commonOptions); } if (dataTypeOptions != null) { GeoWaveInputFormat.setDataTypeQueryOptions( conf, dataTypeOptions, dataStoreOptions.createAdapterStore(), dataStoreOptions.createInternalAdapterStore()); } if (indexOptions != null) { GeoWaveInputFormat.setIndexQueryOptions( conf, indexOptions, dataStoreOptions.createIndexStore()); } if (constraints != null) { GeoWaveInputFormat.setQueryConstraints(conf, constraints); } if (minInputSplits != null) { GeoWaveInputFormat.setMinimumSplitCount(conf, minInputSplits); } if (maxInputSplits != null) { GeoWaveInputFormat.setMaximumSplitCount(conf, maxInputSplits); } final boolean jobSuccess = job.waitForCompletion(true); return (jobSuccess) ? 
0 : 1; } protected abstract void configure(Job job) throws Exception; public void setMaxInputSplits(final int maxInputSplits) { this.maxInputSplits = maxInputSplits; } public void setMinInputSplits(final int minInputSplits) { this.minInputSplits = minInputSplits; } public void setQuery(final Query query) { setCommonQueryOptions(query.getCommonQueryOptions()); setDataTypeQueryOptions(query.getDataTypeQueryOptions()); setIndexQueryOptions(query.getIndexQueryOptions()); setQueryConstraints(query.getQueryConstraints()); } public void setCommonQueryOptions(final CommonQueryOptions commonOptions) { this.commonOptions = commonOptions; } public void setDataTypeQueryOptions(final DataTypeQueryOptions dataTypeOptions) { this.dataTypeOptions = dataTypeOptions; } public void setIndexQueryOptions(final IndexQueryOptions indexOptions) { this.indexOptions = indexOptions; } public void setQueryConstraints(final QueryConstraints constraints) { this.constraints = constraints; } @Override public int run(final String[] args) throws Exception { return runOperation(args) ? 0 : -1; } public boolean runOperation(final String[] args) throws ParseException { try { return runJob() == 0 ? true : false; } catch (final Exception e) { LOGGER.error("Unable to run job", e); throw new ParseException(e.getMessage()); } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/BaseMapReduceDataStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.PropertyStore; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.base.BaseDataStore; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat.GeoWaveRecordWriter; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.locationtech.geowave.mapreduce.splits.GeoWaveRecordReader; import 
org.locationtech.geowave.mapreduce.splits.SplitsProvider; public class BaseMapReduceDataStore extends BaseDataStore implements MapReduceDataStore { protected final SplitsProvider splitsProvider; public BaseMapReduceDataStore( final IndexStore indexStore, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final AdapterIndexMappingStore indexMappingStore, final MapReduceDataStoreOperations operations, final DataStoreOptions options, final InternalAdapterStore adapterMappingStore, final PropertyStore propertyStore) { super( indexStore, adapterStore, statisticsStore, indexMappingStore, operations, options, adapterMappingStore, propertyStore); splitsProvider = createSplitsProvider(); } @Override public RecordWriter, Object> createRecordWriter( final TaskAttemptContext context, final IndexStore jobContextIndexStore, final TransientAdapterStore jobContextAdapterStore) { return new GeoWaveRecordWriter(this, jobContextIndexStore, jobContextAdapterStore); } @Override public void prepareRecordWriter(final Configuration conf) { // generally this can be a no-op, but gives the datastore an opportunity // to set specialized configuration for a job prior to submission } @Override public RecordReader createRecordReader( final CommonQueryOptions commonOptions, final DataTypeQueryOptions typeOptions, final IndexQueryOptions indexOptions, final QueryConstraints constraints, final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore aimStore, final DataStatisticsStore statsStore, final IndexStore indexStore, final boolean isOutputWritable, final InputSplit inputSplit) throws IOException, InterruptedException { return new GeoWaveRecordReader( commonOptions, typeOptions, indexOptions, constraints, isOutputWritable, adapterStore, internalAdapterStore, aimStore, indexStore, (MapReduceDataStoreOperations) baseOperations, baseOptions.getDataIndexBatchSize()); } protected SplitsProvider 
createSplitsProvider() { return new SplitsProvider(); } @Override public List getSplits( final CommonQueryOptions commonOptions, final DataTypeQueryOptions typeOptions, final IndexQueryOptions indexOptions, final QueryConstraints constraints, final TransientAdapterStore adapterStore, final AdapterIndexMappingStore aimStore, final DataStatisticsStore statsStore, final InternalAdapterStore internalAdapterStore, final IndexStore indexStore, final JobContext context, final Integer minSplits, final Integer maxSplits) throws IOException, InterruptedException { return splitsProvider.getSplits( baseOperations, commonOptions, typeOptions, indexOptions, constraints, adapterStore, statsStore, internalAdapterStore, indexStore, aimStore, context, minSplits, maxSplits); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveConfiguratorBase.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.MRJobConfig; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import java.io.IOException; import java.lang.reflect.Method; import java.util.*; import java.util.Map.Entry; /** This class forms the basis for GeoWave input and output format configuration. 
*/ public class GeoWaveConfiguratorBase { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveConfiguratorBase.class); private static final String KEY_SEPARATOR = "-"; public static enum GeoWaveConfg { INDEX, DATA_ADAPTER, INTERNAL_ADAPTER, ADAPTER_TO_INDEX, STORE_CONFIG_OPTION } /** * Provides a configuration key for a given feature enum, prefixed by the implementingClass, and * suffixed by a custom String * * @param implementingClass the class whose name will be used as a prefix for the property * configuration key * @param e the enum used to provide the unique part of the configuration key * @param suffix the custom suffix to be used in the configuration key * @return the configuration key */ public static String enumToConfKey( final Class implementingClass, final Enum e, final String suffix) { return enumToConfKey(implementingClass, e) + KEY_SEPARATOR + suffix; } /** * Provides a configuration key for a given feature enum, prefixed by the implementingClass * * @param implementingClass the class whose name will be used as a prefix for the property * configuration key * @param e the enum used to provide the unique part of the configuration key * @return the configuration key */ public static String enumToConfKey(final Class implementingClass, final Enum e) { final String s = implementingClass.getSimpleName() + "." + e.getDeclaringClass().getSimpleName() + "." 
+ org.apache.hadoop.util.StringUtils.camelize(e.name().toLowerCase(Locale.ENGLISH)); return s; } public static final T getInstance( final Class implementingClass, final Enum e, final JobContext context, final Class interfaceClass) throws InstantiationException, IllegalAccessException { return (T) getConfiguration(context).getClass( enumToConfKey(implementingClass, e), interfaceClass).newInstance(); } public static final T getInstance( final Class implementingClass, final Enum e, final JobContext context, final Class interfaceClass, final Class defaultClass) throws InstantiationException, IllegalAccessException { return getConfiguration(context).getClass( enumToConfKey(implementingClass, e), defaultClass, interfaceClass).newInstance(); } public static DataStore getDataStore(final Class implementingClass, final JobContext context) { return GeoWaveStoreFinder.createDataStore(getStoreOptionsMap(implementingClass, context)); } public static DataStatisticsStore getDataStatisticsStore( final Class implementingClass, final JobContext context) { return GeoWaveStoreFinder.createDataStatisticsStore( getStoreOptionsMap(implementingClass, context)); } public static void setStoreOptionsMap( final Class implementingClass, final Configuration config, final Map dataStoreOptions) { if ((dataStoreOptions != null) && !dataStoreOptions.isEmpty()) { for (final Entry entry : dataStoreOptions.entrySet()) { config.set( enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION, entry.getKey()), entry.getValue()); } } else { final Map existingVals = config.getValByRegex( enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION) + "*"); for (final String k : existingVals.keySet()) { config.unset(k); } } } public static DataStorePluginOptions getStoreOptions( final Class implementingClass, final JobContext context) { final Map options = getStoreOptionsMapInternal(implementingClass, getConfiguration(context)); try { return new DataStorePluginOptions(options); } catch (final 
IllegalArgumentException e) { LOGGER.warn("Unable to get data store options from job context", e); return null; } } public static Map getStoreOptionsMap( final Class implementingClass, final JobContext context) { return getStoreOptionsMapInternal(implementingClass, getConfiguration(context)); } public static void addIndex( final Class implementingClass, final Configuration config, final Index index) { if (index != null) { config.set( enumToConfKey(implementingClass, GeoWaveConfg.INDEX, index.getName()), ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(index))); } } public static Index getIndex( final Class implementingClass, final JobContext context, final String indexName) { return getIndexInternal(implementingClass, getConfiguration(context), indexName); } public static Short getAdapterId( final Class implementingClass, final JobContext context, final String typeName) { return getAdapterIdInternal(implementingClass, getConfiguration(context), typeName); } private static Short getAdapterIdInternal( final Class implementingClass, final Configuration configuration, final String typeName) { final String input = configuration.get( enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER, typeName)); if (input != null) { return Short.valueOf(input); } return null; } public static String getTypeName( final Class implementingClass, final JobContext context, final short internalAdapterId) { return getTypeNameInternal(implementingClass, getConfiguration(context), internalAdapterId); } private static String getTypeNameInternal( final Class implementingClass, final Configuration configuration, final short internalAdapterId) { final String prefix = enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER); final Map input = configuration.getValByRegex(prefix + "*"); final String internalAdapterIdStr = Short.toString(internalAdapterId); for (final Entry e : input.entrySet()) { if (e.getValue().equals(internalAdapterIdStr)) { return 
e.getKey().substring(prefix.length() + 1); } } return null; } public static void addTypeName( final Class implementingClass, final Configuration conf, final String typeName, final short internalAdapterId) { conf.set( enumToConfKey(implementingClass, GeoWaveConfg.INTERNAL_ADAPTER, typeName), Short.toString(internalAdapterId)); } public static void addAdapterToIndexMappings( final Class implementingClass, final Configuration conf, final AdapterToIndexMapping[] adapterToIndexMappings) { if (adapterToIndexMappings != null && adapterToIndexMappings.length > 0) { conf.set( enumToConfKey( implementingClass, GeoWaveConfg.ADAPTER_TO_INDEX, Short.toString(adapterToIndexMappings[0].getAdapterId())), ByteArrayUtils.byteArrayToString( PersistenceUtils.toBinary(Lists.newArrayList(adapterToIndexMappings)))); } } public static AdapterToIndexMapping[] getAdapterToIndexMappings( final Class implementingClass, final JobContext context, final short internalAdapterId) { return getAdapterToIndexMappingsInternal( implementingClass, getConfiguration(context), internalAdapterId); } private static AdapterToIndexMapping[] getAdapterToIndexMappingsInternal( final Class implementingClass, final Configuration configuration, final short internalAdapterId) { final String input = configuration.get( enumToConfKey( implementingClass, GeoWaveConfg.ADAPTER_TO_INDEX, Short.toString(internalAdapterId))); if (input != null) { final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(input); return PersistenceUtils.fromBinaryAsList(dataAdapterBytes).toArray( new AdapterToIndexMapping[0]); } return null; } public static void addDataAdapter( final Class implementingClass, final Configuration conf, final DataTypeAdapter adapter) { if (adapter != null) { conf.set( enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER, adapter.getTypeName()), ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(adapter))); } } public static void removeDataAdapter( final Class implementingClass, final 
Configuration conf, final String typeName) { if (typeName != null) { conf.unset(enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER, typeName)); } } public static DataTypeAdapter getDataAdapter( final Class implementingClass, final JobContext context, final String typeName) { return getDataAdapterInternal(implementingClass, getConfiguration(context), typeName); } private static DataTypeAdapter getDataAdapterInternal( final Class implementingClass, final Configuration configuration, final String typeName) { final String input = configuration.get(enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER, typeName)); if (input != null) { final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(input); return (DataTypeAdapter) PersistenceUtils.fromBinary(dataAdapterBytes); } return null; } public static DataTypeAdapter[] getDataAdapters( final Class implementingClass, final JobContext context) { return getDataAdaptersInternal(implementingClass, getConfiguration(context)); } private static Map getStoreOptionsMapInternal( final Class implementingClass, final Configuration configuration) { final String prefix = enumToConfKey(implementingClass, GeoWaveConfg.STORE_CONFIG_OPTION) + KEY_SEPARATOR; final Map enumMap = configuration.getValByRegex(prefix + "*"); final Map retVal = new HashMap<>(); for (final Entry entry : enumMap.entrySet()) { final String key = entry.getKey(); retVal.put(key.substring(prefix.length()), entry.getValue()); } return retVal; } private static DataTypeAdapter[] getDataAdaptersInternal( final Class implementingClass, final Configuration configuration) { final Map input = configuration.getValByRegex( enumToConfKey(implementingClass, GeoWaveConfg.DATA_ADAPTER) + "*"); if (input != null) { final List> adapters = new ArrayList<>(input.size()); for (final String dataAdapterStr : input.values()) { final byte[] dataAdapterBytes = ByteArrayUtils.byteArrayFromString(dataAdapterStr); adapters.add((DataTypeAdapter) 
PersistenceUtils.fromBinary(dataAdapterBytes)); } return adapters.toArray(new DataTypeAdapter[adapters.size()]); } return new DataTypeAdapter[] {}; } private static Index getIndexInternal( final Class implementingClass, final Configuration configuration, final String indexName) { final String input = configuration.get(enumToConfKey(implementingClass, GeoWaveConfg.INDEX, indexName)); if (input != null) { final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(input); return (Index) PersistenceUtils.fromBinary(indexBytes); } return null; } public static Index[] getIndices(final Class implementingClass, final JobContext context) { return getIndicesInternal(implementingClass, getConfiguration(context)); } public static IndexStore getJobContextIndexStore( final Class implementingClass, final JobContext context) { final Map configOptions = getStoreOptionsMap(implementingClass, context); return new JobContextIndexStore(context, GeoWaveStoreFinder.createIndexStore(configOptions)); } public static TransientAdapterStore getJobContextAdapterStore( final Class implementingClass, final JobContext context) { final Map configOptions = getStoreOptionsMap(implementingClass, context); return new JobContextAdapterStore( context, GeoWaveStoreFinder.createAdapterStore(configOptions), getJobContextInternalAdapterStore(implementingClass, context)); } public static AdapterIndexMappingStore getJobContextAdapterIndexMappingStore( final Class implementingClass, final JobContext context) { final Map configOptions = getStoreOptionsMap(implementingClass, context); return new JobContextAdapterIndexMappingStore( context, GeoWaveStoreFinder.createAdapterIndexMappingStore(configOptions)); } public static InternalAdapterStore getJobContextInternalAdapterStore( final Class implementingClass, final JobContext context) { final Map configOptions = getStoreOptionsMap(implementingClass, context); return new JobContextInternalAdapterStore( context, 
GeoWaveStoreFinder.createInternalAdapterStore(configOptions)); } private static Index[] getIndicesInternal( final Class implementingClass, final Configuration configuration) { final Map input = configuration.getValByRegex(enumToConfKey(implementingClass, GeoWaveConfg.INDEX) + "*"); if (input != null) { final List indices = new ArrayList<>(input.size()); for (final String indexStr : input.values()) { final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(indexStr); indices.add((Index) PersistenceUtils.fromBinary(indexBytes)); } return indices.toArray(new Index[indices.size()]); } return new Index[] {}; } // use reflection to pull the Configuration out of the JobContext for Hadoop // 1 and Hadoop 2 compatibility public static Configuration getConfiguration(final JobContext context) { try { final Class c = GeoWaveConfiguratorBase.class.getClassLoader().loadClass( "org.apache.hadoop.mapreduce.JobContext"); final Method m = c.getMethod("getConfiguration"); final Object o = m.invoke(context, new Object[0]); return (Configuration) o; } catch (final Exception e) { throw new RuntimeException(e); } } public static void setRemoteInvocationParams( final String hdfsHostPort, final String jobTrackerOrResourceManagerHostPort, final Configuration conf) throws IOException { conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true); String finalHdfsHostPort; // Ensures that the url starts with hdfs:// if (!hdfsHostPort.contains("://")) { finalHdfsHostPort = "hdfs://" + hdfsHostPort; } else { finalHdfsHostPort = hdfsHostPort; } conf.set("fs.defaultFS", finalHdfsHostPort); conf.set("fs.AbstractFileSystem.hdfs.impl", org.apache.hadoop.fs.Hdfs.class.getName()); conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()); // if this property is used, it hadoop does not support yarn conf.set("mapreduce.jobtracker.address", jobTrackerOrResourceManagerHostPort); // the following 3 
properties will only be used if the hadoop version // does support yarn if ("local".equals(jobTrackerOrResourceManagerHostPort)) { conf.set("mapreduce.framework.name", "local"); } else { conf.set("mapreduce.framework.name", "yarn"); } conf.set("yarn.resourcemanager.address", jobTrackerOrResourceManagerHostPort); // if remotely submitted with yarn, the job configuration xml will be // written to this staging directory, it is generally good practice to // ensure the staging directory is different for each user String user = System.getProperty("user.name"); if ((user == null) || user.isEmpty()) { user = "default"; } conf.set("yarn.app.mapreduce.am.staging-dir", "/tmp/hadoop-" + user); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveKey.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparator; import org.locationtech.geowave.core.index.ByteArrayUtils; /** * This is the base class for both GeoWaveInputKey and GeoWaveOutputKey and is responsible for * persisting the adapter ID */ public abstract class GeoWaveKey implements WritableComparable, java.io.Serializable { /** */ private static final long serialVersionUID = 1L; protected Short adapterId; protected GeoWaveKey() {} public GeoWaveKey(final short adapterId) { this.adapterId = adapterId; } public short getadapterId() { return adapterId; } public void setAdapterId(final short adapterId) { this.adapterId = adapterId; } @Override public int compareTo(final GeoWaveKey o) { final byte[] internalAdapterIdBytes = ByteArrayUtils.shortToByteArray(adapterId); return WritableComparator.compareBytes( internalAdapterIdBytes, 0, internalAdapterIdBytes.length, ByteArrayUtils.shortToByteArray(o.adapterId), 0, ByteArrayUtils.shortToByteArray(o.adapterId).length); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((adapterId == null) ? 
0 : adapterId.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final GeoWaveKey other = (GeoWaveKey) obj; if (adapterId == null) { if (other.adapterId != null) { return false; } } else if (!adapterId.equals(other.adapterId)) { return false; } return true; } @Override public void readFields(final DataInput input) throws IOException { // final int adapterIdLength = input.readInt(); // final byte[] adapterIdBinary = new byte[adapterIdLength]; // input.readFully(adapterIdBinary); // adapterId = new ByteArrayId(adapterIdBinary); adapterId = input.readShort(); } @Override public void write(final DataOutput output) throws IOException { // final byte[] adapterIdBinary = adapterId.getBytes(); // output.writeInt(adapterIdBinary.length); // output.write(adapterIdBinary); output.writeShort(adapterId); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.MapContext; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This abstract class can be extended by GeoWave analytics. It handles the conversion of native * GeoWave objects into objects that are writable. It is a mapper that converts to writable objects * for both inputs and outputs. This conversion will only work if the data adapter implements * HadoopDataAdapter. */ public abstract class GeoWaveMapper extends Mapper { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableInputMapper.class); protected HadoopWritableSerializationTool serializationTool; @Override protected void map( final GeoWaveInputKey key, final ObjectWritable value, final Mapper.Context context) throws IOException, InterruptedException { mapWritableValue(key, value, context); } protected void mapWritableValue( final GeoWaveInputKey key, final ObjectWritable value, final Mapper.Context context) throws IOException, InterruptedException { mapNativeValue( key, serializationTool.fromWritable(key.getInternalAdapterId(), value), new NativeMapContext<>(context, serializationTool)); } protected abstract void mapNativeValue( final GeoWaveInputKey key, final Object value, final MapContext context) throws IOException, InterruptedException; @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { serializationTool = new 
HadoopWritableSerializationTool(context); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.ReduceContext; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Iterables; /** * This abstract class can be extended by GeoWave analytics. It handles the conversion of native * GeoWave objects into objects that are writable.It is a reducer that converts to writable objects * for both inputs and outputs. This conversion will only work if the data adapter implements * HadoopDataAdapter. */ public abstract class GeoWaveReducer extends Reducer { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveReducer.class); protected HadoopWritableSerializationTool serializationTool; @Override protected void reduce( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { reduceWritableValues(key, values, context); } protected void reduceWritableValues( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final HadoopWritableSerializer serializer = serializationTool.getHadoopWritableSerializerForAdapter(key.getInternalAdapterId()); final Iterable transformedValues = Iterables.transform(values, writable -> { final Object innerObj = writable.get(); return innerObj instanceof Writable ? 
serializer.fromWritable((Writable) innerObj) : innerObj; }); reduceNativeValues(key, transformedValues, new NativeReduceContext(context, serializationTool)); } protected abstract void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final ReduceContext context) throws IOException, InterruptedException; @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { serializationTool = new HadoopWritableSerializationTool(context); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableInputMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This abstract class can be extended by GeoWave analytics. It handles the conversion of native * GeoWave objects into objects that are writable. It is a mapper that converts to writable objects * for the input. This conversion will only work if the data adapter implements HadoopDataAdapter. */ public abstract class GeoWaveWritableInputMapper extends Mapper { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableInputMapper.class); protected HadoopWritableSerializationTool serializationTool; @Override protected void map( final GeoWaveInputKey key, final ObjectWritable value, final Mapper.Context context) throws IOException, InterruptedException { mapWritableValue(key, value, context); } protected void mapWritableValue( final GeoWaveInputKey key, final ObjectWritable value, final Mapper.Context context) throws IOException, InterruptedException { mapNativeValue(key, serializationTool.fromWritable(key.getInternalAdapterId(), value), context); } /** * Helper method to create an object writable from a value managed by the adapter. 
* * @param key * @param value * @return the writable object */ protected ObjectWritable toWritableValue(final GeoWaveInputKey key, final Object value) { return serializationTool.toWritable(key.getInternalAdapterId(), value); } protected abstract void mapNativeValue( final GeoWaveInputKey key, final Object value, final Mapper.Context context) throws IOException, InterruptedException; @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { serializationTool = new HadoopWritableSerializationTool(context); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableInputReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Iterables; /** * This abstract class can be extended by GeoWave analytics. It handles the conversion of native * GeoWave objects into objects that are writable. It is a reducer that converts to writable objects * for the input. This conversion will only work if the data adapter implements HadoopDataAdapter. */ public abstract class GeoWaveWritableInputReducer extends Reducer { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableInputReducer.class); protected HadoopWritableSerializationTool serializationTool; @Override protected void reduce( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { reduceWritableValues(key, values, context); } protected void reduceWritableValues( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final HadoopWritableSerializer serializer = serializationTool.getHadoopWritableSerializerForAdapter(key.getInternalAdapterId()); final Iterable transformedValues = Iterables.transform(values, writable -> { final Object innerObj = writable.get(); return (innerObj instanceof Writable) ? 
serializer.fromWritable((Writable) innerObj) : innerObj; }); reduceNativeValues(key, transformedValues, context); } protected abstract void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException; @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { serializationTool = new HadoopWritableSerializationTool(context); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableOutputMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.MapContext; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This abstract class can be extended by GeoWave analytics. It handles the conversion of native * GeoWave objects into objects that are writable. It is a mapper that converts to writable objects * for the output. This conversion will only work if the data adapter implements HadoopDataAdapter. */ public abstract class GeoWaveWritableOutputMapper extends Mapper { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableOutputMapper.class); protected HadoopWritableSerializationTool serializationTool; @Override protected void map( final KEYIN key, final VALUEIN value, final Mapper.Context context) throws IOException, InterruptedException { mapWritableValue(key, value, context); } protected void mapWritableValue( final KEYIN key, final VALUEIN value, final Mapper.Context context) throws IOException, InterruptedException { mapNativeValue(key, value, new NativeMapContext(context, serializationTool)); } protected abstract void mapNativeValue( final KEYIN key, final VALUEIN value, final MapContext context) throws IOException, InterruptedException; @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { serializationTool = new HadoopWritableSerializationTool(context); } } ================================================ FILE: 
core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/GeoWaveWritableOutputReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.ReduceContext; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This abstract class can be extended by GeoWave analytics. It handles the conversion of native * GeoWave objects into objects that are writable. It is a reducer that converts to writable objects * for the output. This conversion will only work if the data adapter implements HadoopDataAdapter. */ public abstract class GeoWaveWritableOutputReducer extends Reducer { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveWritableOutputReducer.class); protected HadoopWritableSerializationTool serializationTool; @Override protected void reduce( final KEYIN key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { reduceWritableValues(key, values, context); } protected void reduceWritableValues( final KEYIN key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { reduceNativeValues(key, values, new NativeReduceContext(context, serializationTool)); } protected abstract void reduceNativeValues( final KEYIN key, final Iterable values, final ReduceContext context) throws IOException, InterruptedException; @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { serializationTool = new HadoopWritableSerializationTool(context); } } 
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import org.apache.hadoop.io.Writable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * This is an interface that extends data adapter to allow map reduce jobs to easily convert hadoop * writable objects to and from the geowave native representation of the objects. This allow for * generally applicable map reduce jobs to be written using base classes for the mapper that can * handle translations. * * @param the native type * @param the writable type */ public interface HadoopDataAdapter extends DataTypeAdapter { public HadoopWritableSerializer createWritableSerializer(); } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopWritableSerializationTool.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.util.HashMap; import java.util.Map; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; /** * Use this class to maintain a set of serializers per adapters associated with the context of a * single mapper or reducer. The intent is to support maintaining single set of Writable instances. * By the nature of holding single instances of Writable instances by the serializers, this class * and its contents may be only accessed by one 'worker' (at a time). * *

The helper methods assume all Writable instances are wrapped in an ObjectWritable. The reason * for this approach, consistent with other support classes in this package, is to allow mappers and * reducers to use the generic ObjectWritable since entry inputs maybe be associated with different * adapters, and thus have different associated Writable instances. Configuration of Hadoop Mappers * and Reducers requires a specific type. */ public class HadoopWritableSerializationTool { private final TransientAdapterStore adapterStore; private final InternalAdapterStore internalAdapterStore; private final Map> serializers = new HashMap<>(); private final ObjectWritable objectWritable = new ObjectWritable(); public HadoopWritableSerializationTool(final JobContext jobContext) { this( GeoWaveInputFormat.getJobContextAdapterStore(jobContext), GeoWaveInputFormat.getJobContextInternalAdapterStore(jobContext)); } public HadoopWritableSerializationTool( final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore) { super(); this.adapterStore = adapterStore; this.internalAdapterStore = internalAdapterStore; } public TransientAdapterStore getAdapterStore() { return adapterStore; } public InternalDataAdapter getInternalAdapter(final short adapterId) { final DataTypeAdapter adapter = adapterStore.getAdapter(internalAdapterStore.getTypeName(adapterId)); if (adapter instanceof InternalDataAdapter) { return (InternalDataAdapter) adapter; } return adapter.asInternalAdapter(adapterId); } public DataTypeAdapter getAdapter(final String typeName) { return adapterStore.getAdapter(typeName); } public HadoopWritableSerializer getHadoopWritableSerializerForAdapter( final short adapterId) { return getHadoopWritableSerializerForAdapter(internalAdapterStore.getTypeName(adapterId)); } public HadoopWritableSerializer getHadoopWritableSerializerForAdapter( final String typeName) { HadoopWritableSerializer serializer = serializers.get(typeName); if (serializer == null) { 
DataTypeAdapter adapter; HadoopDataAdapter hadoopAdapter = null; if (((adapterStore != null) && ((adapter = adapterStore.getAdapter(typeName)) != null))) { if (adapter instanceof HadoopDataAdapter) { hadoopAdapter = (HadoopDataAdapter) adapter; } else if ((adapter instanceof InternalDataAdapter) && (((InternalDataAdapter) adapter).getAdapter() instanceof HadoopDataAdapter)) { hadoopAdapter = (HadoopDataAdapter) ((InternalDataAdapter) adapter).getAdapter(); } } if (hadoopAdapter != null) { serializer = hadoopAdapter.createWritableSerializer(); serializers.put(typeName, serializer); } else { serializer = new HadoopWritableSerializer() { final ObjectWritable writable = new ObjectWritable(); @Override public ObjectWritable toWritable(final Object entry) { writable.set(entry); return writable; } @Override public Object fromWritable(final Writable writable) { return ((ObjectWritable) writable).get(); } }; } } return serializer; } public ObjectWritable toWritable(final short adapterId, final Object entry) { if (entry instanceof Writable) { objectWritable.set(entry); } else { objectWritable.set(getHadoopWritableSerializerForAdapter(adapterId).toWritable(entry)); } return objectWritable; } public Object fromWritable(final String typeName, final ObjectWritable writable) { final Object innerObj = writable.get(); return (innerObj instanceof Writable) ? getHadoopWritableSerializerForAdapter(typeName).fromWritable((Writable) innerObj) : innerObj; } public Object fromWritable(final short adapterId, final ObjectWritable writable) { final Object innerObj = writable.get(); return (innerObj instanceof Writable) ? getHadoopWritableSerializerForAdapter(adapterId).fromWritable((Writable) innerObj) : innerObj; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopWritableSerializer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import org.apache.hadoop.io.Writable; /** * @param the native type * @param the writable type */ public interface HadoopWritableSerializer { public W toWritable(T entry); public T fromWritable(W writable); } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextAdapterIndexMappingStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import com.google.common.collect.Lists; /** * This class implements an adapter index mapping store by first checking the job context for an * adapter and keeping a local cache of adapters that have been discovered. It will check the * metadata store if it cannot find an adapter in the job context. 
*/ public class JobContextAdapterIndexMappingStore implements AdapterIndexMappingStore { private static final Class CLASS = JobContextAdapterIndexMappingStore.class; private final JobContext context; private final AdapterIndexMappingStore persistentAdapterIndexMappingStore; private final Map> adapterCache = new HashMap<>(); public JobContextAdapterIndexMappingStore( final JobContext context, final AdapterIndexMappingStore persistentAdapterIndexMappingStore) { this.context = context; this.persistentAdapterIndexMappingStore = persistentAdapterIndexMappingStore; } private AdapterToIndexMapping[] getIndicesForAdapterInternal(final short internalAdapterId) { // first try to get it from the job context AdapterToIndexMapping[] adapter = getAdapterToIndexMapping(context, internalAdapterId); if (adapter == null) { // then try to get it from the persistent store adapter = persistentAdapterIndexMappingStore.getIndicesForAdapter(internalAdapterId); } if (adapter != null) { adapterCache.put(internalAdapterId, Lists.newArrayList(adapter)); } return adapter; } @Override public void removeAll() { adapterCache.clear(); } protected static AdapterToIndexMapping[] getAdapterToIndexMapping( final JobContext context, final short internalAdapterId) { return GeoWaveConfiguratorBase.getAdapterToIndexMappings(CLASS, context, internalAdapterId); } public static void addAdapterToIndexMapping( final Configuration configuration, final AdapterToIndexMapping[] adapter) { GeoWaveConfiguratorBase.addAdapterToIndexMappings(CLASS, configuration, adapter); } @Override public AdapterToIndexMapping[] getIndicesForAdapter(final short adapterId) { List adapterList = adapterCache.get(adapterId); if (adapterList == null) { return getIndicesForAdapterInternal(adapterId); } return adapterList.toArray(new AdapterToIndexMapping[adapterList.size()]); } @Override public AdapterToIndexMapping getMapping(final short adapterId, final String indexName) { if (indexName.equals(DataIndexUtils.DATA_ID_INDEX.getName())) { 
return new AdapterToIndexMapping(adapterId, indexName, Lists.newArrayList()); } final AdapterToIndexMapping[] adapterIndices = getIndicesForAdapter(adapterId); return Arrays.stream(adapterIndices).filter( mapping -> mapping.getIndexName().equals(indexName)).findFirst().orElse(null); } @Override public void addAdapterIndexMapping(final AdapterToIndexMapping mapping) { if (!adapterCache.containsKey(mapping.getAdapterId())) { adapterCache.put(mapping.getAdapterId(), Lists.newArrayList()); } adapterCache.get(mapping.getAdapterId()).add(mapping); } @Override public void remove(final short internalAdapterId) { adapterCache.remove(internalAdapterId); } @Override public boolean remove(final short internalAdapterId, final String indexName) { if (!adapterCache.containsKey(internalAdapterId)) { return false; } final List mappings = adapterCache.get(internalAdapterId); AdapterToIndexMapping found = null; for (int i = 0; i < mappings.size(); i++) { if (mappings.get(i).getIndexName().compareTo(indexName) == 0) { found = mappings.get(i); break; } } if (found == null) { return false; } if (mappings.size() > 1) { mappings.remove(found); } else { // otherwise just remove the mapping adapterCache.remove(internalAdapterId); } return true; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextAdapterStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.collections.IteratorUtils; import org.apache.commons.collections.Transformer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * This class implements an adapter store by first checking the job context for an adapter and * keeping a local cache of adapters that have been discovered. It will check the metadata store if * it cannot find an adapter in the job context. 
*/ public class JobContextAdapterStore implements TransientAdapterStore { private static final Class CLASS = JobContextAdapterStore.class; private final JobContext context; private PersistentAdapterStore persistentAdapterStore = null; private InternalAdapterStore internalAdapterStore = null; private final Map> adapterCache = new HashMap<>(); public JobContextAdapterStore( final JobContext context, final PersistentAdapterStore persistentAdapterStore, final InternalAdapterStore internalAdapterStore) { this.context = context; this.persistentAdapterStore = persistentAdapterStore; this.internalAdapterStore = internalAdapterStore; } @Override public void addAdapter(final DataTypeAdapter adapter) { adapterCache.put(adapter.getTypeName(), adapter); } @Override public void removeAdapter(final String typeName) { adapterCache.remove(typeName); } @Override public DataTypeAdapter getAdapter(final String typeName) { DataTypeAdapter adapter = adapterCache.get(typeName); if (adapter == null) { adapter = getAdapterInternal(typeName); } return adapter; } @Override public boolean adapterExists(final String typeName) { if (adapterCache.containsKey(typeName)) { return true; } final DataTypeAdapter adapter = getAdapterInternal(typeName); return adapter != null; } private DataTypeAdapter getAdapterInternal(final String typeName) { // first try to get it from the job context DataTypeAdapter adapter = getDataAdapter(context, typeName); if (adapter == null) { // then try to get it from the persistent store adapter = persistentAdapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)); } if (adapter != null) { adapterCache.put(typeName, adapter); } return adapter; } @Override public void removeAll() { adapterCache.clear(); } @Override public DataTypeAdapter[] getAdapters() { final InternalDataAdapter[] adapters = persistentAdapterStore.getAdapters(); // cache any results Arrays.stream(adapters).forEach(a -> adapterCache.put(a.getTypeName(), a)); return adapters; } public List 
getTypeNames() { final DataTypeAdapter[] userAdapters = GeoWaveConfiguratorBase.getDataAdapters(CLASS, context); if ((userAdapters == null) || (userAdapters.length <= 0)) { return IteratorUtils.toList( IteratorUtils.transformedIterator( Arrays.stream(getAdapters()).iterator(), new Transformer() { @Override public Object transform(final Object input) { if (input instanceof DataTypeAdapter) { return ((DataTypeAdapter) input).getTypeName(); } return input; } })); } else { final List retVal = new ArrayList<>(userAdapters.length); for (final DataTypeAdapter adapter : userAdapters) { retVal.add(adapter.getTypeName()); } return retVal; } } protected static DataTypeAdapter getDataAdapter( final JobContext context, final String typeName) { return GeoWaveConfiguratorBase.getDataAdapter(CLASS, context, typeName); } public static DataTypeAdapter[] getDataAdapters(final JobContext context) { return GeoWaveConfiguratorBase.getDataAdapters(CLASS, context); } public static void addDataAdapter( final Configuration configuration, final DataTypeAdapter adapter) { GeoWaveConfiguratorBase.addDataAdapter(CLASS, configuration, adapter); } public static void removeAdapter(final Configuration configuration, final String typeName) { GeoWaveConfiguratorBase.removeDataAdapter(CLASS, configuration, typeName); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextIndexStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.util.HashMap; import java.util.Map; import org.apache.commons.collections.IteratorUtils; import org.apache.commons.collections.Transformer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.IndexStore; /** * This class implements an index store by first checking the job context for an index and keeping a * local cache of indices that have been discovered. It will check the metadata store if it cannot * find an index in the job context. 
*/ public class JobContextIndexStore implements IndexStore { private static final Class CLASS = JobContextIndexStore.class; private final JobContext context; private final IndexStore persistentIndexStore; private final Map indexCache = new HashMap<>(); public JobContextIndexStore(final JobContext context, final IndexStore persistentIndexStore) { this.context = context; this.persistentIndexStore = persistentIndexStore; } @Override public void addIndex(final Index index) { indexCache.put(index.getName(), index); } @Override public Index getIndex(final String indexName) { Index index = indexCache.get(indexName); if (index == null) { index = getIndexInternal(indexName); } return index; } @Override public boolean indexExists(final String indexName) { if (indexCache.containsKey(indexName)) { return true; } final Index index = getIndexInternal(indexName); return index != null; } private Index getIndexInternal(final String indexName) { // first try to get it from the job context Index index = getIndex(context, indexName); if (index == null) { // then try to get it from the accumulo persistent store index = persistentIndexStore.getIndex(indexName); } if (index != null) { indexCache.put(indexName, index); } return index; } @Override public void removeAll() { indexCache.clear(); } @Override public void removeIndex(final String indexName) { indexCache.remove(indexName); } @Override public CloseableIterator getIndices() { final CloseableIterator it = persistentIndexStore.getIndices(); // cache any results return new CloseableIteratorWrapper( it, IteratorUtils.transformedIterator(it, new Transformer() { @Override public Object transform(final Object obj) { indexCache.put(((Index) obj).getName(), (Index) obj); return obj; } })); } public static void addIndex(final Configuration config, final Index index) { GeoWaveConfiguratorBase.addIndex(CLASS, config, index); } protected static Index getIndex(final JobContext context, final String indexName) { return 
GeoWaveConfiguratorBase.getIndex(CLASS, context, indexName); } public static Index[] getIndices(final JobContext context) { return GeoWaveConfiguratorBase.getIndices(CLASS, context); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/JobContextInternalAdapterStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Maps;

/**
 * An {@link InternalAdapterStore} that resolves type-name/adapter-id mappings first from the Hadoop
 * job configuration and then from a persistent backing store, memoizing every successful lookup in
 * a synchronized bidirectional cache. Mutating operations delegate to the persistent store.
 */
public class JobContextInternalAdapterStore implements InternalAdapterStore {
  // Class<?> rather than a raw Class (raw type fixed; generics were stripped by extraction)
  private static final Class<?> CLASS = JobContextInternalAdapterStore.class;
  private final JobContext context;
  private final InternalAdapterStore persistentInternalAdapterStore;
  // typeName <-> adapterId; synchronized so the forward and inverse views share one lock
  protected final BiMap<String, Short> cache = Maps.synchronizedBiMap(HashBiMap.create());

  public JobContextInternalAdapterStore(
      final JobContext context,
      final InternalAdapterStore persistentInternalAdapterStore) {
    this.context = context;
    this.persistentInternalAdapterStore = persistentInternalAdapterStore;
  }

  @Override
  public String getTypeName(final short adapterId) {
    String typeName = cache.inverse().get(adapterId);
    if (typeName == null) {
      typeName = getTypeNameInternal(adapterId);
    }
    return typeName;
  }

  /**
   * Cache-miss path for {@link #getTypeName(short)}; renamed from the original typo
   * "getTypeNameIInternal" (private, so callers are unaffected).
   */
  private String getTypeNameInternal(final short adapterId) {
    // first try to get it from the job context
    String typeName = getAdapterIdFromJobContext(adapterId);
    if (typeName == null) {
      // then try to get it from the persistent store
      typeName = persistentInternalAdapterStore.getTypeName(adapterId);
    }
    if (typeName != null) {
      cache.put(typeName, adapterId);
    }
    return typeName;
  }

  /** Cache-miss path for {@link #getAdapterId(String)}. */
  private Short getAdapterIdInternal(final String typeName) {
    // first try to get it from the job context
    Short internalAdapterId = getAdapterIdFromJobContext(typeName);
    if (internalAdapterId == null) {
      // then try to get it from the persistent store
      internalAdapterId = persistentInternalAdapterStore.getAdapterId(typeName);
    }
    if (internalAdapterId != null) {
      cache.put(typeName, internalAdapterId);
    }
    return internalAdapterId;
  }

  @Override
  public Short getAdapterId(final String typeName) {
    Short internalAdapterId = cache.get(typeName);
    if (internalAdapterId == null) {
      internalAdapterId = getAdapterIdInternal(typeName);
    }
    return internalAdapterId;
  }

  @Override
  public short getInitialAdapterId(final String typeName) {
    return InternalAdapterStoreImpl.getLazyInitialAdapterId(typeName);
  }

  protected Short getAdapterIdFromJobContext(final String typeName) {
    return GeoWaveConfiguratorBase.getAdapterId(CLASS, context, typeName);
  }

  protected String getAdapterIdFromJobContext(final short internalAdapterId) {
    return GeoWaveConfiguratorBase.getTypeName(CLASS, context, internalAdapterId);
  }

  @Override
  public short addTypeName(final String typeName) {
    return persistentInternalAdapterStore.addTypeName(typeName);
  }

  @Override
  public boolean remove(final String typeName) {
    // evict the cached mapping as remove(short) already does; previously only the persistent
    // store was updated, leaving a stale cache entry
    cache.remove(typeName);
    return persistentInternalAdapterStore.remove(typeName);
  }

  /** Registers a typeName-to-adapterId mapping in a job configuration. */
  public static void addTypeName(
      final Configuration configuration,
      final String typeName,
      final short adapterId) {
    GeoWaveConfiguratorBase.addTypeName(CLASS, configuration, typeName, adapterId);
  }

  @Override
  public boolean remove(final short adapterId) {
    cache.inverse().remove(adapterId);
    return persistentInternalAdapterStore.remove(adapterId);
  }

  @Override
  public void removeAll() {
    cache.clear();
    persistentInternalAdapterStore.removeAll();
  }

  @Override
  public String[] getTypeNames() {
    return persistentInternalAdapterStore.getTypeNames();
  }

  @Override
  public short[] getAdapterIds() {
    return persistentInternalAdapterStore.getAdapterIds();
  }
}
================================================ FILE:
core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/MapReduceDataStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; public interface MapReduceDataStore extends DataStore { public RecordReader createRecordReader( CommonQueryOptions commonOptions, DataTypeQueryOptions typeOptions, IndexQueryOptions indexOptions, QueryConstraints constraints, TransientAdapterStore adapterStore, InternalAdapterStore internalAdapterStore, AdapterIndexMappingStore aimStore, DataStatisticsStore statsStore, IndexStore indexStore, 
boolean isOutputWritable, InputSplit inputSplit) throws IOException, InterruptedException; public List getSplits( CommonQueryOptions commonOptions, DataTypeQueryOptions typeOptions, IndexQueryOptions indexOptions, QueryConstraints constraints, TransientAdapterStore adapterStore, AdapterIndexMappingStore aimStore, DataStatisticsStore statsStore, InternalAdapterStore internalAdapterStore, IndexStore indexStore, JobContext context, Integer minSplits, Integer maxSplits) throws IOException, InterruptedException; public RecordWriter, Object> createRecordWriter( TaskAttemptContext context, IndexStore jobContextIndexStore, TransientAdapterStore jobContextAdapterStore); public void prepareRecordWriter(Configuration conf); } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/MapReduceDataStoreOperations.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.RowReader; import org.locationtech.geowave.mapreduce.splits.RecordReaderParams; public interface MapReduceDataStoreOperations extends DataStoreOperations { RowReader createReader(RecordReaderParams readerParams); } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/MapReduceUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.util.List; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import com.google.common.base.Function; import com.google.common.collect.Lists; public class MapReduceUtils { public static List idsFromAdapters(final List> adapters) { return Lists.transform(adapters, new Function, String>() { @Override public String apply(final DataTypeAdapter adapter) { return adapter == null ? "" : adapter.getTypeName(); } }); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/NativeMapContext.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import java.net.URI; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration.IntegerRanges; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.MapContext; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.Partitioner; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.security.Credentials; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; /** * This class wraps an existing map context that will write hadoop writable objects as a map context * that writes the native object for ease of implementing mapreduce jobs. 
* * @param The map context's input type * @param The map context's output type */ public class NativeMapContext implements MapContext { private final MapContext context; private final HadoopWritableSerializationTool serializationTool; public NativeMapContext( final MapContext context, final HadoopWritableSerializationTool serializationTool) { this.context = context; this.serializationTool = serializationTool; } @Override public TaskAttemptID getTaskAttemptID() { return context.getTaskAttemptID(); } @Override public void setStatus(final String msg) { context.setStatus(msg); } @Override public String getStatus() { return context.getStatus(); } @Override public InputSplit getInputSplit() { return context.getInputSplit(); } @Override public Configuration getConfiguration() { return context.getConfiguration(); } @Override public boolean nextKeyValue() throws IOException, InterruptedException { return context.nextKeyValue(); } @Override public float getProgress() { return context.getProgress(); } @Override public int hashCode() { return context.hashCode(); } @Override public Credentials getCredentials() { return context.getCredentials(); } @Override public Counter getCounter(final Enum counterName) { return context.getCounter(counterName); } @Override public KEYIN getCurrentKey() throws IOException, InterruptedException { return context.getCurrentKey(); } @Override public JobID getJobID() { return context.getJobID(); } @Override public int getNumReduceTasks() { return context.getNumReduceTasks(); } @Override public Counter getCounter(final String groupName, final String counterName) { return context.getCounter(groupName, counterName); } @Override public VALUEIN getCurrentValue() throws IOException, InterruptedException { return context.getCurrentValue(); } @Override public Path getWorkingDirectory() throws IOException { return context.getWorkingDirectory(); } @Override public void write(final GeoWaveInputKey key, final Object value) throws IOException, 
InterruptedException { context.write(key, serializationTool.toWritable(key.getInternalAdapterId(), value)); } @Override public Class getOutputKeyClass() { return context.getOutputKeyClass(); } @Override public OutputCommitter getOutputCommitter() { return context.getOutputCommitter(); } @Override public Class getOutputValueClass() { return context.getOutputValueClass(); } @Override public Class getMapOutputKeyClass() { return context.getMapOutputKeyClass(); } @Override public Class getMapOutputValueClass() { return context.getMapOutputValueClass(); } @Override public String getJobName() { return context.getJobName(); } public boolean userClassesTakesPrecedence() { return context.getConfiguration().getBoolean(MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false); } @Override public boolean equals(final Object obj) { return context.equals(obj); } @Override public Class> getInputFormatClass() throws ClassNotFoundException { return context.getInputFormatClass(); } @Override public Class> getMapperClass() throws ClassNotFoundException { return context.getMapperClass(); } @Override public Class> getCombinerClass() throws ClassNotFoundException { return context.getCombinerClass(); } @Override public Class> getReducerClass() throws ClassNotFoundException { return context.getReducerClass(); } @Override public Class> getOutputFormatClass() throws ClassNotFoundException { return context.getOutputFormatClass(); } @Override public Class> getPartitionerClass() throws ClassNotFoundException { return context.getPartitionerClass(); } @Override public RawComparator getSortComparator() { return context.getSortComparator(); } @Override public String getJar() { return context.getJar(); } @Override public RawComparator getCombinerKeyGroupingComparator() { return context.getCombinerKeyGroupingComparator(); } @Override public RawComparator getGroupingComparator() { return context.getGroupingComparator(); } @Override public boolean getJobSetupCleanupNeeded() { return 
context.getJobSetupCleanupNeeded(); } @Override public boolean getTaskCleanupNeeded() { return context.getTaskCleanupNeeded(); } @Override public boolean getProfileEnabled() { return context.getProfileEnabled(); } @Override public String getProfileParams() { return context.getProfileParams(); } @Override public IntegerRanges getProfileTaskRange(final boolean isMap) { return context.getProfileTaskRange(isMap); } @Override public String getUser() { return context.getUser(); } @Override public boolean getSymlink() { return context.getSymlink(); } @Override public Path[] getArchiveClassPaths() { return context.getArchiveClassPaths(); } @Override public URI[] getCacheArchives() throws IOException { return context.getCacheArchives(); } @Override public URI[] getCacheFiles() throws IOException { return context.getCacheFiles(); } @Override public Path[] getLocalCacheArchives() throws IOException { return context.getLocalCacheArchives(); } @Override public Path[] getLocalCacheFiles() throws IOException { return context.getLocalCacheFiles(); } @Override public Path[] getFileClassPaths() { return context.getFileClassPaths(); } @Override public String[] getArchiveTimestamps() { return context.getArchiveTimestamps(); } @Override public String[] getFileTimestamps() { return context.getFileTimestamps(); } @Override public int getMaxMapAttempts() { return context.getMaxMapAttempts(); } @Override public int getMaxReduceAttempts() { return context.getMaxReduceAttempts(); } @Override public void progress() { context.progress(); } @Override public String toString() { return context.toString(); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/NativeReduceContext.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.io.IOException; import java.net.URI; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration.IntegerRanges; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.Partitioner; import org.apache.hadoop.mapreduce.ReduceContext; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.security.Credentials; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; /** * This class wraps an existing reduce context that will write hadoop writable objects as a reduce * context that writes the native object for ease of implementing mapreduce jobs. 
* * @param The reduce context's input type * @param The reduce context's output type */ public class NativeReduceContext implements ReduceContext { private final ReduceContext writableContext; private final HadoopWritableSerializationTool serializationTool; public NativeReduceContext( final ReduceContext writableContext, final HadoopWritableSerializationTool serializationTool) { this.writableContext = writableContext; this.serializationTool = serializationTool; } // delegate everything, except the write method, for this transform the // object to a writable @Override public void write(final GeoWaveInputKey key, final Object value) throws IOException, InterruptedException { writableContext.write(key, serializationTool.toWritable(key.getInternalAdapterId(), value)); } @Override public TaskAttemptID getTaskAttemptID() { return writableContext.getTaskAttemptID(); } @Override public void setStatus(final String msg) { writableContext.setStatus(msg); } @Override public String getStatus() { return writableContext.getStatus(); } @Override public boolean nextKey() throws IOException, InterruptedException { return writableContext.nextKey(); } @Override public Configuration getConfiguration() { return writableContext.getConfiguration(); } @Override public boolean nextKeyValue() throws IOException, InterruptedException { return writableContext.nextKeyValue(); } @Override public float getProgress() { return writableContext.getProgress(); } @Override public int hashCode() { return writableContext.hashCode(); } @Override public Iterable getValues() throws IOException, InterruptedException { return writableContext.getValues(); } @Override public Credentials getCredentials() { return writableContext.getCredentials(); } @Override public Counter getCounter(final Enum counterName) { return writableContext.getCounter(counterName); } @Override public KEYIN getCurrentKey() throws IOException, InterruptedException { return writableContext.getCurrentKey(); } @Override public JobID 
getJobID() { return writableContext.getJobID(); } @Override public int getNumReduceTasks() { return writableContext.getNumReduceTasks(); } @Override public Counter getCounter(final String groupName, final String counterName) { return writableContext.getCounter(groupName, counterName); } @Override public VALUEIN getCurrentValue() throws IOException, InterruptedException { return writableContext.getCurrentValue(); } @Override public Path getWorkingDirectory() throws IOException { return writableContext.getWorkingDirectory(); } @Override public Class getOutputKeyClass() { return writableContext.getOutputKeyClass(); } @Override public OutputCommitter getOutputCommitter() { return writableContext.getOutputCommitter(); } @Override public Class getOutputValueClass() { return writableContext.getOutputValueClass(); } @Override public Class getMapOutputKeyClass() { return writableContext.getMapOutputKeyClass(); } @Override public Class getMapOutputValueClass() { return writableContext.getMapOutputValueClass(); } @Override public String getJobName() { return writableContext.getJobName(); } public boolean userClassesTakesPrecedence() { return writableContext.getConfiguration().getBoolean(MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false); } @Override public boolean equals(final Object obj) { return writableContext.equals(obj); } @Override public Class> getInputFormatClass() throws ClassNotFoundException { return writableContext.getInputFormatClass(); } @Override public Class> getMapperClass() throws ClassNotFoundException { return writableContext.getMapperClass(); } @Override public Class> getCombinerClass() throws ClassNotFoundException { return writableContext.getCombinerClass(); } @Override public Class> getReducerClass() throws ClassNotFoundException { return writableContext.getReducerClass(); } @Override public Class> getOutputFormatClass() throws ClassNotFoundException { return writableContext.getOutputFormatClass(); } @Override public Class> getPartitionerClass() throws 
ClassNotFoundException { return writableContext.getPartitionerClass(); } @Override public RawComparator getSortComparator() { return writableContext.getSortComparator(); } @Override public String getJar() { return writableContext.getJar(); } @Override public RawComparator getCombinerKeyGroupingComparator() { return writableContext.getCombinerKeyGroupingComparator(); } @Override public RawComparator getGroupingComparator() { return writableContext.getGroupingComparator(); } @Override public boolean getJobSetupCleanupNeeded() { return writableContext.getJobSetupCleanupNeeded(); } @Override public boolean getTaskCleanupNeeded() { return writableContext.getTaskCleanupNeeded(); } @Override public boolean getProfileEnabled() { return writableContext.getProfileEnabled(); } @Override public String getProfileParams() { return writableContext.getProfileParams(); } @Override public IntegerRanges getProfileTaskRange(final boolean isMap) { return writableContext.getProfileTaskRange(isMap); } @Override public String getUser() { return writableContext.getUser(); } @Override public boolean getSymlink() { return writableContext.getSymlink(); } @Override public Path[] getArchiveClassPaths() { return writableContext.getArchiveClassPaths(); } @Override public URI[] getCacheArchives() throws IOException { return writableContext.getCacheArchives(); } @Override public URI[] getCacheFiles() throws IOException { return writableContext.getCacheFiles(); } @Override public Path[] getLocalCacheArchives() throws IOException { return writableContext.getLocalCacheArchives(); } @Override public Path[] getLocalCacheFiles() throws IOException { return writableContext.getLocalCacheFiles(); } @Override public Path[] getFileClassPaths() { return writableContext.getFileClassPaths(); } @Override public String[] getArchiveTimestamps() { return writableContext.getArchiveTimestamps(); } @Override public String[] getFileTimestamps() { return writableContext.getFileTimestamps(); } @Override public int 
getMaxMapAttempts() { return writableContext.getMaxMapAttempts(); } @Override public void progress() { writableContext.progress(); } @Override public String toString() { return writableContext.toString(); } @Override public int getMaxReduceAttempts() { return writableContext.getMaxReduceAttempts(); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/URLClassloaderUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce; import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URL; import java.net.URLStreamHandlerFactory; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.util.ClasspathUtils; import org.locationtech.geowave.mapreduce.hdfs.HdfsUrlStreamHandlerFactory; import org.locationtech.geowave.mapreduce.s3.S3URLStreamHandlerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class URLClassloaderUtils { private static final Logger LOGGER = LoggerFactory.getLogger(URLClassloaderUtils.class); private static final Object MUTEX = new Object(); private static Set initializedClassLoaders = new HashSet<>(); public static enum URLTYPE { S3, HDFS } private static boolean hasS3Handler = false; private static boolean hasHdfsHandler = false; public static void setURLStreamHandlerFactory(final URLTYPE urlType) throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException { // One-time init for each type if ((urlType == URLTYPE.S3) && hasS3Handler) { return; } else if ((urlType == URLTYPE.HDFS) && hasHdfsHandler) { return; } final Field factoryField = URL.class.getDeclaredField("factory"); // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe 
factoryField.setAccessible(true); final URLStreamHandlerFactory urlStreamHandlerFactory = (URLStreamHandlerFactory) factoryField.get(null); if (urlStreamHandlerFactory == null) { if (urlType == URLTYPE.S3) { URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory()); hasS3Handler = true; } else { // HDFS URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory()); hasHdfsHandler = true; } } else { final Field lockField = URL.class.getDeclaredField("streamHandlerLock"); // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe lockField.setAccessible(true); synchronized (lockField.get(null)) { factoryField.set(null, null); if (urlType == URLTYPE.S3) { URL.setURLStreamHandlerFactory(new S3URLStreamHandlerFactory(urlStreamHandlerFactory)); hasS3Handler = true; } else { // HDFS URL.setURLStreamHandlerFactory(new HdfsUrlStreamHandlerFactory(urlStreamHandlerFactory)); hasHdfsHandler = true; } } } } public static void initClassLoader() throws MalformedURLException { synchronized (MUTEX) { final ClassLoader myCl = URLClassloaderUtils.class.getClassLoader(); if (initializedClassLoaders.contains(myCl)) { return; } final ClassLoader classLoader = ClasspathUtils.transformClassLoader(myCl); if (classLoader != null) { SPIServiceRegistry.registerClassLoader(classLoader); } initializedClassLoaders.add(myCl); } } protected static boolean verifyProtocol(final String fileStr) { if (fileStr.contains("s3://")) { try { setURLStreamHandlerFactory(URLTYPE.S3); return true; } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e1) { LOGGER.error("Error in setting up S3URLStreamHandler Factory", e1); return false; } } else if (fileStr.contains("hdfs://")) { try { setURLStreamHandlerFactory(URLTYPE.HDFS); return true; } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e1) { LOGGER.error("Error in 
setting up HdfsUrlStreamHandler Factory", e1); return false; } } LOGGER.debug("Assuming good URLStreamHandler for " + fileStr); return true; } public static byte[] toBinary(final Persistable persistable) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in toBinary", e); } return PersistenceUtils.toBinary(persistable); } public static Persistable fromBinary(final byte[] bytes) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in fromBinary", e); } return PersistenceUtils.fromBinary(bytes); } public static byte[] toBinary(final Collection persistables) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in toBinary (list)", e); } return PersistenceUtils.toBinary(persistables); } public static byte[] toClassId(final Persistable persistable) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in toClassId", e); } return PersistenceUtils.toClassId(persistable); } public static Persistable fromClassId(final byte[] bytes) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in fromClassId", e); } return PersistenceUtils.fromClassId(bytes); } public static byte[] toClassId(final String className) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in toClassId(className)", e); } return PersistenceUtils.toClassId(className); } public static List fromBinaryAsList(final byte[] bytes) { try { initClassLoader(); } catch (final MalformedURLException e) { LOGGER.warn("Unable to initialize classloader in fromBinaryAsList", e); } return PersistenceUtils.fromBinaryAsList(bytes); } } ================================================ FILE: 
core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/VFSClassLoaderTransformer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.mapreduce;

import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.impl.VFSClassLoader;
import org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Transforms a commons-vfs {@link VFSClassLoader} into a plain {@link URLClassLoader} over the
 * same file objects (registering s3/hdfs URL stream handlers where needed); returns null for any
 * other classloader type.
 */
public class VFSClassLoaderTransformer implements ClassLoaderTransformerSpi {
  private static final Logger LOGGER = LoggerFactory.getLogger(VFSClassLoaderTransformer.class);

  @Override
  public ClassLoader transform(final ClassLoader classLoader) {
    if (classLoader instanceof VFSClassLoader) {
      final VFSClassLoader cl = (VFSClassLoader) classLoader;
      final FileObject[] fileObjs = cl.getFileObjects();
      final ArrayList<URL> fileList = new ArrayList<>();
      for (final FileObject fileObj : fileObjs) {
        final String fileStr = fileObj.toString();
        // only keep files whose protocol has (or can be given) a registered URL handler
        if (URLClassloaderUtils.verifyProtocol(fileStr)) {
          try {
            fileList.add(new URL(fileStr));
          } catch (final MalformedURLException e) {
            LOGGER.error("Unable to register classloader for '" + fileStr + "'", e);
          }
        } else {
          LOGGER.error("Failed to register class loader from: " + fileStr);
        }
      }
      // toArray replaces the original element-by-element copy loop
      final URL[] fileUrls = fileList.toArray(new URL[0]);
      return java.security.AccessController.doPrivileged(
          new java.security.PrivilegedAction<URLClassLoader>() {
            @Override
            public URLClassLoader run() {
              return new URLClassLoader(fileUrls, cl);
            }
          });
    }
    return null;
  }
}
================================================ FILE:
core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/copy/StoreCopyJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.copy; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.geowave.mapreduce.JobContextAdapterIndexMappingStore; import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.operations.CopyCommand; import org.locationtech.geowave.mapreduce.operations.CopyCommandOptions; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; 
/**
 * Tool/ToolRunner entry point that runs a MapReduce job copying every adapter, index, and entry
 * from one GeoWave store (inputStoreOptions) to another (outputStoreOptions) using
 * {@link StoreCopyMapper} and {@link StoreCopyReducer}.
 */
public class StoreCopyJobRunner extends Configured implements Tool {
  private static final Logger LOGGER = LoggerFactory.getLogger(StoreCopyJobRunner.class);

  // Source store: everything (adapters, indices, entries) is read from here.
  private final DataStorePluginOptions inputStoreOptions;
  // Destination store: copied entries are written here.
  private final DataStorePluginOptions outputStoreOptions;
  // Split counts, reducer count, and remote HDFS/resource-manager endpoints.
  private final CopyCommandOptions options;
  private final String jobName;

  public StoreCopyJobRunner(
      final DataStorePluginOptions inputStoreOptions,
      final DataStorePluginOptions outputStoreOptions,
      final CopyCommandOptions options,
      final String jobName) {
    this.inputStoreOptions = inputStoreOptions;
    this.outputStoreOptions = outputStoreOptions;
    this.options = options;
    this.jobName = jobName;
  }

  /**
   * Main method to execute the MapReduce analytic.
   *
   * @return 0 on successful job completion, 1 otherwise
   */
  public int runJob() throws IOException, InterruptedException, ClassNotFoundException {
    // Fall back to a fresh Configuration when ToolRunner did not supply one.
    Configuration conf = super.getConf();
    if (conf == null) {
      conf = new Configuration();
      setConf(conf);
    }
    // Point the configuration at the remote HDFS / job tracker (or resource manager) endpoints.
    GeoWaveConfiguratorBase.setRemoteInvocationParams(
        options.getHdfsHostPort(),
        options.getJobTrackerOrResourceManHostPort(),
        conf);
    final Job job = Job.getInstance(conf);
    job.setJarByClass(this.getClass());
    job.setJobName(jobName);
    // Identity map, reduce writes to the output store via GeoWaveOutputFormat.
    job.setMapperClass(StoreCopyMapper.class);
    job.setReducerClass(StoreCopyReducer.class);
    job.setInputFormatClass(GeoWaveInputFormat.class);
    job.setOutputFormatClass(GeoWaveOutputFormat.class);
    job.setMapOutputKeyClass(GeoWaveInputKey.class);
    job.setMapOutputValueClass(ObjectWritable.class);
    job.setOutputKeyClass(GeoWaveOutputKey.class);
    // Native (non-Writable) values; the output format handles serialization — presumably, confirm
    // in GeoWaveOutputFormat.
    job.setOutputValueClass(Object.class);
    job.setNumReduceTasks(options.getNumReducers());
    GeoWaveInputFormat.setMinimumSplitCount(job.getConfiguration(), options.getMinSplits());
    GeoWaveInputFormat.setMaximumSplitCount(job.getConfiguration(), options.getMaxSplits());
    GeoWaveInputFormat.setStoreOptions(job.getConfiguration(), inputStoreOptions);
    GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), outputStoreOptions);
    // Publish every source adapter (with its index mappings and internal adapter id) into the job
    // configuration so mapper/reducer tasks can resolve them without hitting the source store.
    final AdapterIndexMappingStore adapterIndexMappingStore =
        inputStoreOptions.createAdapterIndexMappingStore();
    final InternalDataAdapter[] adapters = inputStoreOptions.createAdapterStore().getAdapters();
    for (final InternalDataAdapter dataAdapter : adapters) {
      LOGGER.debug("Adding adapter to output config: " + dataAdapter.getTypeName());
      GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), dataAdapter);
      final AdapterToIndexMapping[] mappings =
          adapterIndexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId());
      JobContextAdapterIndexMappingStore.addAdapterToIndexMapping(
          job.getConfiguration(),
          mappings);
      JobContextInternalAdapterStore.addTypeName(
          job.getConfiguration(),
          dataAdapter.getTypeName(),
          dataAdapter.getAdapterId());
    }
    // Likewise publish every source index into the output configuration.
    try (CloseableIterator indexIt = inputStoreOptions.createIndexStore().getIndices()) {
      while (indexIt.hasNext()) {
        final Index index = indexIt.next();
        LOGGER.debug("Adding index to output config: " + (index.getName()));
        GeoWaveOutputFormat.addIndex(job.getConfiguration(), index);
      }
    }
    boolean retVal = false;
    try {
      retVal = job.waitForCompletion(true);
    } catch (final IOException ex) {
      // Logged rather than rethrown; the failure surfaces as a non-zero exit code below.
      LOGGER.error("Error waiting for store copy job: ", ex);
    }
    return retVal ? 0 : 1;
  }

  /** CLI entry point: parses the copy command line, then delegates to the runner via ToolRunner. */
  public static void main(final String[] args) throws Exception {
    final ConfigOptions opts = new ConfigOptions();
    final OperationParser parser = new OperationParser();
    parser.addAdditionalObject(opts);
    final CopyCommand command = new CopyCommand();
    final CommandLineOperationParams params = parser.parse(command, args);
    opts.prepare(params);
    final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args);
    System.exit(res);
  }

  @Override
  public int run(final String[] args) throws Exception {
    // parse args to find command line etc...
    // NOTE(review): args are currently ignored here; all options come from the constructor.
    return runJob();
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/copy/StoreCopyMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.mapreduce.copy;

import java.io.IOException;
import org.apache.hadoop.mapreduce.MapContext;
import org.locationtech.geowave.mapreduce.GeoWaveWritableOutputMapper;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;

/**
 * Identity mapper for the store-copy job: each native (key, value) pair read from the input store
 * is forwarded unchanged to the reduce phase.
 */
public class StoreCopyMapper extends GeoWaveWritableOutputMapper {
  /**
   * Emits the pair as-is; the superclass presumably converts the native value to a Writable for
   * the shuffle — confirm in GeoWaveWritableOutputMapper.
   */
  @Override
  protected void mapNativeValue(
      final GeoWaveInputKey key,
      final Object value,
      final MapContext context) throws IOException, InterruptedException {
    context.write(key, value);
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/copy/StoreCopyReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.mapreduce.copy;

import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mapreduce.Reducer;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;

/**
 * A basic implementation of copy as a reducer: resolves the output type name and target index
 * names for the incoming key's adapter, then writes every grouped value to the output store.
 */
public class StoreCopyReducer extends GeoWaveWritableInputReducer {
  // Adapter-to-index mappings published into the job context by the job runner.
  private AdapterIndexMappingStore store;
  // Maps internal adapter ids back to type names, also from the job context.
  private InternalAdapterStore internalAdapterStore;

  @Override
  protected void setup(
      final Reducer.Context context) throws IOException, InterruptedException {
    super.setup(context);
    store = GeoWaveOutputFormat.getJobContextAdapterIndexMappingStore(context);
    internalAdapterStore = GeoWaveOutputFormat.getJobContextInternalAdapterStore(context);
  }

  @Override
  protected void reduceNativeValues(
      final GeoWaveInputKey key,
      final Iterable values,
      final Reducer.Context context) throws IOException, InterruptedException {
    final Iterator objects = values.iterator();
    if (!objects.hasNext()) {
      return;
    }
    // The key (and therefore its adapter) is fixed for this reduce call, so resolve the index
    // mapping, index names, and type name once instead of per value (previously recomputed
    // inside the loop).
    final AdapterToIndexMapping[] mapping =
        store.getIndicesForAdapter(key.getInternalAdapterId());
    final String[] indexNames =
        Arrays.stream(mapping).map(AdapterToIndexMapping::getIndexName).toArray(String[]::new);
    final String typeName = internalAdapterStore.getTypeName(mapping[0].getAdapterId());
    final GeoWaveOutputKey outputKey = new GeoWaveOutputKey<>(typeName, indexNames);
    while (objects.hasNext()) {
      // Reusing the output key is safe: Hadoop serializes key and value on each write.
      context.write(outputKey, objects.next());
    }
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeCombiner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.dedupe; import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; /** * A basic implementation of deduplication as a combiner (using a combiner is a performance * optimization over doing all deduplication in a reducer) */ public class GeoWaveDedupeCombiner extends Reducer { @Override protected void reduce( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final Iterator it = values.iterator(); while (it.hasNext()) { final ObjectWritable next = it.next(); if (next != null) { context.write(key, next); return; } } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeJobRunner.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.mapreduce.dedupe;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams;
import org.locationtech.geowave.core.cli.parser.OperationParser;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.cli.store.StoreLoader;
import org.locationtech.geowave.mapreduce.AbstractGeoWaveJobRunner;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;

/**
 * This class can run a basic job to query GeoWave, deduplicating results, and writing the final set
 * of key value pairs to a sequence file. It can be extended for more advanced capabilities or job
 * chaining. The protected getters (reducer class, output format, reduce-task count, HDFS output
 * base) are the intended extension points.
 */
public class GeoWaveDedupeJobRunner extends AbstractGeoWaveJobRunner {
  public GeoWaveDedupeJobRunner(final DataStorePluginOptions dataStoreOptions) {
    super(dataStoreOptions);
  }

  /** Wires the dedupe mapper/combiner/reducer and (re)creates the HDFS output directory. */
  @Override
  protected void configure(final Job job) throws Exception {
    job.setJobName("GeoWave Dedupe (" + dataStoreOptions.getGeoWaveNamespace() + ")");
    job.setMapperClass(GeoWaveDedupeMapper.class);
    // The combiner drops duplicates early, before the shuffle.
    job.setCombinerClass(GeoWaveDedupeCombiner.class);
    job.setReducerClass(getReducer());
    job.setMapOutputKeyClass(GeoWaveInputKey.class);
    job.setMapOutputValueClass(ObjectWritable.class);
    job.setOutputKeyClass(GeoWaveInputKey.class);
    job.setOutputValueClass(ObjectWritable.class);
    job.setInputFormatClass(GeoWaveInputFormat.class);
    job.setOutputFormatClass(getOutputFormatClass());
    job.setNumReduceTasks(getNumReduceTasks());
    // Speculative duplicate tasks would defeat the purpose of a dedupe job's single-writer output.
    job.setSpeculativeExecution(false);
    // Delete any stale output from a previous run before pointing the job at the path.
    // NOTE(review): closing a FileSystem obtained via FileSystem.get may close the JVM-wide cached
    // instance shared with other users of the same Configuration — verify this is intended.
    try (final FileSystem fs = FileSystem.get(job.getConfiguration())) {
      final Path outputPath = getHdfsOutputPath();
      fs.delete(outputPath, true);
      FileOutputFormat.setOutputPath(job, outputPath);
    }
  }

  // Root directory under which the per-namespace output path is created; override to relocate.
  protected String getHdfsOutputBase() {
    return "/tmp";
  }

  // Extension point: subclasses may substitute their own reducer.
  @SuppressWarnings("rawtypes")
  protected Class getReducer() {
    return GeoWaveDedupeReducer.class;
  }

  /** Output path of the form {base}/{namespace}_dedupe. */
  public Path getHdfsOutputPath() {
    return new Path(getHdfsOutputBase() + "/" + dataStoreOptions.getGeoWaveNamespace() + "_dedupe");
  }

  protected Class getOutputFormatClass() {
    return SequenceFileOutputFormat.class;
  }

  protected int getNumReduceTasks() {
    return 8;
  }

  /**
   * CLI entry point; the first positional argument names the datastore to load from the config
   * file, the remainder are passed through to ToolRunner.
   */
  public static void main(final String[] args) throws Exception {
    final ConfigOptions opts = new ConfigOptions();
    final MainParameterHolder holder = new MainParameterHolder();
    final OperationParser parser = new OperationParser();
    parser.addAdditionalObject(opts);
    parser.addAdditionalObject(holder);
    // Second round to get everything else.
    final CommandLineOperationParams params = parser.parse(args);
    // Set the datastore plugin
    if (holder.getMainParameter().size() == 0) {
      throw new ParameterException("Must specify datastore name as first argument.");
    }
    // Load the params for config file.
    opts.prepare(params);
    final StoreLoader loader = new StoreLoader(holder.getMainParameter().get(0));
    loader.loadFromConfig(
        (File) params.getContext().get(ConfigOptions.PROPERTIES_FILE_CONTEXT),
        params.getConsole());
    final int res =
        ToolRunner.run(
            new Configuration(),
            new GeoWaveDedupeJobRunner(loader.getDataStorePlugin()),
            args);
    System.exit(res);
  }

  /** Captures bare (un-flagged) command-line arguments for JCommander. */
  public static class MainParameterHolder {
    @Parameter
    private final List mainParameter = new ArrayList<>();

    public List getMainParameter() {
      return mainParameter;
    }
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.mapreduce.dedupe;

import java.io.IOException;
import org.apache.hadoop.mapreduce.MapContext;
import org.locationtech.geowave.mapreduce.GeoWaveWritableOutputMapper;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;

/**
 * Identity mapper for the deduplication job: pairs pass through unchanged so that duplicates
 * group under the same key at the combiner/reducer, where they are collapsed.
 */
public class GeoWaveDedupeMapper extends GeoWaveWritableOutputMapper {
  /** Forwards the pair untouched; all dedupe logic lives in the combiner and reducer. */
  @Override
  protected void mapNativeValue(
      final GeoWaveInputKey key,
      final Object value,
      final MapContext context) throws IOException, InterruptedException {
    context.write(key, value);
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/dedupe/GeoWaveDedupeReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.dedupe; import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; /** A basic implementation of deduplication as a reducer */ public class GeoWaveDedupeReducer extends Reducer { @Override protected void reduce( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final Iterator objects = values.iterator(); if (objects.hasNext()) { context.write(key, objects.next()); } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/hdfs/HdfsUrlStreamHandlerFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.hdfs; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.Optional; import org.apache.hadoop.fs.FsUrlStreamHandlerFactory; public class HdfsUrlStreamHandlerFactory extends FsUrlStreamHandlerFactory { // The wrapped URLStreamHandlerFactory's instance private final Optional delegate; /** Used in case there is no existing URLStreamHandlerFactory defined */ public HdfsUrlStreamHandlerFactory() { this(null); } /** Used in case there is an existing URLStreamHandlerFactory defined */ public HdfsUrlStreamHandlerFactory(final URLStreamHandlerFactory delegate) { this.delegate = Optional.ofNullable(delegate); } @Override public URLStreamHandler createURLStreamHandler(final String protocol) { // FsUrlStreamHandlerFactory impl final URLStreamHandler urlStreamHandler = super.createURLStreamHandler(protocol); // See if hadoop handled it if (urlStreamHandler != null) { return urlStreamHandler; } // It is not the hdfs protocol so we delegate it to the wrapped URLStreamHandlerFactory return delegate.map(factory -> factory.createURLStreamHandler(protocol)).orElse(null); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/AsyncInputFormatIteratorWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.mapreduce.input;

import java.util.Iterator;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.TransientAdapterStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;
import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrievalIteratorHelper;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Input-format iterator wrapper that layers asynchronous, batched data-index retrieval on top of
 * the synchronous InputFormatIteratorWrapper by delegating to a
 * BatchDataIndexRetrievalIteratorHelper at each step of iteration and row decoding.
 *
 * NOTE(review): generic type parameters appear to have been stripped from this extract (see the
 * dangling '>' in the field declaration and the unbound cast '(T)' below); restore the declared
 * type arguments from the upstream source before building.
 */
public class AsyncInputFormatIteratorWrapper extends InputFormatIteratorWrapper {
  // Coordinates pending async batch retrievals with the iteration protocol below.
  private final BatchDataIndexRetrievalIteratorHelper> batchHelper;

  public AsyncInputFormatIteratorWrapper(
      final Iterator reader,
      final QueryFilter[] queryFilters,
      final TransientAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore,
      final AdapterIndexMappingStore mappingStore,
      final Index index,
      final boolean isOutputWritable,
      final BatchDataIndexRetrieval dataIndexRetrieval) {
    super(
        reader,
        queryFilters,
        adapterStore,
        internalAdapterStore,
        mappingStore,
        index,
        isOutputWritable,
        dataIndexRetrieval);
    batchHelper = new BatchDataIndexRetrievalIteratorHelper<>(dataIndexRetrieval);
  }

  @Override
  protected void findNext() {
    super.findNext();
    // If the synchronous lookup produced nothing, fall back to whatever the async batch helper
    // has ready (may still be null, meaning exhaustion).
    final boolean hasNextValue = (nextEntry != null);
    final Pair batchNextValue = batchHelper.postFindNext(hasNextValue, reader.hasNext());
    if (!hasNextValue) {
      nextEntry = batchNextValue;
    }
  }

  @Override
  public boolean hasNext() {
    // Give the helper a chance to flush/await pending batches before the standard check.
    batchHelper.preHasNext();
    return super.hasNext();
  }

  @Override
  protected Pair decodeRowToEntry(
      final GeoWaveRow row,
      final QueryFilter[] clientFilters,
      final InternalDataAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    Object value = decodeRowToValue(row, clientFilters, adapter, indexMapping, index);
    if (value == null) {
      return null;
    }
    // The helper may defer the value into an async batch (returning null here) and deliver it
    // later through the supplied callback — presumably via postFindNext; confirm in
    // BatchDataIndexRetrievalIteratorHelper.
    value = batchHelper.postDecodeRow((T) value, v -> valueToEntry(row, v));
    if (value == null) {
      return null;
    }
    return valueToEntry(row, value);
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/GeoWaveInputConfigurator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.input; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; /** * This class provides utility methods for accessing job context configuration parameters that are * specific to the GeoWaveInputFormat. 
*/ public class GeoWaveInputConfigurator extends GeoWaveConfiguratorBase { protected static enum InputConfig { QUERY_CONSTRAINTS, INDEX_QUERY_OPTIONS, DATA_TYPE_QUERY_OPTIONS, COMMON_QUERY_OPTIONS, MIN_SPLITS, MAX_SPLITS, AUTHORIZATION, OUTPUT_WRITABLE // used to inform the input format to output a Writable from the HadoopDataAdapter } private static QueryConstraints getQueryConstraintsInternal( final Class implementingClass, final Configuration configuration) { final String queryStr = configuration.get(enumToConfKey(implementingClass, InputConfig.QUERY_CONSTRAINTS), ""); if ((queryStr != null) && !queryStr.isEmpty()) { final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr); return (QueryConstraints) PersistenceUtils.fromBinary(queryBytes); } return null; } private static IndexQueryOptions getIndexQueryOptionsInternal( final Class implementingClass, final Configuration configuration) { final String queryStr = configuration.get(enumToConfKey(implementingClass, InputConfig.INDEX_QUERY_OPTIONS), ""); if ((queryStr != null) && !queryStr.isEmpty()) { final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr); return (IndexQueryOptions) PersistenceUtils.fromBinary(queryBytes); } return null; } private static DataTypeQueryOptions getDataTypeQueryOptionsInternal( final Class implementingClass, final Configuration configuration) { final String queryStr = configuration.get( enumToConfKey(implementingClass, InputConfig.DATA_TYPE_QUERY_OPTIONS), ""); if ((queryStr != null) && !queryStr.isEmpty()) { final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr); return (DataTypeQueryOptions) PersistenceUtils.fromBinary(queryBytes); } return null; } private static CommonQueryOptions getCommonQueryOptionsInternal( final Class implementingClass, final Configuration configuration) { final String queryStr = configuration.get(enumToConfKey(implementingClass, InputConfig.COMMON_QUERY_OPTIONS), ""); if ((queryStr != null) && !queryStr.isEmpty()) { 
final byte[] queryBytes = ByteArrayUtils.byteArrayFromString(queryStr); return (CommonQueryOptions) PersistenceUtils.fromBinary(queryBytes); } return null; } private static Integer getMinimumSplitCountInternal( final Class implementingClass, final Configuration configuration) { return getIntegerConfigInternal(implementingClass, configuration, InputConfig.MIN_SPLITS); } private static Integer getMaximumSplitCountInternal( final Class implementingClass, final Configuration configuration) { return getIntegerConfigInternal(implementingClass, configuration, InputConfig.MAX_SPLITS); } private static Integer getIntegerConfigInternal( final Class implementingClass, final Configuration configuration, final InputConfig inputConfig) { final String str = configuration.get(enumToConfKey(implementingClass, inputConfig), ""); if ((str != null) && !str.isEmpty()) { final Integer retVal = Integer.parseInt(str); return retVal; } return null; } public static Index getIndex(final Class implementingClass, final Configuration config) { final String input = config.get(enumToConfKey(implementingClass, GeoWaveConfg.INDEX)); if (input != null) { final byte[] indexBytes = ByteArrayUtils.byteArrayFromString(input); return (Index) PersistenceUtils.fromBinary(indexBytes); } return null; } public static QueryConstraints getQueryConstraints( final Class implementingClass, final JobContext context) { return getQueryConstraintsInternal(implementingClass, getConfiguration(context)); } public static void setQueryConstraints( final Class implementingClass, final Configuration config, final QueryConstraints query) { if (query != null) { config.set( enumToConfKey(implementingClass, InputConfig.QUERY_CONSTRAINTS), ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(query))); } else { config.unset(enumToConfKey(implementingClass, InputConfig.QUERY_CONSTRAINTS)); } } public static IndexQueryOptions getIndexQueryOptions( final Class implementingClass, final JobContext context) { return 
getIndexQueryOptionsInternal(implementingClass, getConfiguration(context)); } public static void setIndexQueryOptions( final Class implementingClass, final Configuration config, final IndexQueryOptions queryOptions) { if (queryOptions != null) { config.set( enumToConfKey(implementingClass, InputConfig.INDEX_QUERY_OPTIONS), ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(queryOptions))); } else { config.unset(enumToConfKey(implementingClass, InputConfig.INDEX_QUERY_OPTIONS)); } } public static DataTypeQueryOptions getDataTypeQueryOptions( final Class implementingClass, final JobContext context) { return getDataTypeQueryOptionsInternal(implementingClass, getConfiguration(context)); } public static void setDataTypeQueryOptions( final Class implementingClass, final Configuration config, final DataTypeQueryOptions queryOptions) { if (queryOptions != null) { config.set( enumToConfKey(implementingClass, InputConfig.DATA_TYPE_QUERY_OPTIONS), ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(queryOptions))); } else { config.unset(enumToConfKey(implementingClass, InputConfig.DATA_TYPE_QUERY_OPTIONS)); } } public static CommonQueryOptions getCommonQueryOptions( final Class implementingClass, final JobContext context) { return getCommonQueryOptionsInternal(implementingClass, getConfiguration(context)); } public static void setCommonQueryOptions( final Class implementingClass, final Configuration config, final CommonQueryOptions queryOptions) { if (queryOptions != null) { config.set( enumToConfKey(implementingClass, InputConfig.COMMON_QUERY_OPTIONS), ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(queryOptions))); } else { config.unset(enumToConfKey(implementingClass, InputConfig.COMMON_QUERY_OPTIONS)); } } public static Integer getMinimumSplitCount( final Class implementingClass, final JobContext context) { return getMinimumSplitCountInternal(implementingClass, getConfiguration(context)); } public static void setMinimumSplitCount( final Class 
implementingClass, final Configuration config, final Integer minSplits) { if (minSplits != null) { config.set(enumToConfKey(implementingClass, InputConfig.MIN_SPLITS), minSplits.toString()); } else { config.unset(enumToConfKey(implementingClass, InputConfig.MIN_SPLITS)); } } public static Integer getMaximumSplitCount( final Class implementingClass, final JobContext context) { return getMaximumSplitCountInternal(implementingClass, getConfiguration(context)); } public static void setMaximumSplitCount( final Class implementingClass, final Configuration config, final Integer maxSplits) { if (maxSplits != null) { config.set(enumToConfKey(implementingClass, InputConfig.MAX_SPLITS), maxSplits.toString()); } else { config.unset(enumToConfKey(implementingClass, InputConfig.MAX_SPLITS)); } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/GeoWaveInputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.input; import java.io.IOException; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.core.store.query.options.QueryAllIndices; import org.locationtech.geowave.core.store.query.options.QueryAllTypes; import 
org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase;
import org.locationtech.geowave.mapreduce.JobContextAdapterStore;
import org.locationtech.geowave.mapreduce.JobContextIndexStore;
import org.locationtech.geowave.mapreduce.MapReduceDataStore;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputConfigurator.InputConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hadoop {@link InputFormat} for reading entries from a GeoWave data store within a map-reduce
 * job. Store options and query options are serialized into the Hadoop {@link Configuration} by the
 * static setters and read back by {@link #getSplits(JobContext)} and
 * {@link #createRecordReader(InputSplit, TaskAttemptContext)}.
 *
 * @param <T> the native entry type produced by the record reader
 */
public class GeoWaveInputFormat<T> extends InputFormat<GeoWaveInputKey, T> {
  private static final Class<?> CLASS = GeoWaveInputFormat.class;
  protected static final Logger LOGGER = LoggerFactory.getLogger(CLASS);

  /** Sets the raw data store configuration options on the job configuration. */
  public static void setStoreOptionsMap(
      final Configuration config,
      final Map<String, String> storeConfigOptions) {
    GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, storeConfigOptions);
  }

  /**
   * Sets (or clears, when {@code storeOptions} is null) the data store plugin options on the job
   * configuration.
   */
  public static void setStoreOptions(
      final Configuration config,
      final DataStorePluginOptions storeOptions) {
    if (storeOptions != null) {
      GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, storeOptions.getOptionsAsMap());
    } else {
      GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, null);
    }
  }

  public static IndexStore getJobContextIndexStore(final JobContext context) {
    return GeoWaveConfiguratorBase.getJobContextIndexStore(CLASS, context);
  }

  public static AdapterIndexMappingStore getJobContextAdapterIndexMappingStore(
      final JobContext context) {
    return GeoWaveConfiguratorBase.getJobContextAdapterIndexMappingStore(CLASS, context);
  }

  public static TransientAdapterStore getJobContextAdapterStore(final JobContext context) {
    return GeoWaveConfiguratorBase.getJobContextAdapterStore(CLASS, context);
  }

  public static DataStatisticsStore getJobContextDataStatisticsStore(final JobContext context) {
    // TODO: this doesn't create a data statistics store wrapping a job context as the name
    // implies; need to either wrap a job context or rename this (for adapter and index store,
    // adapters and indices are stored in the job context rather than multiple processes needing
    // to look it up; this doesn't seem to be happening for stats)
    return GeoWaveConfiguratorBase.getDataStatisticsStore(CLASS, context);
  }

  public static InternalAdapterStore getJobContextInternalAdapterStore(final JobContext context) {
    return GeoWaveConfiguratorBase.getJobContextInternalAdapterStore(CLASS, context);
  }

  public static void setMinimumSplitCount(final Configuration config, final Integer minSplits) {
    GeoWaveInputConfigurator.setMinimumSplitCount(CLASS, config, minSplits);
  }

  public static void setMaximumSplitCount(final Configuration config, final Integer maxSplits) {
    GeoWaveInputConfigurator.setMaximumSplitCount(CLASS, config, maxSplits);
  }

  /** Sets whether the record reader should emit Hadoop writables rather than native objects. */
  public static void setIsOutputWritable(
      final Configuration config,
      final Boolean isOutputWritable) {
    config.setBoolean(
        GeoWaveConfiguratorBase.enumToConfKey(CLASS, InputConfig.OUTPUT_WRITABLE),
        isOutputWritable);
  }

  /**
   * Serializes all components of the given query (common options, type options, index options, and
   * constraints) into the job configuration.
   */
  public static void setQuery(
      final Configuration config,
      final Query<?> query,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore,
      final IndexStore indexStore) {
    setCommonQueryOptions(config, query.getCommonQueryOptions());
    setDataTypeQueryOptions(
        config,
        query.getDataTypeQueryOptions(),
        adapterStore,
        internalAdapterStore);
    setIndexQueryOptions(config, query.getIndexQueryOptions(), indexStore);
    setQueryConstraints(config, query.getQueryConstraints());
  }

  public static void setQueryConstraints(final Configuration config, final QueryConstraints query) {
    GeoWaveInputConfigurator.setQueryConstraints(CLASS, config, query);
  }

  protected static QueryConstraints getQueryConstraints(final JobContext context) {
    return GeoWaveInputConfigurator.getQueryConstraints(CLASS, context);
  }

  public static void setIndexQueryOptions(
      final Configuration config,
      final IndexQueryOptions queryOptions,
      final IndexStore indexStore) {
    final String indexName = queryOptions.getIndexName();
    if (indexName != null) {
      // make the index available to the job context index store
      JobContextIndexStore.addIndex(config, indexStore.getIndex(indexName));
    }
    GeoWaveInputConfigurator.setIndexQueryOptions(CLASS, config, queryOptions);
  }

  protected static IndexQueryOptions getIndexQueryOptions(final JobContext context) {
    final IndexQueryOptions options =
        GeoWaveInputConfigurator.getIndexQueryOptions(CLASS, context);
    // default to querying every index when none was explicitly configured
    return options == null ? new QueryAllIndices() : options;
  }

  protected static DataTypeQueryOptions<?> getDataTypeQueryOptions(final JobContext context) {
    final DataTypeQueryOptions<?> options =
        GeoWaveInputConfigurator.getDataTypeQueryOptions(CLASS, context);
    // default to querying every type when none was explicitly configured
    return options == null ? new QueryAllTypes<>() : options;
  }

  public static void setDataTypeQueryOptions(
      final Configuration config,
      final DataTypeQueryOptions<?> queryOptions,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore) {
    // TODO figure out where to add internal adapter IDs to the job context and read it from the
    // job context instead
    try {
      // THIS SHOULD GO AWAY, and assume the adapters in the Persistent Data Store instead. It
      // will fail, due to the 'null', if the query options does not contain the adapters
      final String[] typeNames = queryOptions.getTypeNames();
      if ((typeNames != null) && (typeNames.length > 0)) {
        for (final String typeName : typeNames) {
          // Also store for use by the mapper and reducers
          final Short adapterId = internalAdapterStore.getAdapterId(typeName);
          if (adapterId == null) {
            LOGGER.error("Cannot find type '{}'", typeName);
            continue;
          }
          JobContextAdapterStore.addDataAdapter(config, adapterStore.getAdapter(adapterId));
        }
      }
    } catch (final Exception e) {
      LOGGER.warn(
          "Adapter IDs with adapters are included in the query options. Thus, the adapter must be accessible from the data store for use by the consumer/Mapper.",
          e);
    }
    GeoWaveInputConfigurator.setDataTypeQueryOptions(CLASS, config, queryOptions);
  }

  protected static CommonQueryOptions getCommonQueryOptions(final JobContext context) {
    final CommonQueryOptions options =
        GeoWaveInputConfigurator.getCommonQueryOptions(CLASS, context);
    return options == null ? new CommonQueryOptions() : options;
  }

  public static void setCommonQueryOptions(
      final Configuration config,
      final CommonQueryOptions queryOptions) {
    GeoWaveInputConfigurator.setCommonQueryOptions(CLASS, config, queryOptions);
  }

  protected static Index getIndex(final JobContext context) {
    return GeoWaveInputConfigurator.getIndex(
        CLASS,
        GeoWaveConfiguratorBase.getConfiguration(context));
  }

  protected static Boolean isOutputWritable(final JobContext context) {
    return GeoWaveConfiguratorBase.getConfiguration(context).getBoolean(
        GeoWaveConfiguratorBase.enumToConfKey(CLASS, InputConfig.OUTPUT_WRITABLE),
        false);
  }

  protected static Integer getMinimumSplitCount(final JobContext context) {
    return GeoWaveInputConfigurator.getMinimumSplitCount(CLASS, context);
  }

  protected static Integer getMaximumSplitCount(final JobContext context) {
    return GeoWaveInputConfigurator.getMaximumSplitCount(CLASS, context);
  }

  /**
   * Creates a record reader backed by the configured {@link MapReduceDataStore}.
   *
   * @throws IOException if the configured data store does not support map-reduce
   */
  @SuppressWarnings("unchecked")
  @Override
  public RecordReader<GeoWaveInputKey, T> createRecordReader(
      final InputSplit split,
      final TaskAttemptContext context) throws IOException, InterruptedException {
    final Map<String, String> configOptions = getStoreOptionsMap(context);
    final DataStore dataStore = GeoWaveStoreFinder.createDataStore(configOptions);
    if ((dataStore != null) && (dataStore instanceof MapReduceDataStore)) {
      return (RecordReader<GeoWaveInputKey, T>) ((MapReduceDataStore) dataStore).createRecordReader(
          getCommonQueryOptions(context),
          getDataTypeQueryOptions(context),
          getIndexQueryOptions(context),
          getQueryConstraints(context),
          getJobContextAdapterStore(context),
          getJobContextInternalAdapterStore(context),
          getJobContextAdapterIndexMappingStore(context),
          getJobContextDataStatisticsStore(context),
          getJobContextIndexStore(context),
          isOutputWritable(context).booleanValue(),
          split);
    }
    LOGGER.error("Data Store does not support map reduce");
    throw new IOException("Data Store does not support map reduce");
  }

  /**
   * Check whether a configuration is fully configured to be used with a GeoWave
   * {@link org.apache.hadoop.mapreduce.InputFormat}.
   *
   * @param context the Hadoop context for the configured job
   * @throws IOException if the context is improperly configured
   * @since 1.5.0
   */
  protected static void validateOptions(final JobContext context) throws IOException {
    // attempt to get each of the GeoWave stores from the job context
    try {
      final Map<String, String> configOptions = getStoreOptionsMap(context);
      final StoreFactoryFamilySpi factoryFamily =
          GeoWaveStoreFinder.findStoreFamily(configOptions);
      if (factoryFamily == null) {
        final String msg = "Unable to find GeoWave data store";
        LOGGER.warn(msg);
        throw new IOException(msg);
      }
    } catch (final Exception e) {
      LOGGER.warn("Error finding GeoWave stores", e);
      throw new IOException("Error finding GeoWave stores", e);
    }
  }

  public static DataStorePluginOptions getStoreOptions(final JobContext context) {
    return GeoWaveConfiguratorBase.getStoreOptions(CLASS, context);
  }

  public static Map<String, String> getStoreOptionsMap(final JobContext context) {
    return GeoWaveConfiguratorBase.getStoreOptionsMap(CLASS, context);
  }

  /**
   * Delegates split computation to the configured {@link MapReduceDataStore}.
   *
   * @throws IOException if the configured data store does not support map-reduce
   */
  @Override
  public List<InputSplit> getSplits(final JobContext context)
      throws IOException, InterruptedException {
    final Map<String, String> configOptions = getStoreOptionsMap(context);
    final DataStore dataStore = GeoWaveStoreFinder.createDataStore(configOptions);
    if ((dataStore != null) && (dataStore instanceof MapReduceDataStore)) {
      return ((MapReduceDataStore) dataStore).getSplits(
          getCommonQueryOptions(context),
          getDataTypeQueryOptions(context),
          getIndexQueryOptions(context),
          getQueryConstraints(context),
          getJobContextAdapterStore(context),
          getJobContextAdapterIndexMappingStore(context),
          getJobContextDataStatisticsStore(context),
          getJobContextInternalAdapterStore(context),
          getJobContextIndexStore(context),
          context,
          getMinimumSplitCount(context),
          getMaximumSplitCount(context));
    }
    LOGGER.error("Data Store does not support map reduce");
    throw new IOException("Data Store does not support map reduce");
  }
}
================================================ FILE:
core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/GeoWaveInputKey.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.input; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparator; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.Index; import com.google.common.primitives.Bytes; /** * This class encapsulates the unique identifier for GeoWave input data using a map-reduce GeoWave * input format. The combination of the the adapter ID and the data ID should be unique. 
*/ public class GeoWaveInputKey implements WritableComparable, java.io.Serializable { /** */ private static final long serialVersionUID = 1L; protected Short internalAdapterId; private ByteArray dataId; private transient org.locationtech.geowave.core.store.entities.GeoWaveKey key; public GeoWaveInputKey() {} public GeoWaveInputKey( final org.locationtech.geowave.core.store.entities.GeoWaveKey key, final String indexName) { this(key.getAdapterId(), key, indexName); } public GeoWaveInputKey(final short internalAdapterId, final ByteArray dataId) { this.internalAdapterId = internalAdapterId; this.dataId = dataId; } public GeoWaveInputKey( final short internalAdapterId, final org.locationtech.geowave.core.store.entities.GeoWaveKey key, final String indexName) { this.internalAdapterId = internalAdapterId; if (key.getNumberOfDuplicates() > 0) { dataId = new ByteArray(key.getDataId()); } else { // if deduplication should be disabled, prefix the actual data // ID with the index ID concatenated with the insertion // ID to gaurantee uniqueness and effectively disable // aggregating by only the data ID dataId = new ByteArray( Bytes.concat( indexName == null ? new byte[0] : StringUtils.stringToBinary(indexName), key.getPartitionKey() == null ? new byte[0] : key.getPartitionKey(), key.getSortKey() == null ? 
new byte[0] : key.getSortKey(), key.getDataId())); } this.key = key; } public Pair getPartitionAndSortKey(final Index index) { final int partitionKeyLength = index.getIndexStrategy().getPartitionKeyLength(); final int indexIdLength = StringUtils.stringToBinary(index.getName()).length; if (dataId.getBytes().length < (indexIdLength + partitionKeyLength)) { return null; } else { final byte[] partitionKey = Arrays.copyOfRange(dataId.getBytes(), indexIdLength, indexIdLength + partitionKeyLength); final byte[] sortKey = Arrays.copyOfRange( dataId.getBytes(), indexIdLength + partitionKeyLength, dataId.getBytes().length); return ImmutablePair.of(partitionKey, sortKey); } } public org.locationtech.geowave.core.store.entities.GeoWaveKey getGeoWaveKey() { return key; } public void setGeoWaveKey(final org.locationtech.geowave.core.store.entities.GeoWaveKey key) { this.key = key; } public short getInternalAdapterId() { return internalAdapterId; } public void setInternalAdapterId(final short internalAdapterId) { this.internalAdapterId = internalAdapterId; } public void setDataId(final ByteArray dataId) { this.dataId = dataId; } public ByteArray getDataId() { return dataId; } @Override public int compareTo(final GeoWaveInputKey o) { final byte[] internalAdapterIdBytes = ByteArrayUtils.shortToByteArray(internalAdapterId); final int adapterCompare = WritableComparator.compareBytes( internalAdapterIdBytes, 0, internalAdapterIdBytes.length, ByteArrayUtils.shortToByteArray(o.internalAdapterId), 0, ByteArrayUtils.shortToByteArray(o.internalAdapterId).length); if (adapterCompare != 0) { return adapterCompare; } final GeoWaveInputKey other = o; return WritableComparator.compareBytes( dataId.getBytes(), 0, dataId.getBytes().length, other.dataId.getBytes(), 0, other.dataId.getBytes().length); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((dataId == null) ? 
0 : dataId.hashCode()); result = (prime * result) + ((internalAdapterId == null) ? 0 : internalAdapterId.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final GeoWaveInputKey other = (GeoWaveInputKey) obj; if (dataId == null) { if (other.dataId != null) { return false; } } else if (!dataId.equals(other.dataId)) { return false; } if (internalAdapterId == null) { if (other.internalAdapterId != null) { return false; } } else if (!internalAdapterId.equals(other.internalAdapterId)) { return false; } return true; } @Override public void readFields(final DataInput input) throws IOException { internalAdapterId = input.readShort(); final int dataIdLength = input.readInt(); final byte[] dataIdBytes = new byte[dataIdLength]; input.readFully(dataIdBytes); dataId = new ByteArray(dataIdBytes); } @Override public void write(final DataOutput output) throws IOException { output.writeShort(internalAdapterId); output.writeInt(dataId.getBytes().length); output.write(dataId.getBytes()); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/input/InputFormatIteratorWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.input; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.adapter.exceptions.AdapterException; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.geowave.mapreduce.HadoopWritableSerializationTool; import com.beust.jcommander.internal.Maps; /** * This is used internally to translate GeoWave rows into native objects (using the appropriate data * adapter). It also performs any client-side filtering. It will peek at the next entry in the * underlying datastore iterator to always maintain a reference to the next value. 
* * @param The type for the entry */ public class InputFormatIteratorWrapper implements Iterator> { protected final Iterator reader; private final QueryFilter[] queryFilters; private final HadoopWritableSerializationTool serializationTool; private final boolean isOutputWritable; protected Pair nextEntry; private final Index index; private final DataIndexRetrieval dataIndexRetrieval; private final AdapterIndexMappingStore mappingStore; private final Map indexMappings; public InputFormatIteratorWrapper( final Iterator reader, final QueryFilter[] queryFilters, final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore mappingStore, final Index index, final boolean isOutputWritable, final DataIndexRetrieval dataIndexRetrieval) { this.reader = reader; this.queryFilters = queryFilters; this.index = index; this.serializationTool = new HadoopWritableSerializationTool(adapterStore, internalAdapterStore); this.isOutputWritable = isOutputWritable; this.dataIndexRetrieval = dataIndexRetrieval; this.mappingStore = mappingStore; this.indexMappings = Maps.newHashMap(); } protected void findNext() { while ((this.nextEntry == null) && reader.hasNext()) { final GeoWaveRow nextRow = reader.next(); if (nextRow != null) { if (!indexMappings.containsKey(nextRow.getAdapterId())) { indexMappings.put( nextRow.getAdapterId(), mappingStore.getMapping(nextRow.getAdapterId(), index.getName())); } final Pair decodedValue = decodeRowToEntry( nextRow, queryFilters, (InternalDataAdapter) serializationTool.getInternalAdapter( nextRow.getAdapterId()), indexMappings.get(nextRow.getAdapterId()), index); if (decodedValue != null) { nextEntry = decodedValue; return; } } } } @SuppressWarnings("unchecked") protected Object decodeRowToValue( final GeoWaveRow row, final QueryFilter[] clientFilters, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { Object value = null; try { value = 
BaseDataStoreUtils.decodeRow( row, clientFilters, adapter, indexMapping, null, null, index, null, null, true, dataIndexRetrieval); } catch (final AdapterException e) { return null; } if (value == null) { return null; } return value; } @SuppressWarnings("unchecked") protected Pair decodeRowToEntry( final GeoWaveRow row, final QueryFilter[] clientFilters, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { final Object value = decodeRowToValue(row, clientFilters, adapter, indexMapping, index); if (value == null) { return null; } return valueToEntry(row, value); } protected Pair valueToEntry(final GeoWaveRow row, final Object value) { final short adapterId = row.getAdapterId(); final T result = (T) (isOutputWritable ? serializationTool.getHadoopWritableSerializerForAdapter(adapterId).toWritable(value) : value); final GeoWaveInputKey key = new GeoWaveInputKey(row, index.getName()); return Pair.of(key, result); } @Override public boolean hasNext() { findNext(); return nextEntry != null; } @Override public Pair next() throws NoSuchElementException { final Pair previousNext = nextEntry; if (nextEntry == null) { throw new NoSuchElementException(); } nextEntry = null; return previousNext; } @Override public void remove() { reader.remove(); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/ConfigHDFSCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.operations; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.ConfigSection; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "hdfs", parentOperation = ConfigSection.class) @Parameters(commandDescription = "Create a local configuration for HDFS") public class ConfigHDFSCommand extends ServiceEnabledCommand { /** Return "200 OK" for the config HDFS command. 
*/ @Override public Boolean successStatusIs200() { return true; } private static final String HDFS_DEFAULTFS_PREFIX = "hdfs.defaultFS"; private static final String HDFS_DEFAULTFS_URL = HDFS_DEFAULTFS_PREFIX + ".url"; @Parameter(description = "") private List parameters = new ArrayList<>(); private String url = null; @Override public boolean prepare(final OperationParams params) { boolean retval = true; retval |= super.prepare(params); return retval; } @Override public void execute(final OperationParams params) throws Exception { computeResults(params); } public static String getHdfsUrl(final Properties configProperties) { String hdfsFSUrl = configProperties.getProperty(ConfigHDFSCommand.HDFS_DEFAULTFS_URL); if (hdfsFSUrl == null) { throw new ParameterException( "HDFS DefaultFS URL is empty. Config using \"geowave config hdfs \""); } if (!hdfsFSUrl.contains("://")) { hdfsFSUrl = "hdfs://" + hdfsFSUrl; } return hdfsFSUrl; } public void setHdfsUrlParameter(final String hdfsFsUrl) { parameters = new ArrayList<>(); parameters.add(hdfsFsUrl); } @Override public Void computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException( "Requires argument: (HDFS hostname:port or namenode HA nameservice, eg: sandbox.mydomain.com:8020 )"); } url = parameters.get(0); final Properties existingProps = getGeoWaveConfigProperties(params); // all switches are optional if (url != null) { existingProps.setProperty(HDFS_DEFAULTFS_URL, url); } // Write properties file ConfigOptions.writeProperties( getGeoWaveConfigFile(params), existingProps, this.getClass(), HDFS_DEFAULTFS_PREFIX, params.getConsole()); return null; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/CopyCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.cli.store.StoreSection; import org.locationtech.geowave.mapreduce.copy.StoreCopyJobRunner; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "copymr", parentOperation = StoreSection.class) @Parameters( commandDescription = "Copy all data from one data store to another existing data store using MapReduce") public class CopyCommand extends DefaultOperation implements Command { @Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private CopyCommandOptions options = new CopyCommandOptions(); private DataStorePluginOptions inputStoreOptions = null; private DataStorePluginOptions outputStoreOptions = null; @Override public void execute(final OperationParams params) throws Exception { createRunner(params).runJob(); } public StoreCopyJobRunner createRunner(final OperationParams params) 
{ // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); final String outputStoreName = parameters.get(1); // Config file final File configFile = getGeoWaveConfigFile(params); if (options.getHdfsHostPort() == null) { final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties); options.setHdfsHostPort(hdfsFSUrl); } // Attempt to load input store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); // Attempt to load output store. outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole()); final String jobName = "Copy " + inputStoreName + " to " + outputStoreName; final StoreCopyJobRunner runner = new StoreCopyJobRunner(inputStoreOptions, outputStoreOptions, options, jobName); return runner; } public List getParameters() { return parameters; } public void setParameters(final String inputStore, final String outputStore) { parameters = new ArrayList<>(); parameters.add(inputStore); parameters.add(outputStore); } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public DataStorePluginOptions getOutputStoreOptions() { return outputStoreOptions; } public CopyCommandOptions getOptions() { return options; } public void setOptions(final CopyCommandOptions options) { this.options = options; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/CopyCommandOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.mapreduce.operations;

import com.beust.jcommander.Parameter;

/** JCommander options bean for the store copy MapReduce command. */
public class CopyCommandOptions {
  // HDFS default filesystem URL; normalized by HdfsHostPortConverter
  @Parameter(
      names = "--hdfsHostPort",
      description = "The hdfs host port",
      converter = HdfsHostPortConverter.class)
  private String hdfsHostPort;

  @Parameter(
      names = "--jobSubmissionHostPort",
      required = true,
      description = "The job submission tracker")
  private String jobTrackerOrResourceManHostPort;

  @Parameter(names = "--minSplits", description = "The min partitions for the input data")
  private Integer minSplits;

  @Parameter(names = "--maxSplits", description = "The max partitions for the input data")
  private Integer maxSplits;

  @Parameter(
      names = "--numReducers",
      description = "Number of threads writing at a time (default: 8)")
  private Integer numReducers = 8;

  // Default constructor
  public CopyCommandOptions() {}

  public CopyCommandOptions(
      final Integer minSplits,
      final Integer maxSplits,
      final Integer numReducers) {
    this.minSplits = minSplits;
    this.maxSplits = maxSplits;
    this.numReducers = numReducers;
  }

  public String getHdfsHostPort() {
    return hdfsHostPort;
  }

  public String getJobTrackerOrResourceManHostPort() {
    return jobTrackerOrResourceManHostPort;
  }

  public Integer getMinSplits() {
    return minSplits;
  }

  public Integer getMaxSplits() {
    return maxSplits;
  }

  public Integer getNumReducers() {
    return numReducers;
  }

  public void setHdfsHostPort(final String hdfsHostPort) {
    this.hdfsHostPort = hdfsHostPort;
  }

  public void setJobTrackerOrResourceManHostPort(final String jobTrackerOrResourceManHostPort) {
    this.jobTrackerOrResourceManHostPort = jobTrackerOrResourceManHostPort;
  }

  public void setMinSplits(final Integer minSplits) {
    this.minSplits = minSplits;
  }

  public void setMaxSplits(final Integer maxSplits) {
    this.maxSplits = maxSplits;
  }

  public void setNumReducers(final Integer numReducers) {
    this.numReducers = numReducers;
  }
}
================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/HdfsHostPortConverter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.operations; import org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter; /** This class will ensure that the hdfs parameter is in the correct format. */ public class HdfsHostPortConverter extends GeoWaveBaseConverter { public HdfsHostPortConverter(final String optionName) { super(optionName); } @Override public String convert(String hdfsHostPort) { if (!hdfsHostPort.contains("://")) { hdfsHostPort = "hdfs://" + hdfsHostPort; } return hdfsHostPort; } @Override public boolean isRequired() { return true; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/operations/MapReduceOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.operations; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class MapReduceOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] {CopyCommand.class, ConfigHDFSCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/output/GeoWaveOutputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.output; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.AdapterStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.WriteResults; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.geowave.mapreduce.JobContextAdapterStore; import org.locationtech.geowave.mapreduce.JobContextIndexStore; import 
org.locationtech.geowave.mapreduce.MapReduceDataStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This output format is the preferred mechanism for writing data to GeoWave within a map-reduce * job. */ public class GeoWaveOutputFormat extends OutputFormat, Object> { private static final Class CLASS = GeoWaveOutputFormat.class; protected static final Logger LOGGER = LoggerFactory.getLogger(CLASS); @Override public RecordWriter, Object> getRecordWriter( final TaskAttemptContext context) throws IOException, InterruptedException { try { final Map configOptions = getStoreOptionsMap(context); final IndexStore persistentIndexStore = GeoWaveStoreFinder.createIndexStore(configOptions); final DataStore dataStore = GeoWaveStoreFinder.createDataStore(configOptions); final Index[] indices = JobContextIndexStore.getIndices(context); if (LOGGER.isDebugEnabled()) { final StringBuilder sbDebug = new StringBuilder(); sbDebug.append("Config Options: "); for (final Map.Entry entry : configOptions.entrySet()) { sbDebug.append(entry.getKey() + "/" + entry.getValue() + ", "); } sbDebug.append("\n\tIndices Size: " + indices.length); sbDebug.append("\n\tpersistentIndexStore: " + persistentIndexStore); final String filename = "/META-INF/services/org.locationtech.geowave.core.store.StoreFactoryFamilySpi"; final InputStream is = context.getClass().getResourceAsStream(filename); if (is == null) { sbDebug.append("\n\tStoreFactoryFamilySpi: Unable to open file '" + filename + "'"); } else { sbDebug.append("\n\tStoreFactoryFamilySpi: " + IOUtils.toString(is, "UTF-8")); is.close(); } LOGGER.debug(sbDebug.toString()); } for (final Index i : indices) { if (!persistentIndexStore.indexExists(i.getName())) { dataStore.addIndex(i); } } final TransientAdapterStore jobContextAdapterStore = GeoWaveConfiguratorBase.getJobContextAdapterStore(CLASS, context); final IndexStore jobContextIndexStore = new JobContextIndexStore(context, persistentIndexStore); return new 
GeoWaveRecordWriter(dataStore, jobContextIndexStore, jobContextAdapterStore); } catch (final Exception e) { throw new IOException(e); } } public static void setStoreOptions( final Configuration config, final DataStorePluginOptions storeOptions) { if (storeOptions != null) { GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, storeOptions.getOptionsAsMap()); final DataStore dataStore = storeOptions.createDataStore(); if ((dataStore != null) && (dataStore instanceof MapReduceDataStore)) { ((MapReduceDataStore) dataStore).prepareRecordWriter(config); } } else { GeoWaveConfiguratorBase.setStoreOptionsMap(CLASS, config, null); } } public static void addIndex(final Configuration config, final Index index) { JobContextIndexStore.addIndex(config, index); } public static void addDataAdapter(final Configuration config, final DataTypeAdapter adapter) { JobContextAdapterStore.addDataAdapter(config, adapter); } public static IndexStore getJobContextIndexStore(final JobContext context) { return GeoWaveConfiguratorBase.getJobContextIndexStore(CLASS, context); } public static AdapterStore getJobContextAdapterStore(final JobContext context) { return GeoWaveConfiguratorBase.getJobContextAdapterStore(CLASS, context); } public static AdapterIndexMappingStore getJobContextAdapterIndexMappingStore( final JobContext context) { return GeoWaveConfiguratorBase.getJobContextAdapterIndexMappingStore(CLASS, context); } public static InternalAdapterStore getJobContextInternalAdapterStore(final JobContext context) { return GeoWaveConfiguratorBase.getJobContextInternalAdapterStore(CLASS, context); } public static DataStorePluginOptions getStoreOptions(final JobContext context) { return GeoWaveConfiguratorBase.getStoreOptions(CLASS, context); } public static Map getStoreOptionsMap(final JobContext context) { return GeoWaveConfiguratorBase.getStoreOptionsMap(CLASS, context); } @Override public void checkOutputSpecs(final JobContext context) throws IOException, InterruptedException { // 
attempt to get each of the GeoWave stores from the job context try { final Map configOptions = getStoreOptionsMap(context); if (GeoWaveStoreFinder.createDataStore(configOptions) == null) { final String msg = "Unable to find GeoWave data store"; LOGGER.warn(msg); throw new IOException(msg); } if (GeoWaveStoreFinder.createIndexStore(configOptions) == null) { final String msg = "Unable to find GeoWave index store"; LOGGER.warn(msg); throw new IOException(msg); } if (GeoWaveStoreFinder.createAdapterStore(configOptions) == null) { final String msg = "Unable to find GeoWave adapter store"; LOGGER.warn(msg); throw new IOException(msg); } if (GeoWaveStoreFinder.createDataStatisticsStore(configOptions) == null) { final String msg = "Unable to find GeoWave data statistics store"; LOGGER.warn(msg); throw new IOException(msg); } } catch (final Exception e) { LOGGER.warn("Error finding GeoWave stores", e); throw new IOException("Error finding GeoWave stores", e); } } @Override public OutputCommitter getOutputCommitter(final TaskAttemptContext context) throws IOException, InterruptedException { return new NullOutputFormat().getOutputCommitter(context); } /** A base class to be used to create {@link RecordWriter} instances that write to GeoWave. */ public static class GeoWaveRecordWriter extends RecordWriter, Object> { private final Map> adapterTypeNameToIndexWriterCache = new HashMap<>(); private final TransientAdapterStore adapterStore; private final IndexStore indexStore; private final DataStore dataStore; public GeoWaveRecordWriter( final DataStore dataStore, final IndexStore indexStore, final TransientAdapterStore adapterStore) { this.dataStore = dataStore; this.adapterStore = adapterStore; this.indexStore = indexStore; } /** * Push a mutation into a table. If table is null, the defaultTable will be used. If * canCreateTable is set, the table will be created if it does not exist. The table name must * only contain alphanumerics and underscore. 
*/ @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void write(final GeoWaveOutputKey ingestKey, final Object data) throws IOException { boolean success = false; String errorMessage = null; if (ingestKey.getIndexNames().length == 0) { throw new IOException("Empty index name input list"); } final DataTypeAdapter adapter = ingestKey.getAdapter(adapterStore); if (adapter != null) { final Writer indexWriter = getIndexWriter(adapter, ingestKey.getIndexNames()); if (indexWriter != null) { final WriteResults writeList = indexWriter.write(data); if (!writeList.isEmpty()) { success = true; } else { errorMessage = "Empty write list"; } } else { errorMessage = "Cannot write to index '" + Arrays.toString(ingestKey.getIndexNames()) + "'"; } } else { errorMessage = "Adapter '" + ingestKey.getTypeName() + "' does not exist"; } if (!success) { throw new IOException(errorMessage); } } private synchronized Writer getIndexWriter( final DataTypeAdapter adapter, final String[] indexNames) { Writer writer = adapterTypeNameToIndexWriterCache.get(adapter.getTypeName()); if (writer == null) { final Index[] indices = new Index[indexNames.length]; int i = 0; for (final String indexName : indexNames) { final Index index = indexStore.getIndex(indexName); if (index != null) { indices[i++] = index; } else { LOGGER.warn("Index '" + indexName + "' does not exist"); } } dataStore.addType(adapter, indices); writer = dataStore.createWriter(adapter.getTypeName()); adapterTypeNameToIndexWriterCache.put(adapter.getTypeName(), writer); } return writer; } @Override public synchronized void close(final TaskAttemptContext attempt) throws IOException, InterruptedException { for (final Writer indexWriter : adapterTypeNameToIndexWriterCache.values()) { indexWriter.close(); } } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/output/GeoWaveOutputKey.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.output; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; import org.apache.hadoop.io.WritableComparable; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class encapsulates the unique identifier for GeoWave to ingest data using a map-reduce * GeoWave output format. The record writer must have bother the adapter and the index for the data * element to ingest. 
*/ public class GeoWaveOutputKey implements WritableComparable, java.io.Serializable { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveOutputKey.class); /** */ private static final long serialVersionUID = 1L; protected String typeName; private String[] indexNames; private transient DataTypeAdapter adapter; protected GeoWaveOutputKey() { super(); } public GeoWaveOutputKey(final String typeName, final String indexName) { this.typeName = typeName; indexNames = new String[] {indexName}; } public GeoWaveOutputKey(final String typeName, final String[] indexNames) { this.typeName = typeName; this.indexNames = indexNames; } public GeoWaveOutputKey(final DataTypeAdapter adapter, final String[] indexNames) { this.adapter = adapter; this.indexNames = indexNames; typeName = adapter.getTypeName(); } public GeoWaveOutputKey(final GeoWaveData data) { this.adapter = data.getAdapter(); this.indexNames = data.getIndexNames(); this.typeName = data.getTypeName(); } public String getTypeName() { return typeName; } public void setTypeName(final String typeName) { this.typeName = typeName; } public String[] getIndexNames() { return indexNames; } public DataTypeAdapter getAdapter(final TransientAdapterStore adapterCache) { if (adapter != null) { return adapter; } return (DataTypeAdapter) adapterCache.getAdapter(typeName); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(indexNames); result = (prime * result) + ((typeName == null) ? 
0 : typeName.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final GeoWaveOutputKey other = (GeoWaveOutputKey) obj; if (!Arrays.equals(indexNames, other.indexNames)) { return false; } if (typeName == null) { if (other.typeName != null) { return false; } } else if (!typeName.equals(other.typeName)) { return false; } return true; } @Override public int compareTo(final GeoWaveOutputKey o) { final int adapterCompare = typeName.compareTo(o.typeName); if (adapterCompare != 0) { return adapterCompare; } final int lengthCompare = Integer.compare(indexNames.length, o.indexNames.length); if (lengthCompare != 0) { return lengthCompare; } for (int i = 0; i < indexNames.length; i++) { final int indexNameCompare = indexNames[i].compareTo(o.indexNames[i]); if (indexNameCompare != 0) { return indexNameCompare; } } return 0; } @Override public void readFields(final DataInput input) throws IOException { final int typeNameLength = input.readInt(); final byte[] typeNameBinary = new byte[typeNameLength]; input.readFully(typeNameBinary); typeName = StringUtils.stringFromBinary(typeNameBinary); final byte indexNameCount = input.readByte(); indexNames = new String[indexNameCount]; for (int i = 0; i < indexNameCount; i++) { final int indexNameLength = input.readInt(); final byte[] indexNameBytes = new byte[indexNameLength]; input.readFully(indexNameBytes); indexNames[i] = StringUtils.stringFromBinary(indexNameBytes); } } @Override public void write(final DataOutput output) throws IOException { final byte[] typeNameBinary = StringUtils.stringToBinary(typeName); output.writeInt(typeNameBinary.length); output.write(typeNameBinary); output.writeByte(indexNames.length); for (final String indexName : indexNames) { final byte[] indexNameBytes = StringUtils.stringToBinary(indexName); output.writeInt(indexNameBytes.length); 
output.write(indexNameBytes); } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/DefaultGeoWaveAWSCredentialsProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.s3; import com.amazonaws.SdkClientException; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AnonymousAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; class DefaultGeoWaveAWSCredentialsProvider extends DefaultAWSCredentialsProviderChain { @Override public AWSCredentials getCredentials() { try { return super.getCredentials(); } catch (final SdkClientException exception) { } // fall back to anonymous credentials return new AnonymousAWSCredentials(); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/GeoWaveAmazonS3Factory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.s3; import java.util.Properties; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.upplication.s3fs.AmazonS3ClientFactory; public class GeoWaveAmazonS3Factory extends AmazonS3ClientFactory { @Override protected AWSCredentialsProvider getCredentialsProvider(final Properties props) { final AWSCredentialsProvider credentialsProvider = super.getCredentialsProvider(props); if (credentialsProvider instanceof DefaultAWSCredentialsProviderChain) { return new DefaultGeoWaveAWSCredentialsProvider(); } return credentialsProvider; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3Params.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/** Immutable value object holding the two components of an S3 object address. */
public class S3Params {
  // bucket name, without any leading slash
  private final String bucket;
  // object key within the bucket
  private final String key;

  S3Params(final String bucket, final String key) {
    this.bucket = bucket;
    this.key = key;
  }

  /** @return the S3 bucket name */
  public String getBucket() {
    return bucket;
  }

  /** @return the object key within the bucket */
  public String getKey() {
    return key;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.s3; import java.io.IOException; import java.net.URL; import org.apache.commons.lang.StringUtils; public class S3ParamsExtractor { protected static S3Params extract(final URL url) throws IOException, IllegalArgumentException { if (!"s3".equals(url.getProtocol())) { throw new IllegalArgumentException("Unsupported protocol '" + url.getProtocol() + "'"); } // bucket final int index = StringUtils.ordinalIndexOf(url.getPath(), "/", 2); final String bucket = url.getPath().substring(1, index); // key final String key = url.getPath().substring(index + 1); return new S3Params(bucket, key); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3URLConnection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.s3; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.S3Object; public class S3URLConnection extends URLConnection { public static final String PROP_S3_HANDLER_USER_AGENT = "s3.handler.userAgent"; public static final String PROP_S3_HANDLER_PROTOCOL = "s3.handler.protocol"; public static final String PROP_S3_HANDLER_SIGNER_OVERRIDE = "s3.handler.signerOverride"; /** * Constructs a URL connection to the specified URL. A connection to the object referenced by the * URL is not created. * * @param url the specified URL. 
*/ public S3URLConnection(final URL url) { super(url); } @Override public InputStream getInputStream() throws IOException { final S3Params s3Params = S3ParamsExtractor.extract(url); final ClientConfiguration clientConfig = buildClientConfig(); final AmazonS3 s3Client = new AmazonS3Client(new DefaultGeoWaveAWSCredentialsProvider(), clientConfig); final S3Object object = s3Client.getObject(s3Params.getBucket(), s3Params.getKey()); return object.getObjectContent(); } @Override public void connect() throws IOException { // do nothing } // ----------------------------------------------------------------------------------------------------------------- private ClientConfiguration buildClientConfig() { final String userAgent = System.getProperty(PROP_S3_HANDLER_USER_AGENT, null); final String protocol = System.getProperty(PROP_S3_HANDLER_PROTOCOL, "https"); final String signerOverride = System.getProperty(PROP_S3_HANDLER_SIGNER_OVERRIDE, null); final ClientConfiguration clientConfig = new ClientConfiguration().withProtocol( "https".equalsIgnoreCase(protocol) ? Protocol.HTTPS : Protocol.HTTP); if (userAgent != null) { clientConfig.setUserAgent(userAgent); } if (signerOverride != null) { clientConfig.setSignerOverride(signerOverride); } return clientConfig; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3URLStreamHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.s3; import java.io.IOException; import java.net.URL; import java.net.URLConnection; import java.net.URLStreamHandler; public class S3URLStreamHandler extends URLStreamHandler { @Override protected URLConnection openConnection(final URL url) throws IOException { return new S3URLConnection(url); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/s3/S3URLStreamHandlerFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.s3; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; import java.util.Optional; public class S3URLStreamHandlerFactory implements URLStreamHandlerFactory { // The wrapped URLStreamHandlerFactory's instance private final Optional delegate; /** Used in case there is no existing URLStreamHandlerFactory defined */ public S3URLStreamHandlerFactory() { this(null); } /** Used in case there is an existing URLStreamHandlerFactory defined */ public S3URLStreamHandlerFactory(final URLStreamHandlerFactory delegate) { this.delegate = Optional.ofNullable(delegate); } @Override public URLStreamHandler createURLStreamHandler(final String protocol) { if ("s3".equals(protocol)) { return new S3URLStreamHandler(); // my S3 URLStreamHandler; } // It is not the s3 protocol so we delegate it to the wrapped // URLStreamHandlerFactory return delegate.map(factory -> factory.createURLStreamHandler(protocol)).orElse(null); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/GeoWaveInputSplit.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.splits; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputSplit; import org.locationtech.geowave.core.index.StringUtils; /** * The Class GeoWaveInputSplit. Encapsulates a GeoWave Index and a set of Row ranges for use in Map * Reduce jobs. */ public class GeoWaveInputSplit extends InputSplit implements Writable { private Map splitInfo; private String[] locations; protected GeoWaveInputSplit() { splitInfo = new HashMap<>(); locations = new String[] {}; } protected GeoWaveInputSplit(final Map splitInfo, final String[] locations) { this.splitInfo = splitInfo; this.locations = locations; } public Set getIndexNames() { return splitInfo.keySet(); } public SplitInfo getInfo(final String indexName) { return splitInfo.get(indexName); } /** * This implementation of length is only an estimate, it does not provide exact values. Do not * have your code rely on this return value. 
*/ @Override public long getLength() throws IOException { long diff = 0; for (final Entry indexEntry : splitInfo.entrySet()) { for (final RangeLocationPair range : indexEntry.getValue().getRangeLocationPairs()) { diff += (long) range.getCardinality(); } } return diff; } @Override public String[] getLocations() throws IOException { return locations; } @Override public void readFields(final DataInput in) throws IOException { final int numIndices = in.readInt(); splitInfo = new HashMap<>(numIndices); for (int i = 0; i < numIndices; i++) { final int indexNameLength = in.readInt(); final byte[] indexNameBytes = new byte[indexNameLength]; in.readFully(indexNameBytes); final String indexName = StringUtils.stringFromBinary(indexNameBytes); final SplitInfo si = new SplitInfo(); si.readFields(in); splitInfo.put(indexName, si); } final int numLocs = in.readInt(); locations = new String[numLocs]; for (int i = 0; i < numLocs; ++i) { locations[i] = in.readUTF(); } } @Override public void write(final DataOutput out) throws IOException { out.writeInt(splitInfo.size()); for (final Entry range : splitInfo.entrySet()) { final byte[] indexNameBytes = StringUtils.stringToBinary(range.getKey()); out.writeInt(indexNameBytes.length); out.write(indexNameBytes); final SplitInfo rangeList = range.getValue(); rangeList.write(out); } out.writeInt(locations.length); for (final String location : locations) { out.writeUTF(location); } } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/GeoWaveRecordReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.splits; import java.io.Closeable; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.Set; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.BaseDataStore; import org.locationtech.geowave.core.store.base.BaseQueryOptions; import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval; import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval; import 
org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import org.locationtech.geowave.core.store.entities.GeoWaveKey; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.RowReader; import org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.mapreduce.MapReduceDataStoreOperations; import org.locationtech.geowave.mapreduce.input.AsyncInputFormatIteratorWrapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.input.InputFormatIteratorWrapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Iterators; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * This class is used by the GeoWaveInputFormat to read data from a GeoWave data store. 
* * @param The native type for the reader */ public class GeoWaveRecordReader extends RecordReader { protected static class ProgressPerRange { private final float startProgress; private final float deltaProgress; public ProgressPerRange(final float startProgress, final float endProgress) { this.startProgress = startProgress; deltaProgress = endProgress - startProgress; } public float getOverallProgress(final float rangeProgress) { return startProgress + (rangeProgress * deltaProgress); } } protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRecordReader.class); protected long numKeysRead; protected CloseableIterator iterator; protected Map progressPerRange; protected GeoWaveInputKey currentGeoWaveKey = null; protected RangeLocationPair currentGeoWaveRangeIndexPair = null; protected T currentValue = null; protected GeoWaveInputSplit split; protected QueryConstraints constraints; protected BaseQueryOptions sanitizedQueryOptions; protected boolean isOutputWritable; protected TransientAdapterStore adapterStore; protected InternalAdapterStore internalAdapterStore; protected AdapterIndexMappingStore aimStore; protected IndexStore indexStore; protected BaseDataStore dataStore; protected MapReduceDataStoreOperations operations; protected int dataIndexBatchSize; public GeoWaveRecordReader( final CommonQueryOptions commonOptions, final DataTypeQueryOptions typeOptions, final IndexQueryOptions indexOptions, final QueryConstraints constraints, final boolean isOutputWritable, final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore aimStore, final IndexStore indexStore, final MapReduceDataStoreOperations operations, final int dataIndexBatchSize) { this.constraints = constraints; // all queries will use the same instance of the dedupe filter for // client side filtering because the filter needs to be applied across // indices sanitizedQueryOptions = new BaseQueryOptions( commonOptions, 
// NOTE(review): this is the tail of a constructor whose signature lies above this chunk; it
// finishes a super(...) call and caches the collaborators this record reader uses.
typeOptions,
indexOptions,
new AdapterStoreWrapper(adapterStore, internalAdapterStore),
internalAdapterStore);
this.isOutputWritable = isOutputWritable;
this.adapterStore = adapterStore;
this.internalAdapterStore = internalAdapterStore;
this.aimStore = aimStore;
this.indexStore = indexStore;
this.operations = operations;
this.dataIndexBatchSize = dataIndexBatchSize;
}

/** Initialize a scanner over the given input split using this task attempt configuration. */
@Override
public void initialize(final InputSplit inSplit, final TaskAttemptContext attempt)
    throws IOException {
  split = (GeoWaveInputSplit) inSplit;
  numKeysRead = 0;
  final Set indices = split.getIndexNames();
  // NOTE(review): 'sum' is a final, immutable BigDecimal passed by value to fillIterators(),
  // which re-assigns only its own parameter copy — so this stays ZERO and the progress
  // computation below is always skipped. Looks like a latent bug; confirm against upstream.
  final BigDecimal sum = BigDecimal.ZERO;
  // per-range running totals; LinkedHashMap keeps insertion order so progress is monotonic
  final Map incrementalRangeSums = new LinkedHashMap<>();
  final List>> allIterators = new ArrayList<>();
  final NextRangeCallback callback = new InternalCallback();
  final short[] adapters;
  // do a check for AdapterAndIndexBasedQueryConstraints in case
  // the splits provider was unable to set it
  if (constraints instanceof AdapterAndIndexBasedQueryConstraints) {
    adapters = sanitizedQueryOptions.getAdapterIds(internalAdapterStore);
  } else {
    adapters = null;
  }
  for (final String i : indices) {
    final SplitInfo splitInfo = split.getInfo(i);
    List queryFilters = null;
    if (constraints != null) {
      // if adapters isn't null that also means this constraint is
      // AdapterAndIndexBasedQueryConstraints
      if (adapters != null) {
        InternalDataAdapter adapter = null;
        if (adapters.length > 1) {
          // this should be a rare situation, but just in case, loop over adapters and fill the
          // iterator of results per adapter
          for (final short adapterId : adapters) {
            final String typeName = internalAdapterStore.getTypeName(adapterId);
            if (typeName != null) {
              final DataTypeAdapter baseAdapter = adapterStore.getAdapter(typeName);
              if (baseAdapter != null) {
                adapter = baseAdapter.asInternalAdapter(adapterId);
              }
            }
            if (adapter == null) {
              LOGGER.warn("Unable to find type matching an adapter dependent query");
            }
            queryFilters =
                ((AdapterAndIndexBasedQueryConstraints) constraints).createQueryConstraints(
                    adapter,
                    splitInfo.getIndex(),
                    aimStore.getMapping(
                        adapterId,
                        splitInfo.getIndex().getName())).createFilters(splitInfo.getIndex());
            sanitizedQueryOptions.setAdapterId(adapterId);
            fillIterators(
                allIterators,
                splitInfo,
                queryFilters,
                sum,
                incrementalRangeSums,
                callback);
          }
          // per-adapter iterators were already filled; skip the single-adapter path below
          continue;
        }
        // in practice this is used for CQL and you can't have
        // multiple types/adapters
        if (adapters.length == 1) {
          final String typeName = internalAdapterStore.getTypeName(adapters[0]);
          if (typeName != null) {
            final DataTypeAdapter baseAdapter = adapterStore.getAdapter(typeName);
            if (baseAdapter != null) {
              adapter = baseAdapter.asInternalAdapter(adapters[0]);
            }
          }
        }
        if (adapter == null) {
          LOGGER.warn("Unable to find type matching an adapter dependent query");
        }
        final QueryConstraints tempConstraints =
            ((AdapterAndIndexBasedQueryConstraints) constraints).createQueryConstraints(
                adapter,
                splitInfo.getIndex(),
                adapter != null
                    ? aimStore.getMapping(adapter.getAdapterId(), splitInfo.getIndex().getName())
                    : null);
        if (tempConstraints == null) {
          // NOTE(review): if 'adapter' is null here the dereference below throws NPE — the
          // null case above only logs a warning. Confirm whether
          // createQueryConstraints(null, ...) can legitimately return null.
          LOGGER.warn(
              "Adapter and Index based constraints not satisfied for adapter '"
                  + adapter.getTypeName()
                  + "'");
          continue;
        } else {
          // replace the field so subsequent indices reuse the resolved constraints
          constraints = tempConstraints;
        }
      }
      queryFilters = constraints.createFilters(splitInfo.getIndex());
    }
    fillIterators(allIterators, splitInfo, queryFilters, sum, incrementalRangeSums, callback);
  }
  // finally we can compute percent progress
  progressPerRange = new LinkedHashMap<>();
  RangeLocationPair prevRangeIndex = null;
  float prevProgress = 0f;
  if (sum.compareTo(BigDecimal.ZERO) > 0) {
    try {
      for (final Entry entry : incrementalRangeSums.entrySet()) {
        final BigDecimal value = entry.getValue();
        final float progress = value.divide(sum, RoundingMode.HALF_UP).floatValue();
        if (prevRangeIndex != null) {
          progressPerRange.put(prevRangeIndex, new ProgressPerRange(prevProgress, progress));
        }
        prevRangeIndex = entry.getKey();
        prevProgress = progress;
      }
      // the final range always runs out to 100%
      progressPerRange.put(prevRangeIndex, new ProgressPerRange(prevProgress, 1f));
    } catch (final Exception e) {
      LOGGER.warn("Unable to calculate progress", e);
    }
  }
  // concatenate iterators
  iterator = new CloseableIteratorWrapper<>(new Closeable() {
    @Override
    public void close() throws IOException {
      for (final CloseableIterator> reader : allIterators) {
        reader.close();
      }
    }
  }, Iterators.concat(allIterators.iterator()));
}

/**
 * Opens one reader per range in the given split info and appends a single concatenated
 * key/value iterator (with a range-change callback) to {@code allIterators}. Also records, per
 * range, the running cardinality total used for progress reporting.
 *
 * <p>NOTE(review): {@code sum} is re-assigned locally only — BigDecimal is immutable, so the
 * caller's accumulator is never advanced (see note in {@code initialize}).
 */
private void fillIterators(
    final List>> allIterators,
    final SplitInfo splitInfo,
    final List queryFilters,
    BigDecimal sum,
    final Map incrementalRangeSums,
    final NextRangeCallback callback) {
  if (!splitInfo.getRangeLocationPairs().isEmpty()) {
    final QueryFilter[] filters =
        ((queryFilters == null) || queryFilters.isEmpty()) ? null
            : queryFilters.toArray(new QueryFilter[0]);
    final PersistentAdapterStore persistentAdapterStore =
        new AdapterStoreWrapper(adapterStore, internalAdapterStore);
    final DataIndexRetrieval dataIndexRetrieval =
        DataIndexUtils.getDataIndexRetrieval(
            operations,
            persistentAdapterStore,
            aimStore,
            internalAdapterStore,
            splitInfo.getIndex(),
            sanitizedQueryOptions.getFieldIdsAdapterPair(),
            sanitizedQueryOptions.getAggregation(),
            sanitizedQueryOptions.getAuthorizations(),
            dataIndexBatchSize);
    final List>> indexReaders =
        new ArrayList<>(splitInfo.getRangeLocationPairs().size());
    for (final RangeLocationPair r : splitInfo.getRangeLocationPairs()) {
      indexReaders.add(
          Pair.of(
              r,
              operations.createReader(
                  new RecordReaderParams(
                      splitInfo.getIndex(),
                      persistentAdapterStore,
                      aimStore,
                      internalAdapterStore,
                      sanitizedQueryOptions.getAdapterIds(internalAdapterStore),
                      sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(),
                      sanitizedQueryOptions.getAggregation(),
                      sanitizedQueryOptions.getFieldIdsAdapterPair(),
                      splitInfo.isMixedVisibility(),
                      splitInfo.isAuthorizationsLimiting(),
                      splitInfo.isClientsideRowMerging(),
                      r.getRange(),
                      sanitizedQueryOptions.getLimit(),
                      sanitizedQueryOptions.getMaxRangeDecomposition(),
                      sanitizedQueryOptions.getAuthorizations()))));
      // store the running total *before* this range, then advance the local copy
      incrementalRangeSums.put(r, sum);
      sum = sum.add(BigDecimal.valueOf(r.getCardinality()));
    }
    allIterators.add(
        concatenateWithCallback(
            indexReaders,
            callback,
            splitInfo.getIndex(),
            filters,
            dataIndexRetrieval));
  }
}

/**
 * Wraps a raw GeoWaveRow iterator into the input-format key/value iterator, choosing the
 * asynchronous wrapper when rows must be resolved through a batched data-index retrieval.
 */
protected Iterator> rowReaderToKeyValues(
    final Index index,
    final QueryFilter[] filters,
    final DataIndexRetrieval dataIndexRetrieval,
    final Iterator reader) {
  InputFormatIteratorWrapper iteratorWrapper;
  if (dataIndexRetrieval instanceof BatchDataIndexRetrieval) {
    // need special handling to account for asynchronous batched retrieval from the data index
    iteratorWrapper =
        new AsyncInputFormatIteratorWrapper<>(
            reader,
            filters,
            adapterStore,
            internalAdapterStore,
            aimStore,
            index,
            isOutputWritable,
            (BatchDataIndexRetrieval) dataIndexRetrieval);
  } else {
    iteratorWrapper =
        new InputFormatIteratorWrapper<>(
            reader,
            filters,
            adapterStore,
            internalAdapterStore,
            aimStore,
            index,
            isOutputWritable,
            dataIndexRetrieval);
  }
  return iteratorWrapper;
}

/** Closes the underlying concatenated iterator (and therefore every per-range reader). */
@Override
public void close() {
  if (iterator != null) {
    iterator.close();
  }
}

@Override
public GeoWaveInputKey getCurrentKey() throws IOException, InterruptedException {
  return currentGeoWaveKey;
}

/**
 * Advances to the next key/value pair, updating {@code currentGeoWaveKey}/{@code currentValue}.
 *
 * @return true if a pair was read, false when the iterator is exhausted or was never initialized
 */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  if (iterator != null) {
    if (iterator.hasNext()) {
      ++numKeysRead;
      final Object value = iterator.next();
      if (value instanceof Entry) {
        final Entry entry = (Entry) value;
        currentGeoWaveKey = entry.getKey();
        currentValue = entry.getValue();
      }
      return true;
    }
  }
  return false;
}

@Override
public T getCurrentValue() throws IOException, InterruptedException {
  return currentValue;
}

/** Notified each time iteration crosses into a new range (used for progress accounting). */
protected static interface NextRangeCallback {
  public void setRange(RangeLocationPair indexPair);
}

/** Mostly guava's concatenate method, but there is a need for a callback between iterators */
protected CloseableIterator> concatenateWithCallback(
    final List>> inputs,
    final NextRangeCallback nextRangeCallback,
    final Index index,
    final QueryFilter[] filters,
    final DataIndexRetrieval dataIndexRetrieval) {
  Preconditions.checkNotNull(inputs);
  return new CloseableIteratorWrapper<>(new Closeable() {
    @Override
    public void close() {
      for (final Pair> input : inputs) {
        input.getRight().close();
      }
    }
  },
      rowReaderToKeyValues(
          index,
          filters,
          dataIndexRetrieval,
          new ConcatenatedIteratorWithCallback(nextRangeCallback, inputs.iterator())));
}

/** Combines within-range progress with the range's overall share of the split. */
private static float getOverallProgress(
    final GeoWaveRowRange range,
    final GeoWaveInputKey currentKey,
    final ProgressPerRange progress) {
  final float rangeProgress = getProgressForRange(range, currentKey);
  return progress.getOverallProgress(rangeProgress);
}

/**
 * Linear interpolation of {@code position} between {@code start} and {@code end}, comparing the
 * keys as big-endian integers truncated/padded to a common depth.
 *
 * <p>NOTE(review): if start and end compare equal at maxDepth the divisor is zero and the
 * result is NaN/Infinity — callers clamp via Math.min/max, but confirm that is intended.
 */
private static float getProgressForRange(
    final byte[] start,
    final byte[] end,
    final byte[] position) {
  final int maxDepth = Math.min(Math.max(end.length, start.length), position.length);
  final BigInteger startBI = new BigInteger(SplitsProvider.extractBytes(start, maxDepth));
  final BigInteger endBI = new BigInteger(SplitsProvider.extractBytes(end, maxDepth));
  final BigInteger positionBI = new BigInteger(SplitsProvider.extractBytes(position, maxDepth));
  return (float) (positionBI.subtract(startBI).doubleValue()
      / endBI.subtract(startBI).doubleValue());
}

/** Progress within a single range for the current key; 0 when it cannot be determined. */
private static float getProgressForRange(
    final GeoWaveRowRange range,
    final GeoWaveInputKey currentKey) {
  if (currentKey == null) {
    return 0f;
  }
  if ((range != null)
      && (range.getStartSortKey() != null)
      && (range.getEndSortKey() != null)
      && (currentKey.getGeoWaveKey() != null)) {
    // TODO GEOWAVE-1018 this doesn't account for partition keys at all
    // just look at the row progress
    return getProgressForRange(
        range.getStartSortKey(),
        range.getEndSortKey(),
        GeoWaveKey.getCompositeId(currentKey.getGeoWaveKey()));
  }
  // if we can't figure it out, then claim no progress
  return 0f;
}

/**
 * Overall task progress in [0, 1]. Falls back to per-range progress when no precomputed
 * {@code ProgressPerRange} exists for the current range.
 */
@Override
public float getProgress() throws IOException {
  if ((numKeysRead > 0) && (currentGeoWaveKey == null)) {
    // we read everything and then ran off the end
    return 1.0f;
  }
  if (currentGeoWaveRangeIndexPair == null) {
    return 0.0f;
  }
  final ProgressPerRange progress = progressPerRange.get(currentGeoWaveRangeIndexPair);
  if (progress == null) {
    return Math.min(
        1,
        Math.max(
            0,
            getProgressForRange(currentGeoWaveRangeIndexPair.getRange(), currentGeoWaveKey)));
  }
  return Math.min(
      1,
      Math.max(
          0,
          getOverallProgress(
              currentGeoWaveRangeIndexPair.getRange(),
              currentGeoWaveKey,
              progress)));
}

/** Records the range currently being iterated so getProgress() can report against it. */
private class InternalCallback implements NextRangeCallback {
  @Override
  public void setRange(final RangeLocationPair indexPair) {
    currentGeoWaveRangeIndexPair = indexPair;
  }
}

/**
 * Guava-style concatenating iterator that fires {@link NextRangeCallback#setRange} each time it
 * moves to the next underlying per-range iterator.
 */
private static class ConcatenatedIteratorWithCallback implements Iterator {
  private final NextRangeCallback nextRangeCallback;
  private final Iterator>> inputIteratorOfIterators;
  private Iterator currentIterator = Collections.emptyIterator();
  // iterator that produced the last next() result; target for remove()
  private Iterator removeFrom;

  public ConcatenatedIteratorWithCallback(
      final NextRangeCallback nextRangeCallback,
      final Iterator>> inputIteratorOfIterators) {
    super();
    this.nextRangeCallback = nextRangeCallback;
    this.inputIteratorOfIterators = inputIteratorOfIterators;
  }

  @Override
  public boolean hasNext() {
    boolean currentHasNext;
    // skip exhausted iterators, announcing each new range as we enter it
    while (!(currentHasNext = Preconditions.checkNotNull(currentIterator).hasNext())
        && inputIteratorOfIterators.hasNext()) {
      final Entry> entry = inputIteratorOfIterators.next();
      nextRangeCallback.setRange(entry.getKey());
      currentIterator = entry.getValue();
    }
    return currentHasNext;
  }

  @Override
  public GeoWaveRow next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    removeFrom = currentIterator;
    return currentIterator.next();
  }

  @SuppressFBWarnings(value = "NP_NULL_ON_SOME_PATH", justification = "Precondition catches null")
  @Override
  public void remove() {
    Preconditions.checkState(removeFrom != null, "no calls to next() since last call to remove()");
    removeFrom.remove();
    removeFrom = null;
  }
}
}


================================================
FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/GeoWaveRowRange.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.splits; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; import org.apache.hadoop.io.Writable; import org.locationtech.geowave.core.index.ByteArrayUtils; public class GeoWaveRowRange implements Writable { private byte[] partitionKey; private byte[] startKey; private byte[] endKey; private boolean startKeyInclusive; private boolean endKeyInclusive; protected GeoWaveRowRange() {} public GeoWaveRowRange( final byte[] partitionKey, final byte[] startKey, final byte[] endKey, final boolean startKeyInclusive, final boolean endKeyInclusive) { this.partitionKey = partitionKey; this.startKey = startKey; this.endKey = endKey; this.startKeyInclusive = startKeyInclusive; this.endKeyInclusive = endKeyInclusive; } @Override public void write(final DataOutput out) throws IOException { out.writeBoolean((partitionKey == null) || (partitionKey.length == 0)); out.writeBoolean(startKey == null); out.writeBoolean(endKey == null); if ((partitionKey != null) && (partitionKey.length > 0)) { out.writeShort(partitionKey.length); out.write(partitionKey); } if (startKey != null) { out.writeShort(startKey.length); out.write(startKey); } if (endKey != null) { out.writeShort(endKey.length); out.write(endKey); } out.writeBoolean(startKeyInclusive); out.writeBoolean(endKeyInclusive); } @Override public void readFields(final DataInput in) throws IOException { final boolean nullPartitionKey = in.readBoolean(); final boolean infiniteStartKey = in.readBoolean(); final boolean infiniteEndKey = in.readBoolean(); if (!nullPartitionKey) { partitionKey = new 
byte[in.readShort()]; in.readFully(partitionKey); } if (!infiniteStartKey) { startKey = new byte[in.readShort()]; in.readFully(startKey); } else { startKey = null; } if (!infiniteEndKey) { endKey = new byte[in.readShort()]; in.readFully(endKey); } else { endKey = null; } startKeyInclusive = in.readBoolean(); endKeyInclusive = in.readBoolean(); } public byte[] getPartitionKey() { return partitionKey; } public byte[] getStartSortKey() { return startKey; } public byte[] getEndSortKey() { return endKey; } public boolean isStartSortKeyInclusive() { return startKeyInclusive; } public boolean isEndSortKeyInclusive() { return endKeyInclusive; } public boolean isInfiniteStartSortKey() { return startKey == null; } public boolean isInfiniteStopSortKey() { return endKey == null; } public byte[] getCombinedStartKey() { if ((partitionKey == null) || (partitionKey.length == 0)) { return startKey; } return (startKey == null) ? null : ByteArrayUtils.combineArrays(partitionKey, startKey); } public byte[] getCombinedEndKey() { if ((partitionKey == null) || (partitionKey.length == 0)) { return endKey; } return (endKey == null) ? ByteArrayUtils.combineArrays(ByteArrayUtils.getNextPrefix(partitionKey), endKey) : ByteArrayUtils.combineArrays(partitionKey, endKey); } @Override public String toString() { return "GeoWaveRowRange [partitionKey=" + Arrays.toString(partitionKey) + ", startKey=" + Arrays.toString(startKey) + ", endKey=" + Arrays.toString(endKey) + ", startKeyInclusive=" + startKeyInclusive + ", endKeyInclusive=" + endKeyInclusive + "]"; } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/IntermediateSplitInfo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.mapreduce.splits;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.adapter.TransientAdapterStore;
import org.locationtech.geowave.core.store.adapter.statistics.histogram.ByteUtils;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.BaseDataStoreUtils;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A mutable, mergeable/splittable collection of per-index range/location pairs used while
 * balancing MapReduce input splits by estimated cardinality before they are finalized into
 * {@link GeoWaveInputSplit}s.
 */
public class IntermediateSplitInfo implements Comparable {
  private static final Logger LOGGER = LoggerFactory.getLogger(IntermediateSplitInfo.class);

  /** A single range/location pair tied to its index, splittable via a row-range histogram. */
  protected class IndexRangeLocation {
    private RangeLocationPair rangeLocationPair;
    private final Index index;

    public IndexRangeLocation(final RangeLocationPair rangeLocationPair, final Index index) {
      this.rangeLocationPair = rangeLocationPair;
      this.index = index;
    }

    /**
     * Splits this range so that roughly {@code targetCardinality - currentCardinality} of its
     * cardinality stays in this object; the remainder is returned as a new pair. Falls back to
     * the range midpoint when the histogram-derived key falls outside the range. Returns null
     * when no split is possible (this object may still be narrowed as a side effect).
     */
    public IndexRangeLocation split(
        final RowRangeHistogramValue stats,
        final double currentCardinality,
        final double targetCardinality) {
      if (stats == null) {
        return null;
      }
      final double thisCardinalty = rangeLocationPair.getCardinality();
      final double fraction = (targetCardinality - currentCardinality) / thisCardinalty;
      final byte[] start = rangeLocationPair.getRange().getStartSortKey();
      final byte[] end = rangeLocationPair.getRange().getEndSortKey();
      // null start/end means unbounded: use the full [0,1] CDF extent
      final double cdfStart = start == null ? 0.0 : stats.cdf(start);
      final double cdfEnd = end == null ? 1.0 : stats.cdf(end);
      final double expectedEndValue = stats.quantile(cdfStart + ((cdfEnd - cdfStart) * fraction));
      final int maxCardinality =
          Math.max(start != null ? start.length : 0, end != null ? end.length : 0);
      byte[] bytes = ByteUtils.toBytes(expectedEndValue);
      byte[] splitKey;
      if ((bytes.length < 8) && (bytes.length < maxCardinality)) {
        // prepend with 0
        bytes = expandBytes(bytes, Math.min(8, maxCardinality));
      }
      if (bytes.length < maxCardinality) {
        // right-pad with zero bytes up to the widest existing key
        splitKey = new byte[maxCardinality];
        System.arraycopy(bytes, 0, splitKey, 0, bytes.length);
      } else {
        splitKey = bytes;
      }
      final String location = rangeLocationPair.getLocation();
      final boolean startKeyInclusive = true;
      final boolean endKeyInclusive = false;
      if (((start != null) && (new ByteArray(start).compareTo(new ByteArray(splitKey)) >= 0))
          || ((end != null) && (new ByteArray(end).compareTo(new ByteArray(splitKey)) <= 0))) {
        // histogram-derived key landed outside the range; fall back to the midpoint
        splitKey = SplitsProvider.getMidpoint(rangeLocationPair.getRange());
        if (splitKey == null) {
          return null;
        }
        // if you can split the range only by setting the split to the
        // end, but its not inclusive on the end, just clamp this to the
        // start and don't split producing a new pair
        if (Arrays.equals(end, splitKey)
            && !rangeLocationPair.getRange().isEndSortKeyInclusive()) {
          rangeLocationPair =
              new RangeLocationPair(
                  new GeoWaveRowRange(
                      rangeLocationPair.getRange().getPartitionKey(),
                      rangeLocationPair.getRange().getStartSortKey(),
                      splitKey,
                      rangeLocationPair.getRange().isStartSortKeyInclusive(),
                      endKeyInclusive),
                  location,
                  stats.cardinality(rangeLocationPair.getRange().getStartSortKey(), splitKey));
          return null;
        }
      }
      try {
        // lower half becomes the returned pair; this object keeps the upper half
        final RangeLocationPair newPair =
            new RangeLocationPair(
                new GeoWaveRowRange(
                    rangeLocationPair.getRange().getPartitionKey(),
                    rangeLocationPair.getRange().getStartSortKey(),
                    splitKey,
                    rangeLocationPair.getRange().isStartSortKeyInclusive(),
                    endKeyInclusive),
                location,
                stats.cardinality(rangeLocationPair.getRange().getStartSortKey(), splitKey));
        rangeLocationPair =
            new RangeLocationPair(
                new GeoWaveRowRange(
                    rangeLocationPair.getRange().getPartitionKey(),
                    splitKey,
                    rangeLocationPair.getRange().getEndSortKey(),
                    startKeyInclusive,
                    rangeLocationPair.getRange().isEndSortKeyInclusive()),
                location,
                stats.cardinality(splitKey, rangeLocationPair.getRange().getEndSortKey()));
        return new IndexRangeLocation(newPair, index);
      } catch (final java.lang.IllegalArgumentException ex) {
        LOGGER.info("Unable to split range", ex);
        return null;
      }
    }

    /** Left-pads (or truncates) {@code valueBytes} to exactly {@code numBytes}. */
    private byte[] expandBytes(final byte valueBytes[], final int numBytes) {
      final byte[] bytes = new byte[numBytes];
      int expansion = 0;
      if (numBytes > valueBytes.length) {
        expansion = (numBytes - valueBytes.length);
        for (int i = 0; i < expansion; i++) {
          bytes[i] = 0;
        }
        for (int i = 0; i < valueBytes.length; i++) {
          bytes[expansion + i] = valueBytes[i];
        }
      } else {
        for (int i = 0; i < numBytes; i++) {
          bytes[i] = valueBytes[i];
        }
      }
      return bytes;
    }
  }

  // index name -> SplitInfo (ranges plus per-index flags)
  private final Map splitInfo;
  private final SplitsProvider splitsProvider;

  public IntermediateSplitInfo(
      final Map splitInfo,
      final SplitsProvider splitsProvider) {
    this.splitInfo = splitInfo;
    this.splitsProvider = splitsProvider;
  }

  /** Absorbs all range/location pairs from {@code split} into this split, index by index. */
  synchronized void merge(final IntermediateSplitInfo split) {
    for (final Entry e : split.splitInfo.entrySet()) {
      SplitInfo thisInfo = splitInfo.get(e.getKey());
      if (thisInfo == null) {
        thisInfo = new SplitInfo(e.getValue().getIndex());
        splitInfo.put(e.getKey(), thisInfo);
      }
      thisInfo.getRangeLocationPairs().addAll(e.getValue().getRangeLocationPairs());
    }
  }

  /**
   * Side effect: breaks this split up, keeping roughly half of the total cardinality here and
   * moving the rest into a newly returned {@code IntermediateSplitInfo}.
   *
   * @param statsCache row-range histograms keyed by (index, partition key)
   * @return the new split, or null when all ranges remained in this split.
   */
  synchronized IntermediateSplitInfo split(
      final Map, RowRangeHistogramValue> statsCache) {
    // generically you'd want the split to be as limiting to total
    // locations as possible and then as limiting as possible to total
    // indices, but in this case split() is only called when all ranges
    // are in the same location and the same index
    // NOTE(review): the comparator never returns 0 (compare(x, x) == 1), deliberately letting
    // the TreeSet act as a multiset of equal-cardinality entries; it violates the Comparator
    // contract, so do not use this set for lookups/removals by value
    final TreeSet orderedSplits = new TreeSet<>(new Comparator() {
      @Override
      public int compare(final IndexRangeLocation o1, final IndexRangeLocation o2) {
        return (o1.rangeLocationPair.getCardinality()
            - o2.rangeLocationPair.getCardinality()) < 0 ? -1 : 1;
      }
    });
    for (final Entry ranges : splitInfo.entrySet()) {
      for (final RangeLocationPair p : ranges.getValue().getRangeLocationPairs()) {
        orderedSplits.add(new IndexRangeLocation(p, ranges.getValue().getIndex()));
      }
    }
    final double targetCardinality = getTotalCardinality() / 2;
    double currentCardinality = 0.0;
    final Map otherSplitInfo = new HashMap<>();
    splitInfo.clear();
    do {
      // consume ranges smallest-cardinality-first
      final IndexRangeLocation next = orderedSplits.pollFirst();
      double nextCardinality = currentCardinality + next.rangeLocationPair.getCardinality();
      if (nextCardinality > targetCardinality) {
        final IndexRangeLocation newSplit =
            next.split(
                statsCache.get(
                    Pair.of(
                        next.index,
                        new ByteArray(next.rangeLocationPair.getRange().getPartitionKey()))),
                currentCardinality,
                targetCardinality);
        double splitCardinality = next.rangeLocationPair.getCardinality();
        // Stats can have inaccuracies over narrow ranges
        // thus, a split based on statistics may not be found
        if (newSplit != null) {
          splitCardinality += newSplit.rangeLocationPair.getCardinality();
          addPairForIndex(otherSplitInfo, newSplit.rangeLocationPair, newSplit.index);
          addPairForIndex(splitInfo, next.rangeLocationPair, next.index);
        } else {
          // Still add to the other SPLIT if there is remaining
          // pairs in this SPLIT
          addPairForIndex(
              (!orderedSplits.isEmpty()) ? otherSplitInfo : splitInfo,
              next.rangeLocationPair,
              next.index);
        }
        nextCardinality = currentCardinality + splitCardinality;
        if (nextCardinality > targetCardinality) {
          break;
        }
        currentCardinality = nextCardinality;
      } else {
        addPairForIndex(otherSplitInfo, next.rangeLocationPair, next.index);
        currentCardinality = nextCardinality;
      }
    } while (!orderedSplits.isEmpty());
    // What is left of the ranges
    // that haven't been placed in the other split info
    for (final IndexRangeLocation split : orderedSplits) {
      addPairForIndex(splitInfo, split.rangeLocationPair, split.index);
    }
    // All ranges consumed by the other split
    if (splitInfo.size() == 0) {
      // First try to move a index set of ranges back.
      if (otherSplitInfo.size() > 1) {
        final Iterator> it = otherSplitInfo.entrySet().iterator();
        final Entry entry = it.next();
        it.remove();
        splitInfo.put(entry.getKey(), entry.getValue());
      } else {
        splitInfo.putAll(otherSplitInfo);
        otherSplitInfo.clear();
      }
    }
    return otherSplitInfo.size() == 0 ? null
        : new IntermediateSplitInfo(otherSplitInfo, splitsProvider);
  }

  /** Adds {@code pair} to the SplitInfo for {@code index}, creating the entry if absent. */
  private void addPairForIndex(
      final Map otherSplitInfo,
      final RangeLocationPair pair,
      final Index index) {
    SplitInfo other = otherSplitInfo.get(index.getName());
    if (other == null) {
      other = new SplitInfo(index);
      otherSplitInfo.put(index.getName(), other);
    }
    other.getRangeLocationPairs().add(pair);
  }

  /**
   * Finalizes this intermediate split into a {@link GeoWaveInputSplit}: collects the distinct
   * locations and resolves per-index visibility/row-merging flags from statistics.
   */
  public synchronized GeoWaveInputSplit toFinalSplit(
      final DataStatisticsStore statisticsStore,
      final TransientAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore,
      final Map> indexIdToAdaptersMap,
      final String... authorizations) {
    final Set locations = new HashSet<>();
    for (final Entry entry : splitInfo.entrySet()) {
      for (final RangeLocationPair pair : entry.getValue().getRangeLocationPairs()) {
        if ((pair.getLocation() != null) && !pair.getLocation().isEmpty()) {
          locations.add(pair.getLocation());
        }
      }
    }
    for (final SplitInfo si : splitInfo.values()) {
      final List adapterIds = indexIdToAdaptersMap.get(si.getIndex().getName());
      final PersistentAdapterStore persistentAdapterStore =
          new AdapterStoreWrapper(adapterStore, internalAdapterStore);
      final DifferingVisibilityCountValue differingVisibilityCounts =
          InternalStatisticsHelper.getDifferingVisibilityCounts(
              si.getIndex(),
              adapterIds,
              persistentAdapterStore,
              statisticsStore,
              authorizations);
      final FieldVisibilityCountValue visibilityCounts =
          InternalStatisticsHelper.getVisibilityCounts(
              si.getIndex(),
              adapterIds,
              persistentAdapterStore,
              statisticsStore,
              authorizations);
      si.setClientsideRowMerging(
          BaseDataStoreUtils.isRowMerging(
              persistentAdapterStore,
              ArrayUtils.toPrimitive(adapterIds.toArray(new Short[0]))));
      // missing statistics are treated pessimistically (assume mixed/limiting)
      si.setMixedVisibility(
          (differingVisibilityCounts == null)
              || differingVisibilityCounts.isAnyEntryDifferingFieldVisiblity());
      si.setAuthorizationsLimiting(
          (visibilityCounts == null)
              || visibilityCounts.isAuthorizationsLimiting(authorizations));
    }
    return new GeoWaveInputSplit(splitInfo, locations.toArray(new String[locations.size()]));
  }

  /**
   * Orders splits by total cardinality, then index count, then pair count, then summed range
   * length, finally falling back to hashCode so distinct-but-equal-weight splits stay distinct
   * in sorted collections.
   */
  @Override
  public int compareTo(final IntermediateSplitInfo o) {
    final double thisTotal = getTotalCardinality();
    final double otherTotal = o.getTotalCardinality();
    int result = Double.compare(thisTotal, otherTotal);
    if (result == 0) {
      result = Integer.compare(splitInfo.size(), o.splitInfo.size());
      if (result == 0) {
        final List pairs = new ArrayList<>();
        final List otherPairs = new ArrayList<>();
        double rangeSum = 0;
        double otherSum = 0;
        for (final SplitInfo s : splitInfo.values()) {
          pairs.addAll(s.getRangeLocationPairs());
        }
        for (final SplitInfo s : o.splitInfo.values()) {
          otherPairs.addAll(s.getRangeLocationPairs());
        }
        result = Integer.compare(pairs.size(), otherPairs.size());
        if (result == 0) {
          for (final RangeLocationPair p : pairs) {
            rangeSum += SplitsProvider.getRangeLength(p.getRange());
          }
          for (final RangeLocationPair p : otherPairs) {
            otherSum += SplitsProvider.getRangeLength(p.getRange());
          }
          result = Double.compare(rangeSum, otherSum);
          if (result == 0) {
            result = Integer.compare(hashCode(), o.hashCode());
          }
        }
      }
    }
    return result;
  }

  // NOTE(review): hashCode/equals depend on the mutable splitInfo map — merge()/split() change
  // the hash, so instances must not live in hash-based collections across those calls
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + ((splitInfo == null) ? 0 : splitInfo.hashCode());
    result = (prime * result) + ((splitsProvider == null) ? 0 : splitsProvider.hashCode());
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final IntermediateSplitInfo other = (IntermediateSplitInfo) obj;
    if (splitInfo == null) {
      if (other.splitInfo != null) {
        return false;
      }
    } else if (!splitInfo.equals(other.splitInfo)) {
      return false;
    }
    if (splitsProvider == null) {
      if (other.splitsProvider != null) {
        return false;
      }
    } else if (!splitsProvider.equals(other.splitsProvider)) {
      return false;
    }
    return true;
  }

  /** Sum of the estimated cardinality of every range in every index of this split. */
  private synchronized double getTotalCardinality() {
    double sum = 0.0;
    for (final SplitInfo si : splitInfo.values()) {
      for (final RangeLocationPair pair : si.getRangeLocationPairs()) {
        sum += pair.getCardinality();
      }
    }
    return sum;
  }
}


================================================
FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/RangeLocationPair.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.splits; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; public class RangeLocationPair { private GeoWaveRowRange range; private String location; private double cardinality; protected RangeLocationPair() {} public RangeLocationPair(final GeoWaveRowRange range, final double cardinality) { this(range, "", cardinality); } public RangeLocationPair( final GeoWaveRowRange range, final String location, final double cardinality) { this.location = location; this.range = range; this.cardinality = cardinality; } public double getCardinality() { return cardinality; } public GeoWaveRowRange getRange() { return range; } public String getLocation() { return location; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((location == null) ? 0 : location.hashCode()); result = (prime * result) + ((range == null) ? 
0 : range.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final RangeLocationPair other = (RangeLocationPair) obj; if (location == null) { if (other.location != null) { return false; } } else if (!location.equals(other.location)) { return false; } if (range == null) { if (other.range != null) { return false; } } else if (!range.equals(other.range)) { return false; } return true; } public void readFields(final DataInput in) throws IOException, InstantiationException, IllegalAccessException { final boolean nullRange = in.readBoolean(); if (nullRange) { range = null; } else { range = new GeoWaveRowRange(); range.readFields(in); } location = in.readUTF(); cardinality = in.readDouble(); } public void write(final DataOutput out) throws IOException { out.writeBoolean(range == null); if (range != null) { range.write(out); } out.writeUTF(location); out.writeDouble(cardinality); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/RecordReaderParams.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.mapreduce.splits;

import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.operations.RangeReaderParams;

/**
 * Reader parameters for a single {@link GeoWaveRowRange} within a MapReduce input split: a
 * {@link RangeReaderParams} (which carries the index, adapter stores, aggregation, field
 * subsets, visibility flags, limit and decomposition settings) narrowed to one concrete row
 * range. Pure pass-through: all other arguments are forwarded to the superclass unchanged.
 */
public class RecordReaderParams extends RangeReaderParams {
  // the single row range this reader should scan
  private final GeoWaveRowRange rowRange;

  public RecordReaderParams(
      final Index index,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final short[] adapterIds,
      final double[] maxResolutionSubsamplingPerDimension,
      final Pair, Aggregation> aggregation,
      final Pair> fieldSubsets,
      final boolean isMixedVisibility,
      final boolean isAuthorizationsLimiting,
      final boolean isClientsideRowMerging,
      final GeoWaveRowRange rowRange,
      final Integer limit,
      final Integer maxRangeDecomposition,
      final String... additionalAuthorizations) {
    super(
        index,
        adapterStore,
        mappingStore,
        internalAdapterStore,
        adapterIds,
        maxResolutionSubsamplingPerDimension,
        aggregation,
        fieldSubsets,
        isMixedVisibility,
        isAuthorizationsLimiting,
        isClientsideRowMerging,
        limit,
        maxRangeDecomposition,
        additionalAuthorizations);
    this.rowRange = rowRange;
  }

  /** @return the row range this reader is scoped to */
  public GeoWaveRowRange getRowRange() {
    return rowRange;
  }
}


================================================
FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/SplitInfo.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.splits; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; public class SplitInfo { private Index index; private List rangeLocationPairs; private boolean mixedVisibility = true; private boolean authorizationsLimiting = true; private boolean clientsideRowMerging = false; protected SplitInfo() {} public SplitInfo(final Index index) { this.index = index; rangeLocationPairs = new ArrayList<>(); } public SplitInfo(final Index index, final List rangeLocationPairs) { super(); this.index = index; this.rangeLocationPairs = rangeLocationPairs; } public boolean isMixedVisibility() { return mixedVisibility; } public void setMixedVisibility(final boolean mixedVisibility) { this.mixedVisibility = mixedVisibility; } public boolean isAuthorizationsLimiting() { return authorizationsLimiting; } public void setAuthorizationsLimiting(final boolean authorizationsLimiting) { this.authorizationsLimiting = authorizationsLimiting; } public boolean isClientsideRowMerging() { return clientsideRowMerging; } public void setClientsideRowMerging(final boolean clientsideRowMerging) { this.clientsideRowMerging = clientsideRowMerging; } public Index getIndex() { return index; } public List getRangeLocationPairs() { return rangeLocationPairs; } public void readFields(final DataInput in) throws IOException { final int indexLength = in.readInt(); final byte[] indexBytes = new byte[indexLength]; 
in.readFully(indexBytes); final Index index = (Index) PersistenceUtils.fromBinary(indexBytes); final int numRanges = in.readInt(); final List rangeList = new ArrayList<>(numRanges); for (int j = 0; j < numRanges; j++) { try { final RangeLocationPair range = new RangeLocationPair(); range.readFields(in); rangeList.add(range); } catch (InstantiationException | IllegalAccessException e) { throw new IOException("Unable to instantiate range", e); } } this.index = index; rangeLocationPairs = rangeList; mixedVisibility = in.readBoolean(); authorizationsLimiting = in.readBoolean(); clientsideRowMerging = in.readBoolean(); } public void write(final DataOutput out) throws IOException { final byte[] indexBytes = PersistenceUtils.toBinary(index); out.writeInt(indexBytes.length); out.write(indexBytes); out.writeInt(rangeLocationPairs.size()); for (final RangeLocationPair r : rangeLocationPairs) { r.write(out); } out.writeBoolean(mixedVisibility); out.writeBoolean(authorizationsLimiting); out.writeBoolean(clientsideRowMerging); } } ================================================ FILE: core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/splits/SplitsProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.mapreduce.splits; import java.io.IOException; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.AdapterStoreWrapper; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; 
import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper; import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue; import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SplitsProvider { private static final Logger LOGGER = LoggerFactory.getLogger(SplitsProvider.class); private static final BigInteger TWO = BigInteger.valueOf(2); public SplitsProvider() {} /** Read the metadata table to get tablets and match up ranges to them. 
*/ public List getSplits( final DataStoreOperations operations, final CommonQueryOptions commonOptions, final DataTypeQueryOptions typeOptions, final IndexQueryOptions indexOptions, final QueryConstraints constraints, final TransientAdapterStore adapterStore, final DataStatisticsStore statsStore, final InternalAdapterStore internalAdapterStore, final IndexStore indexStore, final AdapterIndexMappingStore adapterIndexMappingStore, final JobContext context, final Integer minSplits, final Integer maxSplits) throws IOException, InterruptedException { final Map, RowRangeHistogramValue> statsCache = new HashMap<>(); final List retVal = new ArrayList<>(); final TreeSet splits = new TreeSet<>(); final Map> indexIdToAdaptersMap = new HashMap<>(); for (final Pair> indexAdapterIdPair : BaseDataStoreUtils.getAdaptersWithMinimalSetOfIndices( typeOptions.getTypeNames(), indexOptions.getIndexName(), adapterStore, internalAdapterStore, adapterIndexMappingStore, indexStore, constraints)) { QueryConstraints indexAdapterConstraints; if (constraints instanceof AdapterAndIndexBasedQueryConstraints) { final List adapters = indexAdapterIdPair.getRight(); DataTypeAdapter adapter = null; // in practice this is used for CQL and you can't have multiple // types/adapters if (adapters.size() == 1) { final String typeName = internalAdapterStore.getTypeName(adapters.get(0)); if (typeName != null) { adapter = adapterStore.getAdapter(typeName); } } if (adapter == null) { indexAdapterConstraints = constraints; LOGGER.info("Unable to find type matching an adapter dependent query"); } else { indexAdapterConstraints = ((AdapterAndIndexBasedQueryConstraints) constraints).createQueryConstraints( adapter.asInternalAdapter(adapters.get(0)), indexAdapterIdPair.getLeft(), adapterIndexMappingStore.getMapping( adapters.get(0), indexAdapterIdPair.getLeft().getName())); if (indexAdapterConstraints == null) { continue; } // make sure we pass along the new constraints to the record // reader - for spark on YARN 
(not localy though), job // configuration is immutable so while picking up the // appropriate constraint from the configuration is more // efficient, also do a check for // AdapterAndIndexBasedQueryConstraints within the Record Reader // itself GeoWaveInputFormat.setQueryConstraints( context.getConfiguration(), indexAdapterConstraints); } } else { indexAdapterConstraints = constraints; } indexIdToAdaptersMap.put( indexAdapterIdPair.getKey().getName(), indexAdapterIdPair.getValue()); IndexMetaData[] indexMetadata = null; if (indexAdapterConstraints != null) { final IndexMetaDataSetValue statValue = InternalStatisticsHelper.getIndexMetadata( indexAdapterIdPair.getLeft(), indexAdapterIdPair.getRight(), new AdapterStoreWrapper(adapterStore, internalAdapterStore), statsStore, commonOptions.getAuthorizations()); if (statValue != null) { indexMetadata = statValue.toArray(); } } populateIntermediateSplits( splits, operations, indexAdapterIdPair.getLeft(), indexAdapterIdPair.getValue(), statsCache, adapterStore, internalAdapterStore, statsStore, maxSplits, indexAdapterConstraints, (double[]) commonOptions.getHints().get( DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX), indexMetadata, commonOptions.getAuthorizations()); } // this is an incremental algorithm, it may be better use the target // split count to drive it (ie. 
to get 3 splits this will split 1 // large // range into two down the middle and then split one of those ranges // down the middle to get 3, rather than splitting one range into // thirds) final List unsplittable = new ArrayList<>(); if (!statsCache.isEmpty() && !splits.isEmpty() && (minSplits != null) && (splits.size() < minSplits)) { // set the ranges to at least min splits do { // remove the highest range, split it into 2 and add both // back, // increasing the size by 1 final IntermediateSplitInfo highestSplit = splits.pollLast(); final IntermediateSplitInfo otherSplit = highestSplit.split(statsCache); // When we can't split the highest split we remove it and // attempt the second highest // working our way up the split set. if (otherSplit == null) { unsplittable.add(highestSplit); } else { splits.add(highestSplit); splits.add(otherSplit); } } while ((splits.size() != 0) && ((splits.size() + unsplittable.size()) < minSplits)); // Add all unsplittable splits back to splits array splits.addAll(unsplittable); if (splits.size() < minSplits) { LOGGER.warn("Truly unable to meet split count. 
Actual Count: " + splits.size()); } } else if (((maxSplits != null) && (maxSplits > 0)) && (splits.size() > maxSplits)) { // merge splits to fit within max splits do { // this is the naive approach, remove the lowest two ranges // and merge them, decreasing the size by 1 // TODO Ideally merge takes into account locations (as well // as possibly the index as a secondary criteria) to limit // the number of locations/indices final IntermediateSplitInfo lowestSplit = splits.pollFirst(); final IntermediateSplitInfo nextLowestSplit = splits.pollFirst(); lowestSplit.merge(nextLowestSplit); splits.add(lowestSplit); } while (splits.size() > maxSplits); } for (final IntermediateSplitInfo split : splits) { retVal.add( split.toFinalSplit( statsStore, adapterStore, internalAdapterStore, indexIdToAdaptersMap, commonOptions.getAuthorizations())); } return retVal; } protected TreeSet populateIntermediateSplits( final TreeSet splits, final DataStoreOperations operations, final Index index, final List adapterIds, final Map, RowRangeHistogramValue> statsCache, final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final DataStatisticsStore statsStore, final Integer maxSplits, final QueryConstraints constraints, final double[] targetResolutionPerDimensionForHierarchicalIndex, final IndexMetaData[] indexMetadata, final String[] authorizations) throws IOException { // Build list of row ranges from query List ranges = null; if (constraints != null) { final List indexConstraints = constraints.getIndexConstraints(index); if ((maxSplits != null) && (maxSplits > 0)) { ranges = DataStoreUtils.constraintsToQueryRanges( indexConstraints, index, targetResolutionPerDimensionForHierarchicalIndex, maxSplits, indexMetadata).getCompositeQueryRanges(); } else { ranges = DataStoreUtils.constraintsToQueryRanges( indexConstraints, index, targetResolutionPerDimensionForHierarchicalIndex, -1, indexMetadata).getCompositeQueryRanges(); } } final List rangeList = new 
ArrayList<>(); final PersistentAdapterStore persistentAdapterStore = new AdapterStoreWrapper(adapterStore, internalAdapterStore); if (ranges == null) { final PartitionsValue statistics = InternalStatisticsHelper.getPartitions( index, adapterIds, persistentAdapterStore, statsStore, authorizations); // Try to get ranges from histogram statistics if (statistics != null) { final Set partitionKeys = statistics.getValue(); for (final ByteArray partitionKey : partitionKeys) { final GeoWaveRowRange gwRange = new GeoWaveRowRange(partitionKey.getBytes(), null, null, true, true); final double cardinality = getCardinality( getHistStats( index, adapterIds, persistentAdapterStore, statsStore, statsCache, partitionKey, authorizations), gwRange); rangeList.add( new RangeLocationPair( gwRange, cardinality <= 0 ? 0 : cardinality < 1 ? 1.0 : cardinality)); } } else { // add one all-inclusive range rangeList.add( new RangeLocationPair(new GeoWaveRowRange(null, null, null, true, false), 0.0)); } } else { for (final ByteArrayRange range : ranges) { final GeoWaveRowRange gwRange = SplitsProvider.toRowRange(range, index.getIndexStrategy().getPartitionKeyLength()); final double cardinality = getCardinality( getHistStats( index, adapterIds, persistentAdapterStore, statsStore, statsCache, new ByteArray(gwRange.getPartitionKey()), authorizations), gwRange); rangeList.add( new RangeLocationPair( gwRange, cardinality <= 0 ? 0 : cardinality < 1 ? 1.0 : cardinality)); } } final Map splitInfo = new HashMap<>(); if (!rangeList.isEmpty()) { splitInfo.put(index.getName(), new SplitInfo(index, rangeList)); splits.add(new IntermediateSplitInfo(splitInfo, this)); } return splits; } protected double getCardinality( final RowRangeHistogramValue rangeStats, final GeoWaveRowRange range) { if (range == null) { if (rangeStats != null) { return rangeStats.getTotalCount(); } else { // with an infinite range and no histogram we have no info to // base a cardinality on return 0; } } return rangeStats == null ? 
0.0 : rangeStats.cardinality(range.getStartSortKey(), range.getEndSortKey()); } protected RowRangeHistogramValue getHistStats( final Index index, final List adapterIds, final PersistentAdapterStore adapterStore, final DataStatisticsStore statsStore, final Map, RowRangeHistogramValue> statsCache, final ByteArray partitionKey, final String[] authorizations) throws IOException { final Pair key = Pair.of(index, partitionKey); RowRangeHistogramValue rangeStats = statsCache.get(key); if (rangeStats == null) { try { rangeStats = InternalStatisticsHelper.getRangeStats( index, adapterIds, adapterStore, statsStore, partitionKey, authorizations); if (rangeStats != null) { statsCache.put(key, rangeStats); } } catch (final Exception e) { throw new IOException(e); } } return rangeStats; } protected static byte[] getKeyFromBigInteger(final BigInteger value, final int numBytes) { // TODO: does this account for the two extra bytes on BigInteger? final byte[] valueBytes = value.toByteArray(); final byte[] bytes = new byte[numBytes]; final int pos = Math.abs(numBytes - valueBytes.length); System.arraycopy(valueBytes, 0, bytes, pos, Math.min(valueBytes.length, bytes.length)); return bytes; } protected static BigInteger getRange(final GeoWaveRowRange range, final int cardinality) { return getEnd(range, cardinality).subtract(getStart(range, cardinality)); } protected static BigInteger getStart(final GeoWaveRowRange range, final int cardinality) { final byte[] start = range.getStartSortKey(); byte[] startBytes; if (!range.isInfiniteStartSortKey() && (start != null)) { startBytes = extractBytes(start, cardinality); } else { startBytes = extractBytes(new byte[] {}, cardinality); } return new BigInteger(startBytes); } protected static BigInteger getEnd(final GeoWaveRowRange range, final int cardinality) { final byte[] end = range.getEndSortKey(); byte[] endBytes; if (!range.isInfiniteStopSortKey() && (end != null)) { endBytes = extractBytes(end, cardinality); } else { endBytes = 
extractBytes(new byte[] {}, cardinality, true); } return new BigInteger(endBytes); } protected static double getRangeLength(final GeoWaveRowRange range) { if ((range == null) || (range.getStartSortKey() == null) || (range.getEndSortKey() == null)) { return 1; } final byte[] start = range.getStartSortKey(); final byte[] end = range.getEndSortKey(); final int maxDepth = Math.max(end.length, start.length); final BigInteger startBI = new BigInteger(extractBytes(start, maxDepth)); final BigInteger endBI = new BigInteger(extractBytes(end, maxDepth)); return endBI.subtract(startBI).doubleValue(); } protected static byte[] getMidpoint(final GeoWaveRowRange range) { if ((range.getStartSortKey() == null) || (range.getEndSortKey() == null)) { return null; } final byte[] start = range.getStartSortKey(); final byte[] end = range.getEndSortKey(); if (Arrays.equals(start, end)) { return null; } final int maxDepth = Math.max(end.length, start.length); final BigInteger startBI = new BigInteger(extractBytes(start, maxDepth)); final BigInteger endBI = new BigInteger(extractBytes(end, maxDepth)); final BigInteger rangeBI = endBI.subtract(startBI); if (rangeBI.equals(BigInteger.ZERO) || rangeBI.equals(BigInteger.ONE)) { return end; } final byte[] valueBytes = rangeBI.divide(TWO).add(startBI).toByteArray(); final byte[] bytes = new byte[valueBytes.length - 2]; System.arraycopy(valueBytes, 2, bytes, 0, bytes.length); return bytes; } public static byte[] extractBytes(final byte[] seq, final int numBytes) { return extractBytes(seq, numBytes, false); } protected static byte[] extractBytes( final byte[] seq, final int numBytes, final boolean infiniteEndKey) { final byte[] bytes = new byte[numBytes + 2]; bytes[0] = 1; bytes[1] = 0; for (int i = 0; i < numBytes; i++) { if (i >= seq.length) { if (infiniteEndKey) { // -1 is 0xff bytes[i + 2] = -1; } else { bytes[i + 2] = 0; } } else { bytes[i + 2] = seq[i]; } } return bytes; } public static GeoWaveRowRange toRowRange( final ByteArrayRange range, 
final int partitionKeyLength) { final byte[] startRow = range.getStart() == null ? null : range.getStart(); final byte[] stopRow = range.getEnd() == null ? null : range.getEnd(); if (partitionKeyLength <= 0) { return new GeoWaveRowRange(null, startRow, stopRow, true, false); } else { byte[] partitionKey; boolean partitionKeyDiffers = false; if ((startRow == null) && (stopRow == null)) { return new GeoWaveRowRange(null, null, null, true, true); } else if (startRow != null) { partitionKey = ArrayUtils.subarray(startRow, 0, partitionKeyLength); if (stopRow != null) { partitionKeyDiffers = !Arrays.equals(partitionKey, ArrayUtils.subarray(stopRow, 0, partitionKeyLength)); } } else { partitionKey = ArrayUtils.subarray(stopRow, 0, partitionKeyLength); } return new GeoWaveRowRange( partitionKey, startRow == null ? null : (partitionKeyLength == startRow.length ? null : ArrayUtils.subarray(startRow, partitionKeyLength, startRow.length)), partitionKeyDiffers ? null : (stopRow == null ? null : (partitionKeyLength == stopRow.length ? null : ArrayUtils.subarray(stopRow, partitionKeyLength, stopRow.length))), true, partitionKeyDiffers); } } public static ByteArrayRange fromRowRange(final GeoWaveRowRange range) { if ((range.getPartitionKey() == null) || (range.getPartitionKey().length == 0)) { final byte[] startKey = (range.getStartSortKey() == null) ? null : range.getStartSortKey(); final byte[] endKey = (range.getEndSortKey() == null) ? null : range.getEndSortKey(); return new ByteArrayRange(startKey, endKey); } else { final byte[] startKey = (range.getStartSortKey() == null) ? range.getPartitionKey() : ArrayUtils.addAll(range.getPartitionKey(), range.getStartSortKey()); final byte[] endKey = (range.getEndSortKey() == null) ? 
ByteArrayUtils.getNextPrefix(range.getPartitionKey()) : ArrayUtils.addAll(range.getPartitionKey(), range.getEndSortKey()); return new ByteArrayRange(startKey, endKey); } } public static byte[] getInclusiveEndKey(final byte[] endKey) { final byte[] inclusiveEndKey = new byte[endKey.length + 1]; System.arraycopy(endKey, 0, inclusiveEndKey, 0, inclusiveEndKey.length - 1); return inclusiveEndKey; } } ================================================ FILE: core/mapreduce/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.mapreduce.operations.MapReduceOperationProvider ================================================ FILE: core/mapreduce/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi ================================================ org.locationtech.geowave.mapreduce.VFSClassLoaderTransformer ================================================ FILE: core/pom.xml ================================================ 4.0.0 geowave-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-core-parent GeoWave Core Parent POM The set of base functionality provided for all configurations of GeoWave pom index store geotime cli ingest mapreduce ================================================ FILE: core/store/.gitignore ================================================ /bin/ ================================================ FILE: core/store/pom.xml ================================================ 4.0.0 geowave-core-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-core-store GeoWave Store org.apache.commons commons-pool2 org.locationtech.geowave geowave-core-cli ${project.version} org.locationtech.geowave geowave-core-index ${project.version} org.apache.commons commons-vfs2 org.apache.commons commons-text org.apache.commons commons-csv 1.1 org.antlr antlr4-runtime 4.7.2 org.hdrhistogram HdrHistogram 2.1.7 
com.tdunning t-digest 3.2 com.fasterxml.jackson.core jackson-databind org.apache.maven.plugins maven-jar-plugin 3.2.0 test-jar org.antlr antlr4-maven-plugin 4.7.2 antlr antlr4 ================================================ FILE: core/store/src/main/antlr4/org/locationtech/geowave/core/store/query/gwql/parse/GWQL.g4 ================================================ grammar GWQL; options { language = Java; } @parser::header { import java.util.List; import com.google.common.collect.Lists; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.gwql.ErrorListener; import org.locationtech.geowave.core.store.query.gwql.GWQLParseHelper; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; import org.locationtech.geowave.core.store.query.gwql.AggregationSelector; import org.locationtech.geowave.core.store.query.gwql.ColumnSelector; import org.locationtech.geowave.core.store.query.gwql.Selector; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; import org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement; import org.locationtech.geowave.core.store.query.gwql.statement.DeleteStatement; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.Predicate; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.Literal; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; } @parser::members { private DataStore dataStore = null; private 
DataTypeAdapter adapter = null; public static Statement parseStatement(final DataStore dataStore, final String statement) { final GWQLLexer lexer = new GWQLLexer(CharStreams.fromString(statement)); final TokenStream tokenStream = new CommonTokenStream(lexer); final GWQLParser parser = new GWQLParser(tokenStream); parser.dataStore = dataStore; parser.removeErrorListeners(); parser.addErrorListener(new ErrorListener()); return parser.query().stmt; } } query returns [ Statement stmt ] : statement (SEMICOLON)* EOF { $stmt = $statement.stmt; } | error { $stmt=null; } ; statement returns [ Statement stmt ] : selectStatement { $stmt = $selectStatement.stmt; } | deleteStatement { $stmt = $deleteStatement.stmt; } ; deleteStatement returns [ DeleteStatement stmt ] locals [ Filter f = null ] : K_DELETE K_FROM adapterName ( K_WHERE filter { $f = $filter.value; })? { $stmt = new DeleteStatement(dataStore, adapter, $f); } ; selectStatement returns [ SelectStatement stmt ] locals [ Filter f = null, Integer limit = null, List selectorList = Lists.newArrayList() ] : K_SELECT selectors[$selectorList] K_FROM adapterName ( K_WHERE filter { $f = $filter.value; })? ( K_LIMIT INTEGER { $limit = $INTEGER.int; })? { $stmt = new SelectStatement(dataStore, adapter, $selectorList, $f, $limit); } ; error : UNEXPECTED_CHAR { throw new GWQLParseException("UNEXPECTED_CHAR=" + $UNEXPECTED_CHAR.text); } ; selectors [List selectorList] : agg1=aggregate { $selectorList.add($agg1.sel); } (COMMA aggN=aggregate { $selectorList.add($aggN.sel); } )* | sel1=selector { $selectorList.add($sel1.sel); } (COMMA selN=selector { $selectorList.add($selN.sel); } )* | '*' ; selector returns [ ColumnSelector sel ] locals [ String alias = null ] : columnName ( K_AS columnAlias { $alias = $columnAlias.text; } )? 
{ $sel = new ColumnSelector($columnName.text, $alias); } ; aggregate returns [ AggregationSelector sel ] locals [ String alias = null ] : functionName '(' functionArg ')' ( K_AS columnAlias { $alias = $columnAlias.text; } )? { $sel = new AggregationSelector($functionName.text, new String[] { $functionArg.text }, $alias); } ; functionArg : '*' | columnName ; adapterName : tableName { adapter = dataStore.getType($tableName.text); if (adapter == null) { throw new GWQLParseException("No type named " + $tableName.text); } } ; columnName : IDENTIFIER ; columnAlias : IDENTIFIER ; tableName : IDENTIFIER ; functionName : IDENTIFIER ; filter returns [ Filter value ] : predicate { $value = $predicate.value; } #simplePredicateFilter | f1=filter K_AND f2=filter { $value = $f1.value.and($f2.value); } #andFilter | f1=filter K_OR f2=filter { $value = $f1.value.or($f2.value); } #orFilter | K_NOT f=filter { $value = Filter.not($f.value); } #notFilter | LPAREN f=filter RPAREN { $value = $f.value; } #parenFilter | LSQUARE f=filter RSQUARE { $value = $f.value; } #sqBracketFilter | K_INCLUDE { $value = Filter.include(); } #includeFilter | K_EXCLUDE { $value = Filter.exclude(); } #excludeFilter ; predicate returns [ Predicate value ] : f=predicateFunction { $value = $f.value; } | e1=expression EQUALS e2=expression { $value = GWQLParseHelper.getEqualsPredicate($e1.value, $e2.value); } | e1=expression NOT_EQUALS e2=expression { $value = GWQLParseHelper.getNotEqualsPredicate($e1.value, $e2.value); } | e1=expression LESS_THAN e2=expression { $value = GWQLParseHelper.getLessThanPredicate($e1.value, $e2.value); } | e1=expression LESS_THAN_OR_EQUAL e2=expression { $value = GWQLParseHelper.getLessThanOrEqualsPredicate($e1.value, $e2.value); } | e1=expression GREATER_THAN e2=expression { $value = GWQLParseHelper.getGreaterThanPredicate($e1.value, $e2.value); } | e1=expression GREATER_THAN_OR_EQUAL e2=expression { $value = GWQLParseHelper.getGreaterThanOrEqualsPredicate($e1.value, $e2.value); } | 
v=expression K_BETWEEN l=expression K_AND u=expression { $value = GWQLParseHelper.getBetweenPredicate($v.value, $l.value, $u.value); } | e=expression K_IS K_NULL { $value = $e.value.isNull(); } | e=expression K_IS K_NOT K_NULL { $value = $e.value.isNotNull(); } | e1=expression o=predicateOperator e2=expression { $value = GWQLParseHelper.getOperatorPredicate($o.text, $e1.value, $e2.value); } ; expression returns [ Expression value ] : e1=expression STAR e2=expression { $value = GWQLParseHelper.getMultiplyExpression($e1.value, $e2.value); } | e1=expression DIVIDE e2=expression { $value = GWQLParseHelper.getDivideExpression($e1.value, $e2.value); } | e1=expression PLUS e2=expression { $value = GWQLParseHelper.getAddExpression($e1.value, $e2.value); } | e1=expression MINUS e2=expression { $value = GWQLParseHelper.getSubtractExpression($e1.value, $e2.value); } | f=expressionFunction { $value = $f.value; } | LPAREN e=expression RPAREN { $value = $e.value; } | LSQURE e=expression RSQUARE { $value = $e.value; } | e1=expression CAST IDENTIFIER { $value = GWQLParseHelper.castExpression($IDENTIFIER.text, $e1.value); } | l=literal { $value = $l.value; } | c=columnName { $value = GWQLParseHelper.getFieldValue(adapter, $c.text); } ; predicateFunction returns [ Predicate value ] locals [ List> expressions = Lists.newArrayList() ] : functionName LPAREN expressionList[$expressions] RPAREN { $value = GWQLParseHelper.getPredicateFunction($functionName.text, $expressions); } ; expressionFunction returns [ Expression value ] locals [ List> expressions = Lists.newArrayList() ] : functionName LPAREN expressionList[$expressions] RPAREN { $value = GWQLParseHelper.getExpressionFunction($functionName.text, $expressions); } ; predicateOperator : IDENTIFIER ; expressionList [List> expressions] : expr1=expression { $expressions.add($expr1.value); } (COMMA exprN=expression { $expressions.add($exprN.value); } )* ; literal returns [ Literal value ] : number { $value = 
NumericLiteral.of(Double.parseDouble($number.text)); } | textLiteral { $value = $textLiteral.value; } | BOOLEAN_LITERAL { $value = BooleanLiteral.of($BOOLEAN_LITERAL.text); } ; number : NUMERIC | INTEGER ; textLiteral returns [ TextLiteral value ] : SQUOTE_LITERAL { $value = GWQLParseHelper.evaluateTextLiteral($SQUOTE_LITERAL.text); } ; SQUOTE_LITERAL: '\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\''; ESCAPED_SQUOTE: BACKSLASH SQUOTE; NEWLINE: BACKSLASH 'n'; RETURN: BACKSLASH 'r'; TAB: BACKSLASH 't'; BACKSPACE: BACKSLASH 'b'; FORM_FEED: BACKSLASH 'f'; ESCAPED_BACKSLASH: BACKSLASH BACKSLASH; NOT_EQUALS: '<>'; LESS_THAN_OR_EQUAL: '<='; GREATER_THAN_OR_EQUAL: '>='; LESS_THAN: '<'; GREATER_THAN: '>'; EQUALS: '='; LPAREN: '('; RPAREN: ')'; LCURL: '{'; RCURL: '}'; LSQUARE: '['; RSQUARE: ']'; COMMA: ','; STAR: '*'; DIVIDE: '/'; PLUS: '+'; MINUS: '-'; CAST: '::'; DOT: '.'; SQUOTE: '\''; DQUOTE: '"'; BACKSLASH: '\\'; SEMICOLON: ';'; K_AND : A N D; K_AS : A S; K_DELETE : D E L E T E; K_FROM : F R O M; K_LIMIT : L I M I T; K_OR : O R; K_SELECT : S E L E C T; K_WHERE : W H E R E; K_NOT : N O T; K_IS : I S; K_NULL : N U L L; K_INCLUDE : I N C L U D E; K_EXCLUDE: E X C L U D E; K_BETWEEN: B E T W E E N; BOOLEAN_LITERAL : T R U E | F A L S E ; IDENTIFIER : ESCAPED_IDENTIFIER { String txt = getText(); // strip the leading and trailing characters that wrap the identifier when using unconventional naming txt = txt.substring(1, txt.length() - 1); setText(txt); } | [a-zA-Z_] [a-zA-Z0-9_]* // TODO check: needs more chars in set ; ESCAPED_IDENTIFIER : '"' (~'"' | '""')* '"' | '`' (~'`' | '``')* '`' | '[' ~']'* ']' ; INTEGER : MINUS? DIGIT+ (E DIGIT+)? ; NUMERIC : MINUS? DIGIT+ DOT DIGIT+ (E (MINUS)* DIGIT+)? ; WHITESPACE: [ \t\n\r\f] -> channel(HIDDEN); UNEXPECTED_CHAR : . 
; fragment DIGIT : [0-9]; fragment A : [aA]; fragment B : [bB]; fragment C : [cC]; fragment D : [dD]; fragment E : [eE]; fragment F : [fF]; fragment G : [gG]; fragment H : [hH]; fragment I : [iI]; fragment J : [jJ]; fragment K : [kK]; fragment L : [lL]; fragment M : [mM]; fragment N : [nN]; fragment O : [oO]; fragment P : [pP]; fragment Q : [qQ]; fragment R : [rR]; fragment S : [sS]; fragment T : [tT]; fragment U : [uU]; fragment V : [vV]; fragment W : [wW]; fragment X : [xX]; fragment Y : [yY]; fragment Z : [zZ]; ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/AdapterMapping.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; public class AdapterMapping implements Persistable { private ByteArray adapterId; private short internalAdapterId; public AdapterMapping() {} public AdapterMapping(final ByteArray adapterId, final short internalAdapterId) { super(); this.adapterId = adapterId; this.internalAdapterId = internalAdapterId; } public ByteArray getAdapterId() { return adapterId; } public short getInteranalAdapterId() { return internalAdapterId; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((adapterId == null) ? 
0 : adapterId.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final AdapterMapping other = (AdapterMapping) obj; if (adapterId == null) { if (other.adapterId != null) { return false; } } else if (!adapterId.equals(other.adapterId)) { return false; } if (internalAdapterId != other.internalAdapterId) { return false; } return true; } @Override public byte[] toBinary() { final byte[] adapterIdBytes = adapterId.getBytes(); final ByteBuffer buf = ByteBuffer.allocate( adapterIdBytes.length + VarintUtils.unsignedShortByteLength(internalAdapterId)); buf.put(adapterIdBytes); VarintUtils.writeUnsignedShort(internalAdapterId, buf); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); internalAdapterId = VarintUtils.readUnsignedShort(buf); final byte[] adapterIdBytes = new byte[buf.remaining()]; buf.get(adapterIdBytes); adapterId = new ByteArray(adapterIdBytes); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/AdapterToIndexMapping.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.index.IndexStore; /** Meta-data for retaining Adapter to Index association */ public class AdapterToIndexMapping implements Persistable { private short adapterId; private String indexName; private List> fieldMappers; public AdapterToIndexMapping() {} public AdapterToIndexMapping( final short adapterId, final Index index, final List> fieldMappers) { super(); this.adapterId = adapterId; indexName = index.getName(); this.fieldMappers = fieldMappers; } public AdapterToIndexMapping( final short adapterId, final String indexName, final List> fieldMappers) { super(); this.adapterId = adapterId; this.indexName = indexName; this.fieldMappers = fieldMappers; } public short getAdapterId() { return adapterId; } public String getIndexName() { return indexName; } public List> getIndexFieldMappers() { return fieldMappers; } public IndexFieldMapper getMapperForIndexField(final String indexFieldName) { return fieldMappers.stream().filter( mapper -> mapper.indexFieldName().equals(indexFieldName)).findFirst().orElse(null); } public Index getIndex(final IndexStore indexStore) { return indexStore.getIndex(indexName); } @Override public int 
hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((adapterId == 0) ? 0 : Short.hashCode(adapterId)); result = (prime * result) + indexName.hashCode(); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final AdapterToIndexMapping other = (AdapterToIndexMapping) obj; if (adapterId == 0) { if (other.adapterId != 0) { return false; } } else if (adapterId != other.adapterId) { return false; } if (!indexName.equals(other.indexName)) { return false; } return true; } @Override public byte[] toBinary() { final byte[] indexIdBytes = StringUtils.stringToBinary(indexName); final byte[] mapperBytes = PersistenceUtils.toBinary(fieldMappers); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedShortByteLength(adapterId) + VarintUtils.unsignedShortByteLength((short) indexIdBytes.length) + indexIdBytes.length + mapperBytes.length); VarintUtils.writeUnsignedShort(adapterId, buf); VarintUtils.writeUnsignedShort((short) indexIdBytes.length, buf); buf.put(indexIdBytes); buf.put(mapperBytes); return buf.array(); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); adapterId = VarintUtils.readUnsignedShort(buf); final byte[] indexNameBytes = new byte[VarintUtils.readUnsignedShort(buf)]; buf.get(indexNameBytes); indexName = StringUtils.stringFromBinary(indexNameBytes); final byte[] mapperBytes = new byte[buf.remaining()]; buf.get(mapperBytes); fieldMappers = (List) PersistenceUtils.fromBinaryAsList(mapperBytes); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/BaseDataStoreFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store;

import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.base.BaseDataStore;
import org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreImpl;
import org.locationtech.geowave.core.store.metadata.AdapterStoreImpl;
import org.locationtech.geowave.core.store.metadata.DataStatisticsStoreImpl;
import org.locationtech.geowave.core.store.metadata.IndexStoreImpl;
import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;
import org.locationtech.geowave.core.store.metadata.PropertyStoreImpl;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;

/**
 * Generic {@link DataStore} factory: builds a {@link BaseDataStore} wired to the metadata stores
 * created over the operations supplied by the {@link StoreFactoryHelper}.
 */
// NOTE(review): type argument restored — extraction had stripped "<DataStore>"
public class BaseDataStoreFactory extends BaseStoreFactory<DataStore> {
  public BaseDataStoreFactory(
      final String typeName,
      final String description,
      final StoreFactoryHelper helper) {
    super(typeName, description, helper);
  }

  @Override
  public DataStore createStore(final StoreFactoryOptions factoryOptions) {
    // the helper supplies the backend-specific low-level operations
    final DataStoreOperations operations = helper.createOperations(factoryOptions);
    final DataStoreOptions options = factoryOptions.getStoreOptions();
    return new BaseDataStore(
        new IndexStoreImpl(operations, options),
        new AdapterStoreImpl(operations, options),
        new DataStatisticsStoreImpl(operations, options),
        new AdapterIndexMappingStoreImpl(operations, options),
        operations,
        options,
        new InternalAdapterStoreImpl(operations),
        new PropertyStoreImpl(operations, options));
  }
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/BaseDataStoreFamily.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store;

import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.metadata.AdapterIndexMappingStoreFactory;
import org.locationtech.geowave.core.store.metadata.AdapterStoreFactory;
import org.locationtech.geowave.core.store.metadata.DataStatisticsStoreFactory;
import org.locationtech.geowave.core.store.metadata.IndexStoreFactory;
import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreFactory;
import org.locationtech.geowave.core.store.metadata.PropertyStoreFactory;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.DataStoreOperationsFactory;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * A {@link StoreFactoryFamilySpi} that supplies a factory for every GeoWave store flavor, all
 * sharing one backend-specific {@link StoreFactoryHelper}.
 */
// NOTE(review): GenericStoreFactory type arguments restored — extraction had stripped "<...>"
public class BaseDataStoreFamily implements StoreFactoryFamilySpi {
  private final String typeName;
  private final String description;
  private final StoreFactoryHelper helper;

  public BaseDataStoreFamily(
      final String typeName,
      final String description,
      final StoreFactoryHelper helper) {
    super();
    this.typeName = typeName;
    this.description = description;
    this.helper = helper;
  }

  @Override
  public String getType() {
    return typeName;
  }

  @Override
  public String getDescription() {
    return description;
  }

  @Override
  public GenericStoreFactory<DataStore> getDataStoreFactory() {
    return new BaseDataStoreFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<DataStatisticsStore> getDataStatisticsStoreFactory() {
    return new DataStatisticsStoreFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<IndexStore> getIndexStoreFactory() {
    return new IndexStoreFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<PersistentAdapterStore> getAdapterStoreFactory() {
    return new AdapterStoreFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<AdapterIndexMappingStore> getAdapterIndexMappingStoreFactory() {
    return new AdapterIndexMappingStoreFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<DataStoreOperations> getDataStoreOperationsFactory() {
    return new DataStoreOperationsFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<InternalAdapterStore> getInternalAdapterStoreFactory() {
    return new InternalAdapterStoreFactory(typeName, description, helper);
  }

  @Override
  public GenericStoreFactory<PropertyStore> getPropertyStoreFactory() {
    return new PropertyStoreFactory(typeName, description, helper);
  }
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/BaseDataStoreOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store;

import com.beust.jcommander.Parameter;

/**
 * Default {@link DataStoreOptions} implementation whose values are populated from JCommander
 * command-line flags. Several "configured*" fields use a sentinel (null or Integer.MIN_VALUE) to
 * mean "not set", in which case a protected default*() method supplies the value, allowing
 * backend-specific subclasses to override the defaults.
 */
public class BaseDataStoreOptions implements DataStoreOptions {
  // maintain statistics as data is written (hidden tuning flag)
  @Parameter(names = "--persistDataStatistics", hidden = true, arity = 1)
  protected boolean persistDataStatistics = true;

  @Parameter(names = "--enableBlockCache", hidden = true, arity = 1)
  protected boolean enableBlockCache = true;

  @Parameter(names = "--enableServerSideLibrary", arity = 1)
  protected boolean enableServerSideLibrary = true;

  @Parameter(names = "--enableSecondaryIndexing")
  protected boolean enableSecondaryIndexing = false;

  // Boolean (not boolean): null means "not configured", fall back to defaultEnableVisibility()
  @Parameter(names = "--enableVisibility", arity = 1)
  protected Boolean configuredEnableVisibility = null;

  // Integer.MIN_VALUE is the "not configured" sentinel for the int options below
  @Parameter(names = "--dataIndexBatchSize")
  protected int configuredDataIndexBatchSize = Integer.MIN_VALUE;

  @Parameter(names = "--maxRangeDecomposition", arity = 1)
  protected int configuredMaxRangeDecomposition = Integer.MIN_VALUE;

  @Parameter(names = "--aggregationMaxRangeDecomposition", arity = 1)
  protected int configuredAggregationMaxRangeDecomposition = Integer.MIN_VALUE;

  @Override
  public boolean isPersistDataStatistics() {
    return persistDataStatistics;
  }

  public void setPersistDataStatistics(final boolean persistDataStatistics) {
    this.persistDataStatistics = persistDataStatistics;
  }

  @Override
  public boolean isSecondaryIndexing() {
    return enableSecondaryIndexing;
  }

  @Override
  public void setSecondaryIndexing(final boolean enableSecondaryIndexing) {
    this.enableSecondaryIndexing = enableSecondaryIndexing;
  }

  @Override
  public boolean isEnableBlockCache() {
    return enableBlockCache;
  }

  public void setEnableBlockCache(final boolean enableBlockCache) {
    this.enableBlockCache = enableBlockCache;
  }

  @Override
  public boolean isServerSideLibraryEnabled() {
    // server-side processing is incompatible with secondary indexing, so it wins only when
    // secondary indexing is off
    return enableServerSideLibrary && !enableSecondaryIndexing;
  }

  public void setServerSideLibraryEnabled(final boolean enableServerSideLibrary) {
    this.enableServerSideLibrary = enableServerSideLibrary;
  }

  @Override
  public int getMaxRangeDecomposition() {
    return configuredMaxRangeDecomposition == Integer.MIN_VALUE ? defaultMaxRangeDecomposition()
        : configuredMaxRangeDecomposition;
  }

  // overridable default used when --maxRangeDecomposition is not supplied
  protected int defaultMaxRangeDecomposition() {
    return 2000;
  }

  // overridable default used when --enableVisibility is not supplied
  protected boolean defaultEnableVisibility() {
    return true;
  }

  public void setMaxRangeDecomposition(final int maxRangeDecomposition) {
    configuredMaxRangeDecomposition = maxRangeDecomposition;
  }

  @Override
  public int getAggregationMaxRangeDecomposition() {
    return configuredAggregationMaxRangeDecomposition == Integer.MIN_VALUE
        ? defaultAggregationMaxRangeDecomposition()
        : configuredAggregationMaxRangeDecomposition;
  }

  @Override
  public int getDataIndexBatchSize() {
    // batching only applies with secondary indexing; otherwise the sentinel disables it
    return isSecondaryIndexing()
        ? (configuredDataIndexBatchSize == Integer.MIN_VALUE ? defaultDataIndexBatchSize()
            : configuredDataIndexBatchSize)
        : Integer.MIN_VALUE;
  }

  // overridable default used when --dataIndexBatchSize is not supplied
  protected int defaultDataIndexBatchSize() {
    return 2000;
  }

  // overridable default used when --aggregationMaxRangeDecomposition is not supplied
  protected int defaultAggregationMaxRangeDecomposition() {
    return 10;
  }

  public void setAggregationMaxRangeDecomposition(final int aggregationMaxRangeDecomposition) {
    configuredAggregationMaxRangeDecomposition = aggregationMaxRangeDecomposition;
  }

  @Override
  public boolean isVisibilityEnabled() {
    return configuredEnableVisibility == null ? defaultEnableVisibility()
        : configuredEnableVisibility;
  }

  public void setEnableVisibility(final boolean configuredEnableVisibility) {
    this.configuredEnableVisibility = configuredEnableVisibility;
  }
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/BaseStoreFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; public abstract class BaseStoreFactory implements GenericStoreFactory { private final String typeName; private final String description; protected StoreFactoryHelper helper; public BaseStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(); this.typeName = typeName; this.description = description; this.helper = helper; } @Override public String getType() { return typeName; } @Override public String getDescription() { return description; } @Override public StoreFactoryOptions createOptionsInstance() { return helper.createOptionsInstance(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/CloseableIterator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import java.io.Closeable; import java.util.Iterator; import java.util.NoSuchElementException; /** * This interface wraps both the Iterator interface and the Closeable interface. It is best to close * an iterator of this interface when it is no longer needed. * * @param A generic for the type of data for iteration */ public interface CloseableIterator extends Iterator, Closeable { @Override void close(); public static class Wrapper implements CloseableIterator { private final Iterator iterator; public Wrapper(final Iterator iterator) { this.iterator = iterator; } @Override public boolean hasNext() { return iterator.hasNext(); } @Override public E next() { return iterator.next(); } @Override public void remove() { iterator.remove(); } @Override public void close() { // just a pass through on close() } } public static class Empty implements CloseableIterator { @Override public boolean hasNext() { return false; } @Override public E next() throws NoSuchElementException { throw new NoSuchElementException(); } @Override public void remove() {} @Override public void close() {} } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/CloseableIteratorWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import java.io.Closeable; import java.io.IOException; import java.util.Iterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This is a simple wrapper around an iterator and a closeable to ensure that the caller can close * the closeable when it is finished being used by the iterator. * * @param The type to iterate on */ public class CloseableIteratorWrapper implements CloseableIterator { private static final Logger LOGGER = LoggerFactory.getLogger(CloseableIteratorWrapper.class); private final Closeable closeable; private final Iterator iterator; private Integer limit = null; private int count = 0; public CloseableIteratorWrapper(final Closeable closable, final Iterator iterator) { this.closeable = closable; this.iterator = iterator; } public CloseableIteratorWrapper( final Closeable closable, final Iterator iterator, final Integer limit) { this.closeable = closable; this.iterator = iterator; this.limit = limit; } @Override public boolean hasNext() { if ((limit != null) && (limit > 0) && (count > limit)) { return false; } return iterator.hasNext(); } @Override public E next() { count++; return iterator.next(); } @Override public void remove() { iterator.remove(); } @Override public void close() { try { closeable.close(); } catch (final IOException e) { LOGGER.warn("Unable to close iterator", e); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/DataStoreOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store;

/**
 * Backend-agnostic configuration switches for a GeoWave data store. See
 * {@code BaseDataStoreOptions} for the default JCommander-backed implementation.
 */
public interface DataStoreOptions {
  boolean isPersistDataStatistics();

  boolean isEnableBlockCache();

  boolean isServerSideLibraryEnabled();

  // when the server-side library is unavailable, merging must happen on the client
  default boolean requiresClientSideMerging() {
    return !isServerSideLibraryEnabled();
  }

  boolean isVisibilityEnabled();

  int getDataIndexBatchSize();

  int getMaxRangeDecomposition();

  int getAggregationMaxRangeDecomposition();

  boolean isSecondaryIndexing();

  void setSecondaryIndexing(boolean se);
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/DataStoreProperty.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;

/**
 * A property for storing arbitrary information about a data store. Properties are unique by key,
 * and the value can be any class that is supported by a {@link FieldReader} and {@link FieldWriter}
 * implementation.
 */
public class DataStoreProperty implements Persistable {
  private String key;
  private Object value;

  /** No-arg constructor required for {@link Persistable} deserialization. */
  public DataStoreProperty() {}

  // NOTE(review): toBinary() dereferences value, so it must be non-null — TODO confirm callers
  public DataStoreProperty(final String key, final Object value) {
    this.key = key;
    this.value = value;
  }

  public String getKey() {
    return key;
  }

  public Object getValue() {
    return value;
  }

  @SuppressWarnings("unchecked")
  @Override
  public byte[] toBinary() {
    final byte[] keyBytes = StringUtils.stringToBinary(key);
    // the concrete class name is stored so fromBinary() can pick the matching reader
    final byte[] classBytes = StringUtils.stringToBinary(value.getClass().getName());
    final byte[] valueBytes;
    if (value instanceof Persistable) {
      valueBytes = PersistenceUtils.toBinary((Persistable) value);
    } else {
      final FieldWriter<Object> writer =
          (FieldWriter<Object>) FieldUtils.getDefaultWriterForClass(value.getClass());
      valueBytes = writer.writeField(value);
    }
    final ByteBuffer buffer =
        ByteBuffer.allocate(
            VarintUtils.unsignedIntByteLength(keyBytes.length)
                + VarintUtils.unsignedIntByteLength(classBytes.length)
                + VarintUtils.unsignedIntByteLength(valueBytes.length)
                + keyBytes.length
                + classBytes.length
                + valueBytes.length);
    VarintUtils.writeUnsignedInt(keyBytes.length, buffer);
    buffer.put(keyBytes);
    VarintUtils.writeUnsignedInt(classBytes.length, buffer);
    buffer.put(classBytes);
    VarintUtils.writeUnsignedInt(valueBytes.length, buffer);
    buffer.put(valueBytes);
    return buffer.array();
  }

  @SuppressWarnings("unchecked")
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final byte[] keyBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(keyBytes);
    final byte[] classBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(classBytes);
    final byte[] valueBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(valueBytes);
    key = StringUtils.stringFromBinary(keyBytes);
    final String className = StringUtils.stringFromBinary(classBytes);
    try {
      final Class<?> valueClass = Class.forName(className);
      if (Persistable.class.isAssignableFrom(valueClass)) {
        value = PersistenceUtils.fromBinary(valueBytes);
      } else {
        final FieldReader<Object> reader =
            (FieldReader<Object>) FieldUtils.getDefaultReaderForClass(valueClass);
        value = reader.readField(valueBytes);
      }
    } catch (final ClassNotFoundException e) {
      // preserve the cause so the original stack trace is not lost
      throw new RuntimeException("Unable to find class for property: " + className, e);
    }
  }
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/EntryVisibilityHandler.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public interface EntryVisibilityHandler { public byte[] getVisibility(T entry, GeoWaveRow... kvs); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/GenericFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store;

/** Base contract for named, described factories; see {@code GenericStoreFactory}. */
public interface GenericFactory {
  // the short type name used to identify this factory (e.g. on the command line)
  public String getType();

  // a human-readable description of what this factory creates
  public String getDescription();
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/GenericStoreFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; public interface GenericStoreFactory extends GenericFactory { /** * Create the store, w/the options instance that was returned and populated * w/createOptionsInstance(). */ T createStore(StoreFactoryOptions options); /** * An object used to configure the specific store. This really exists so that the command line * options for JCommander can be filled in without knowing which options class we specifically * have to create. */ StoreFactoryOptions createOptionsInstance(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/GeoWaveStoreFinder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.config.ConfigOption; import org.locationtech.geowave.core.store.config.ConfigUtils; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GeoWaveStoreFinder { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveStoreFinder.class); public static String STORE_HINT_KEY = "store_name"; public static final ConfigOption STORE_HINT_OPTION = new ConfigOption( STORE_HINT_KEY, "Set the GeoWave store, by default it will try to discover based on matching config options. 
" + getStoreNames(), true, String.class);

  // Lazily-populated SPI registry of store factory families, keyed by cleaned type name.
  // NOTE(review): generic type parameters appear stripped by extraction (raw Map/List used
  // throughout this chunk); confirm against upstream source.
  private static Map registeredStoreFactoryFamilies = null;

  /**
   * Creates a {@link DataStatisticsStore} from the given config options, or returns null when no
   * matching store factory family can be found.
   */
  public static DataStatisticsStore createDataStatisticsStore(final Map configOptions) {
    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);
    if (factory == null) {
      return null;
    }
    return factory.getDataStatisticsStoreFactory().createStore(
        ConfigUtils.populateOptionsFromList(
            factory.getDataStatisticsStoreFactory().createOptionsInstance(),
            configOptions));
  }

  /**
   * Creates a {@link DataStore} from the given config options, or returns null when no matching
   * store factory family can be found.
   */
  public static DataStore createDataStore(final Map configOptions) {
    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);
    if (factory == null) {
      return null;
    }
    return factory.getDataStoreFactory().createStore(
        ConfigUtils.populateOptionsFromList(
            factory.getDataStoreFactory().createOptionsInstance(),
            configOptions));
  }

  /**
   * Creates a {@link PersistentAdapterStore} from the given config options, or returns null when
   * no matching store factory family can be found.
   */
  public static PersistentAdapterStore createAdapterStore(final Map configOptions) {
    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);
    if (factory == null) {
      return null;
    }
    return factory.getAdapterStoreFactory().createStore(
        ConfigUtils.populateOptionsFromList(
            factory.getAdapterStoreFactory().createOptionsInstance(),
            configOptions));
  }

  /**
   * Creates an {@link InternalAdapterStore} from the given config options, or returns null when no
   * matching store factory family can be found.
   */
  public static InternalAdapterStore createInternalAdapterStore(final Map configOptions) {
    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);
    if (factory == null) {
      return null;
    }
    return factory.getInternalAdapterStoreFactory().createStore(
        ConfigUtils.populateOptionsFromList(
            // factory.getAdapterStoreFactory().createOptionsInstance(),
            factory.getInternalAdapterStoreFactory().createOptionsInstance(),
            configOptions));
  }

  /**
   * Creates an {@link AdapterIndexMappingStore} from the given config options, or returns null
   * when no matching store factory family can be found.
   */
  public static AdapterIndexMappingStore createAdapterIndexMappingStore(final Map configOptions) {
    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);
    if (factory == null) {
      return null;
    }
    return factory.getAdapterIndexMappingStoreFactory().createStore(
        ConfigUtils.populateOptionsFromList(
            factory.getAdapterIndexMappingStoreFactory().createOptionsInstance(),
            configOptions));
  }

  /**
   * Creates an {@link IndexStore} from the given config options, or returns null when no matching
   * store factory family can be found.
   */
  public static IndexStore createIndexStore(final Map configOptions) {
    final StoreFactoryFamilySpi factory = findStoreFamily(configOptions);
    if (factory == null) {
      return null;
    }
    return factory.getIndexStoreFactory().createStore(
        ConfigUtils.populateOptionsFromList(
            factory.getIndexStoreFactory().createOptionsInstance(),
            configOptions));
  }

  /**
   * Returns the names of required options (from the family's data store factory options) that are
   * either absent from the given config or mapped to the literal string "null".
   */
  private static List getMissingRequiredOptions(
      final StoreFactoryFamilySpi factory,
      final Map configOptions) {
    final ConfigOption[] options =
        ConfigUtils.createConfigOptionsFromJCommander(
            factory.getDataStoreFactory().createOptionsInstance(),
            false);
    final List missing = new ArrayList<>();
    for (final ConfigOption option : options) {
      // an option mapped to the literal string "null" is treated the same as absent
      if (!option.isOptional()
          && (!configOptions.containsKey(option.getName())
              || (configOptions.get(option.getName()).equals("null")))) {
        missing.add(option.getName());
      }
    }
    return missing;
  }

  /**
   * Returns the names of required options (from the family's data store factory options) that are
   * present in the given config with a value other than the literal string "null". This is the
   * complement of {@link #getMissingRequiredOptions} and is used to score factory specificity.
   */
  private static List getMatchingRequiredOptions(
      final StoreFactoryFamilySpi factory,
      final Map configOptions) {
    final ConfigOption[] options =
        ConfigUtils.createConfigOptionsFromJCommander(
            factory.getDataStoreFactory().createOptionsInstance(),
            false);
    final List matching = new ArrayList<>();
    for (final ConfigOption option : options) {
      if (!option.isOptional()
          && (configOptions.containsKey(option.getName())
              && (!configOptions.get(option.getName()).equals("null")))) {
        matching.add(option.getName());
      }
    }
    return matching;
  }

  /**
   * Finds the store factory family that best matches the given config options. If a store hint
   * (STORE_HINT_KEY) is present it is honored exclusively; otherwise an exact match is preferred,
   * then the factory with the most satisfied required options. Returns null (after logging) when
   * nothing matches.
   */
  public static StoreFactoryFamilySpi findStoreFamily(final Map configOptions) {
    final Object storeHint = configOptions.get(STORE_HINT_KEY);
    final Map internalStoreFamilies = getRegisteredStoreFactoryFamilies();
    if (storeHint != null) {
      // the hint names a specific family; use it only if all its required options are present
      final StoreFactoryFamilySpi factory = internalStoreFamilies.get(storeHint.toString());
      if (factory != null) {
        final List missingOptions = getMissingRequiredOptions(factory, configOptions);
        if (missingOptions.isEmpty()) {
          return factory;
        }
        // HP Fortify "Improper Output Neutralization" false positive
        // What Fortify considers "user input" comes only
        // from users with OS-level access anyway
        LOGGER.error(
            "Unable to find config options for store '"
                + storeHint.toString()
                + "'."
                + ConfigUtils.getOptions(missingOptions));
        return null;
      } else {
        // HP Fortify "Improper Output Neutralization" false positive
        // What Fortify considers "user input" comes only
        // from users with OS-level access anyway
        LOGGER.error("Unable to find store '" + storeHint.toString() + "'");
        return null;
      }
    }
    // if the hint is not provided, the factory finder will attempt to find
    // a factory that has an exact match meaning that all required params
    // are provided and all provided params are defined as at least optional
    // params
    for (final Entry entry : internalStoreFamilies.entrySet()) {
      if (exactMatch(entry.getValue(), configOptions)) {
        return entry.getValue();
      }
    }
    // if no exact match is found, fall back to any factory that has no missing required
    // options; if multiple factories qualify, the one with the most matching required options
    // is used, on the assumption that it has the most specificity and is the closest match for
    // the supplied arguments; if several qualifying factories tie on that count, the last one
    // iterated is arbitrarily chosen and a warning is logged
    int matchingFactoryRequiredOptionsCount = -1;
    StoreFactoryFamilySpi matchingFactory = null;
    boolean matchingFactoriesHaveSameRequiredOptionsCount = false;
    LOGGER.debug("Finding Factories (size): " + internalStoreFamilies.size());
    for (final Entry entry : internalStoreFamilies.entrySet()) {
      final StoreFactoryFamilySpi factory = entry.getValue();
      final List missingOptions = getMissingRequiredOptions(factory, configOptions);
      final List matchingOptions = getMatchingRequiredOptions(factory, configOptions);
      if (missingOptions.isEmpty()
          && ((matchingFactory == null)
              || (matchingOptions.size() >= matchingFactoryRequiredOptionsCount))) {
        matchingFactory = factory;
        // a strictly-better later candidate resets this tie flag to false
        matchingFactoriesHaveSameRequiredOptionsCount =
            (matchingOptions.size() == matchingFactoryRequiredOptionsCount);
        matchingFactoryRequiredOptionsCount = matchingOptions.size();
      }
    }
    if (matchingFactory == null) {
      LOGGER.error("Unable to find any valid store");
    } else if (matchingFactoriesHaveSameRequiredOptionsCount) {
      LOGGER.warn("Multiple valid stores found with equal specificity for store");
      LOGGER.warn(matchingFactory.getType() + " will be automatically chosen");
    }
    return matchingFactory;
  }

  /** Returns a printable listing of all registered store family names. */
  private static String getStoreNames() {
    final Set uniqueNames = new HashSet<>();
    uniqueNames.addAll(getRegisteredStoreFactoryFamilies().keySet());
    return ConfigUtils.getOptions(uniqueNames).toString();
  }

  /** Convenience overload of {@link #exactMatch(StoreFactoryFamilySpi, Map, Map)} with no original params. */
  public static boolean exactMatch(
      final StoreFactoryFamilySpi geowaveStoreFactoryFamily,
      final Map filteredParams) {
    return exactMatch(geowaveStoreFactoryFamily, filteredParams, null);
  }

  /**
   * Returns true when the given params exactly match this factory family: every required option is
   * present, every supplied param is a known option (or the store hint), and an index store can
   * actually be constructed from the params.
   */
  public static boolean exactMatch(
      final StoreFactoryFamilySpi geowaveStoreFactoryFamily,
      final Map filteredParams,
      final Map originalParams) {
    final ConfigOption[] requiredOptions =
        GeoWaveStoreFinder.getRequiredOptions(geowaveStoreFactoryFamily);
    // first ensure all required options are fulfilled
    for (final ConfigOption requiredOption : requiredOptions) {
      if (!filteredParams.containsKey(requiredOption.getName())) {
        return false;
      }
    }
    // next ensure that all params match an available option
    final Set availableOptions = new HashSet<>();
    for (final ConfigOption option : GeoWaveStoreFinder.getAllOptions(
        geowaveStoreFactoryFamily,
        true)) {
      availableOptions.add(option.getName());
    }
    for (final String optionName : filteredParams.keySet()) {
      if (!availableOptions.contains(optionName) && !STORE_HINT_KEY.equals(optionName)) {
        return false;
      }
    }
    // lastly try to create the index store (pick a minimally required
    // store)
    try {
      final StoreFactoryOptions options =
          ConfigUtils.populateOptionsFromList(
              geowaveStoreFactoryFamily.getDataStoreFactory().createOptionsInstance(),
              originalParams != null ? originalParams : filteredParams);
      geowaveStoreFactoryFamily.getIndexStoreFactory().createStore(options);
    } catch (final Exception e) {
      LOGGER.info("supplied map is not able to construct index store", e);
      return false;
    }
    return true;
  }

  /** Returns (and lazily populates via SPI) the registry of store factory families. */
  public static synchronized Map getRegisteredStoreFactoryFamilies() {
    registeredStoreFactoryFamilies =
        getRegisteredFactories(StoreFactoryFamilySpi.class, registeredStoreFactoryFamilies);
    return registeredStoreFactoryFamilies;
  }

  /**
   * Returns all config options declared by the family's data store factory options class,
   * optionally including hidden options.
   */
  public static synchronized ConfigOption[] getAllOptions(
      final StoreFactoryFamilySpi storeFactoryFamily,
      final boolean includeHidden) {
    final List allOptions = new ArrayList<>();
    allOptions.addAll(
        Arrays.asList(
            ConfigUtils.createConfigOptionsFromJCommander(
                storeFactoryFamily.getDataStoreFactory().createOptionsInstance(),
                includeHidden)));
    // TODO our JCommanderPrefixTranslator's use of reflection does not
    // follow inheritance, these are commonly inherited classes and options
    // for all data stores provided as a stop gap until we can investigate
    // allOptions.addAll(
    // Arrays.asList(
    // ConfigUtils.createConfigOptionsFromJCommander(
    // new BaseDataStoreOptions())));
    // allOptions.add(
    // new ConfigOption(
    // StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION,
    // StoreFactoryOptions.GEOWAVE_NAMESPACE_DESCRIPTION,
    // true,
    // String.class));
    return allOptions.toArray(new ConfigOption[] {});
  }

  /** Returns only the non-optional (required) options of {@link #getAllOptions}. */
  public static synchronized ConfigOption[] getRequiredOptions(
      final StoreFactoryFamilySpi storeFactoryFamily) {
    final List requiredOptions = new ArrayList<>();
    for (final ConfigOption option : getAllOptions(storeFactoryFamily, false)) {
      if (!option.isOptional()) {
        requiredOptions.add(option);
      }
    }
    return requiredOptions.toArray(new ConfigOption[] {});
  }

  /**
   * Loads factories of the given class via SPI into the supplied map (keyed by cleaned type name)
   * if it is null; otherwise returns the existing map unchanged (load happens at most once).
   */
  private static Map getRegisteredFactories(final Class cls, Map registeredFactories) {
    if (registeredFactories == null) {
      registeredFactories = new HashMap<>();
      final Iterator storeFactories =
          new SPIServiceRegistry(GeoWaveStoreFinder.class).load(cls);
      while (storeFactories.hasNext()) {
        final T storeFactory = storeFactories.next();
        if (storeFactory != null) {
          final String name = storeFactory.getType();
          registeredFactories.put(ConfigUtils.cleanOptionName(name), storeFactory);
        }
      }
    }
    return registeredFactories;
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/PropertyStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.core.store;

import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldWriter;

/**
 * A basic property store for storing arbitrary information about a data store. The property value
 * can be any type that's supported by available {@link FieldReader} and {@link FieldWriter}
 * implementations.
 */
public interface PropertyStore {

  /**
   * Retrieves the property stored under the given key.
   *
   * @param propertyKey the key of the property to look up
   * @return the stored property; presumably null when no property exists for the key — TODO
   *         confirm against implementations
   */
  DataStoreProperty getProperty(String propertyKey);

  /**
   * Stores the given property.
   *
   * @param property the property to store
   */
  void setProperty(DataStoreProperty property);
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/StoreFactoryFamilySpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.core.store;

import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * SPI interface for a family of store factories: one implementation per backing store technology,
 * exposing a factory for each kind of GeoWave store that the backend supports.
 */
// NOTE(review): generic type arguments on GenericStoreFactory appear stripped by extraction;
// confirm against upstream source.
public interface StoreFactoryFamilySpi extends GenericFactory {
  /** @return the factory that creates {@link DataStore} instances */
  GenericStoreFactory getDataStoreFactory();

  /** @return the factory that creates {@link DataStatisticsStore} instances */
  GenericStoreFactory getDataStatisticsStoreFactory();

  /** @return the factory that creates {@link IndexStore} instances */
  GenericStoreFactory getIndexStoreFactory();

  /** @return the factory that creates {@link PersistentAdapterStore} instances */
  GenericStoreFactory getAdapterStoreFactory();

  /** @return the factory that creates {@link AdapterIndexMappingStore} instances */
  GenericStoreFactory getAdapterIndexMappingStoreFactory();

  /** @return the factory that creates {@link InternalAdapterStore} instances */
  GenericStoreFactory getInternalAdapterStoreFactory();

  /** @return the factory that creates {@link PropertyStore} instances */
  GenericStoreFactory getPropertyStoreFactory();

  /** @return the factory that creates low-level {@link DataStoreOperations} instances */
  GenericStoreFactory getDataStoreOperationsFactory();
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/StoreFactoryHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.core.store;

import org.locationtech.geowave.core.store.operations.DataStoreOperations;

/**
 * Helper used by store factories to create a backend's option instances and its low-level
 * operations from those options.
 */
public interface StoreFactoryHelper {
  /** @return a new, unpopulated options instance for this backend */
  public StoreFactoryOptions createOptionsInstance();

  /**
   * Creates the low-level data store operations for this backend.
   *
   * @param options the populated connection options
   * @return the resulting {@link DataStoreOperations}
   */
  public DataStoreOperations createOperations(StoreFactoryOptions options);
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/StoreFactoryOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.util.Arrays; import java.util.Properties; import org.locationtech.geowave.core.cli.Constants; import org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils; import org.locationtech.geowave.core.cli.utils.PropertiesUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.internal.Console; /** This interface doesn't actually do anything, is just used for tracking during development. 
*/ public abstract class StoreFactoryOptions { private static final Logger LOGGER = LoggerFactory.getLogger(StoreFactoryOptions.class); public static final String GEOWAVE_NAMESPACE_OPTION = "gwNamespace"; public static final String GEOWAVE_NAMESPACE_DESCRIPTION = "The geowave namespace (optional; default is no namespace)"; @Parameter(names = "--" + GEOWAVE_NAMESPACE_OPTION, description = GEOWAVE_NAMESPACE_DESCRIPTION) protected String geowaveNamespace; public StoreFactoryOptions() {} public StoreFactoryOptions(final String geowaveNamespace) { this.geowaveNamespace = geowaveNamespace; } public String getGeoWaveNamespace() { if ("null".equalsIgnoreCase(geowaveNamespace)) { return null; } return geowaveNamespace; } public void setGeoWaveNamespace(final String geowaveNamespace) { this.geowaveNamespace = geowaveNamespace; } public abstract StoreFactoryFamilySpi getStoreFactory(); public DataStorePluginOptions createPluginOptions() { return new DataStorePluginOptions(this); } public abstract DataStoreOptions getStoreOptions(); public void validatePluginOptions(final Console console) throws ParameterException { validatePluginOptions(new Properties(), console); } /** * Method to perform global validation for all plugin options * * @throws Exception */ public void validatePluginOptions(final Properties properties, final Console console) throws ParameterException { LOGGER.trace("ENTER :: validatePluginOptions()"); final PropertiesUtils propsUtils = new PropertiesUtils(properties); final boolean defaultEchoEnabled = propsUtils.getBoolean(Constants.CONSOLE_DEFAULT_ECHO_ENABLED_KEY, false); final boolean passwordEchoEnabled = propsUtils.getBoolean(Constants.CONSOLE_PASSWORD_ECHO_ENABLED_KEY, defaultEchoEnabled); LOGGER.debug( "Default console echo is {}, Password console echo is {}", new Object[] { defaultEchoEnabled ? "enabled" : "disabled", passwordEchoEnabled ? 
"enabled" : "disabled"}); for (final Field field : this.getClass().getDeclaredFields()) { for (final Annotation annotation : field.getAnnotations()) { if (annotation.annotationType() == Parameter.class) { final Parameter parameter = (Parameter) annotation; if (JCommanderParameterUtils.isRequired(parameter)) { field.setAccessible(true); // HPFortify // "Access Specifier Manipulation" // False Positive: These // fields are being modified // by trusted code, // in a way that is not // influenced by user input Object value = null; try { value = field.get(this); if (value == null) { console.println( "Field [" + field.getName() + "] is required: " + Arrays.toString(parameter.names()) + ": " + parameter.description()); console.print("Enter value for [" + field.getName() + "]: "); final boolean echoEnabled = JCommanderParameterUtils.isPassword(parameter) ? passwordEchoEnabled : defaultEchoEnabled; char[] password = console.readPassword(echoEnabled); final String strPassword = new String(password); password = null; if (!"".equals(strPassword.trim())) { value = ((strPassword != null) && !"".equals(strPassword.trim())) ? strPassword.trim() : null; } if (value == null) { throw new ParameterException( "Value for [" + field.getName() + "] cannot be null"); } else { field.set(this, value); } } } catch (final Exception ex) { LOGGER.error( "An error occurred validating plugin options for [" + this.getClass().getName() + "]: " + ex.getLocalizedMessage(), ex); } } } } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/StorePersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; import org.locationtech.geowave.core.store.adapter.BaseFieldDescriptor; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.BinaryDataAdapter; import org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl; import org.locationtech.geowave.core.store.adapter.SimpleRowTransform; import org.locationtech.geowave.core.store.api.AggregationQuery; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.data.visibility.FallbackVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.FieldLevelVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.FieldMappedVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.JsonFieldLevelVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler; import org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField; import org.locationtech.geowave.core.store.index.AttributeIndexImpl; import org.locationtech.geowave.core.store.index.BasicIndexModel; import org.locationtech.geowave.core.store.index.CustomAttributeIndex; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.index.CustomNameIndex; import 
org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.geowave.core.store.index.NullIndex; import org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter; import org.locationtech.geowave.core.store.query.aggregate.BinningAggregation; import org.locationtech.geowave.core.store.query.aggregate.BinningAggregationOptions; import org.locationtech.geowave.core.store.query.aggregate.CountAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldMinAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldMaxAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation; import org.locationtech.geowave.core.store.query.aggregate.CompositeAggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.aggregate.MergingAggregation; import org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation; import org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.CommonIndexCountAggregation; import org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation; import org.locationtech.geowave.core.store.query.constraints.BasicOrderedConstraintQuery; import org.locationtech.geowave.core.store.query.constraints.BasicOrderedConstraintQuery.OrderedConstraints; import org.locationtech.geowave.core.store.query.constraints.BasicQuery; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.query.constraints.CoordinateRangeQuery; import org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints.InternalCustomConstraints; import 
org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraintsWithFilter; import org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraintsWithFilter.InternalCustomQueryFilter; import org.locationtech.geowave.core.store.query.constraints.DataIdQuery; import org.locationtech.geowave.core.store.query.constraints.DataIdRangeQuery; import org.locationtech.geowave.core.store.query.constraints.EverythingQuery; import org.locationtech.geowave.core.store.query.constraints.ExplicitFilteredQuery; import org.locationtech.geowave.core.store.query.constraints.FilteredEverythingQuery; import org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery; import org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery; import org.locationtech.geowave.core.store.query.constraints.PrefixIdQuery; import org.locationtech.geowave.core.store.query.constraints.SimpleNumericQuery; import org.locationtech.geowave.core.store.query.filter.AdapterIdQueryFilter; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter; import org.locationtech.geowave.core.store.query.filter.CoordinateRangeQueryFilter; import org.locationtech.geowave.core.store.query.filter.DataIdQueryFilter; import org.locationtech.geowave.core.store.query.filter.DataIdRangeQueryFilter; import org.locationtech.geowave.core.store.query.filter.DedupeFilter; import org.locationtech.geowave.core.store.query.filter.ExpressionQueryFilter; import org.locationtech.geowave.core.store.query.filter.FilterList; import org.locationtech.geowave.core.store.query.filter.InsertionIdQueryFilter; import org.locationtech.geowave.core.store.query.filter.PrefixIdQueryFilter; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral; import 
org.locationtech.geowave.core.store.query.filter.expression.Exclude; import org.locationtech.geowave.core.store.query.filter.expression.GenericEqualTo; import org.locationtech.geowave.core.store.query.filter.expression.GenericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.GenericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.GenericNotEqualTo; import org.locationtech.geowave.core.store.query.filter.expression.Include; import org.locationtech.geowave.core.store.query.filter.expression.IsNotNull; import org.locationtech.geowave.core.store.query.filter.expression.IsNull; import org.locationtech.geowave.core.store.query.filter.expression.Not; import org.locationtech.geowave.core.store.query.filter.expression.Or; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Abs; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Add; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract; import org.locationtech.geowave.core.store.query.filter.expression.text.Concat; import org.locationtech.geowave.core.store.query.filter.expression.text.Contains; import org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.TextBetween; import 
org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral;
import org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;
import org.locationtech.geowave.core.store.query.options.CommonQueryOptions;
import org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey;
import org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;
import org.locationtech.geowave.core.store.query.options.QueryAllIndices;
import org.locationtech.geowave.core.store.query.options.QueryAllTypes;
import org.locationtech.geowave.core.store.query.options.QuerySingleIndex;

/**
 * Registers the persistable classes provided by the core store module with their serialization
 * IDs. Each ID must remain stable across versions because it is written with serialized data;
 * never renumber an existing entry.
 */
public class StorePersistableRegistry implements
    PersistableRegistrySpi,
    InternalPersistableRegistry {

  @Override
  public PersistableIdAndConstructor[] getSupportedPersistables() {
    return new PersistableIdAndConstructor[] {
        // 200 is a legacy class (pre 2.0)
        new PersistableIdAndConstructor((short) 201, BaseFieldDescriptor::new),
        // 202 is used by CoreRegisteredIndexFieldMappers
        new PersistableIdAndConstructor((short) 203, GlobalVisibilityHandler::new),
        new PersistableIdAndConstructor((short) 204, UnconstrainedVisibilityHandler::new),
        new PersistableIdAndConstructor((short) 205, FallbackVisibilityHandler::new),
        new PersistableIdAndConstructor((short) 206, FieldMappedVisibilityHandler::new),
        new PersistableIdAndConstructor((short) 207, FieldLevelVisibilityHandler::new),
        new PersistableIdAndConstructor((short) 208, AdapterIdQueryFilter::new),
        new PersistableIdAndConstructor((short) 209, BasicQueryFilter::new),
        new PersistableIdAndConstructor((short) 210, DataIdQueryFilter::new),
        new PersistableIdAndConstructor((short) 211, DedupeFilter::new),
        new PersistableIdAndConstructor((short) 212, DataIdQuery::new),
        new PersistableIdAndConstructor((short) 213, PrefixIdQueryFilter::new),
        // NOTE(review): 214 is absent with no explanatory comment — presumably retired or
        // registered elsewhere; confirm
        new PersistableIdAndConstructor((short) 215, BasicIndexModel::new),
        new PersistableIdAndConstructor((short) 216, JsonFieldLevelVisibilityHandler::new),
        new PersistableIdAndConstructor((short) 217, IndexImpl::new),
        new PersistableIdAndConstructor((short) 218, CustomNameIndex::new),
        new PersistableIdAndConstructor((short) 219, NullIndex::new),
        new PersistableIdAndConstructor((short) 220, DataIdRangeQuery::new),
        new PersistableIdAndConstructor((short) 221, AttributeIndexImpl::new),
        new PersistableIdAndConstructor((short) 222, CustomAttributeIndex::new),
        new PersistableIdAndConstructor((short) 223, AdapterFieldTextIndexEntryConverter::new),
        new PersistableIdAndConstructor((short) 224, BooleanFieldValue::new),
        new PersistableIdAndConstructor((short) 225, BooleanLiteral::new),
        new PersistableIdAndConstructor((short) 226, GenericFieldValue::new),
        new PersistableIdAndConstructor((short) 227, GenericLiteral::new),
        new PersistableIdAndConstructor((short) 228, BasicQueryByClass::new),
        new PersistableIdAndConstructor((short) 229, CoordinateRangeQuery::new),
        new PersistableIdAndConstructor((short) 230, CoordinateRangeQueryFilter::new),
        new PersistableIdAndConstructor((short) 231, CommonQueryOptions::new),
        new PersistableIdAndConstructor((short) 232, DataIdRangeQueryFilter::new),
        new PersistableIdAndConstructor((short) 233, CountAggregation::new),
        new PersistableIdAndConstructor((short) 234, Include::new),
        new PersistableIdAndConstructor((short) 235, InsertionIdQueryFilter::new),
        new PersistableIdAndConstructor((short) 236, Exclude::new),
        new PersistableIdAndConstructor((short) 237, FilterByTypeQueryOptions::new),
        new PersistableIdAndConstructor((short) 238, QueryAllIndices::new),
        new PersistableIdAndConstructor((short) 239, And::new),
        new PersistableIdAndConstructor((short) 240, Or::new),
        new PersistableIdAndConstructor((short) 241, AggregateTypeQueryOptions::new),
        new PersistableIdAndConstructor((short) 242, AdapterMapping::new),
        new PersistableIdAndConstructor((short) 243, Not::new),
        new PersistableIdAndConstructor((short) 244, Query::new),
        new PersistableIdAndConstructor((short) 245, AggregationQuery::new),
        new PersistableIdAndConstructor((short) 246, NumericComparisonOperator::new),
        new PersistableIdAndConstructor((short) 247, TextComparisonOperator::new),
        new PersistableIdAndConstructor((short) 248, QuerySingleIndex::new),
        new PersistableIdAndConstructor((short) 249, QueryAllTypes::new),
        new PersistableIdAndConstructor((short) 250, FilterList::new),
        new PersistableIdAndConstructor((short) 251, PrefixIdQuery::new),
        new PersistableIdAndConstructor((short) 252, InsertionIdQuery::new),
        new PersistableIdAndConstructor((short) 253, EverythingQuery::new),
        new PersistableIdAndConstructor((short) 254, SimpleRowTransform::new),
        new PersistableIdAndConstructor((short) 255, MergingAggregation::new),
        new PersistableIdAndConstructor((short) 256, SimpleNumericQuery::new),
        new PersistableIdAndConstructor((short) 257, ConstraintsByClass::new),
        new PersistableIdAndConstructor((short) 258, OrderedConstraints::new),
        new PersistableIdAndConstructor((short) 259, BasicOrderedConstraintQuery::new),
        new PersistableIdAndConstructor((short) 260, BasicQuery::new),
        new PersistableIdAndConstructor((short) 261, BinaryDataAdapter::new),
        // 262 is a legacy class (pre 2.0)
        new PersistableIdAndConstructor((short) 263, CustomIndex::new),
        new PersistableIdAndConstructor((short) 264, CustomQueryConstraints::new),
        new PersistableIdAndConstructor((short) 265, InternalCustomConstraints::new),
        new PersistableIdAndConstructor((short) 266, BinningAggregationOptions::new),
        new PersistableIdAndConstructor((short) 267, BinningAggregation::new),
        new PersistableIdAndConstructor((short) 268, CustomQueryConstraintsWithFilter::new),
        new PersistableIdAndConstructor((short) 269, InternalCustomQueryFilter::new),
        new PersistableIdAndConstructor((short) 270, InternalDataAdapterImpl::new),
        new PersistableIdAndConstructor((short) 271, BasicNumericDimensionField::new),
        new PersistableIdAndConstructor((short) 272, DataStoreProperty::new),
        new PersistableIdAndConstructor((short) 273, AdapterToIndexMapping::new),
        new PersistableIdAndConstructor((short) 274, HintKey::new),
        new PersistableIdAndConstructor((short) 275, NumericBetween::new),
        new PersistableIdAndConstructor((short) 276, Abs::new),
        new PersistableIdAndConstructor((short) 277, Add::new),
        new PersistableIdAndConstructor((short) 278, Subtract::new),
        new PersistableIdAndConstructor((short) 279, Multiply::new),
        new PersistableIdAndConstructor((short) 280, Divide::new),
        new PersistableIdAndConstructor((short) 281, NumericFieldValue::new),
        new PersistableIdAndConstructor((short) 282, NumericLiteral::new),
        new PersistableIdAndConstructor((short) 283, Concat::new),
        new PersistableIdAndConstructor((short) 284, Contains::new),
        new PersistableIdAndConstructor((short) 285, EndsWith::new),
        new PersistableIdAndConstructor((short) 286, StartsWith::new),
        new PersistableIdAndConstructor((short) 287, TextFieldValue::new),
        new PersistableIdAndConstructor((short) 288, TextLiteral::new),
        new PersistableIdAndConstructor((short) 289, TextBetween::new),
        new PersistableIdAndConstructor((short) 290, IsNotNull::new),
        new PersistableIdAndConstructor((short) 291, OptimalExpressionQuery::new),
        new PersistableIdAndConstructor((short) 292, GenericNotEqualTo::new),
        new PersistableIdAndConstructor((short) 293, GenericEqualTo::new),
        new PersistableIdAndConstructor((short) 294, ExplicitFilteredQuery::new),
        new PersistableIdAndConstructor((short) 295, ExpressionQueryFilter::new),
        new PersistableIdAndConstructor((short) 296, FilteredEverythingQuery::new),
        new PersistableIdAndConstructor((short) 297, BasicDataTypeAdapter::new),
        new PersistableIdAndConstructor((short) 298, IsNull::new),
        new PersistableIdAndConstructor((short) 299, FieldNameParam::new),
        // use 3000+ range
        new PersistableIdAndConstructor((short) 3000, OptimalCountAggregation::new),
        new PersistableIdAndConstructor((short) 3001, CommonIndexCountAggregation::new),
        new PersistableIdAndConstructor((short) 3002, FieldCountAggregation::new),
        new PersistableIdAndConstructor((short) 3003, FieldMaxAggregation::new),
        new PersistableIdAndConstructor((short) 3004, FieldMinAggregation::new),
        new PersistableIdAndConstructor((short) 3005, FieldSumAggregation::new),
        new PersistableIdAndConstructor((short) 3006, CompositeAggregation::new)};
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AbstractAdapterPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.core.store.adapter;

import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.data.PersistentDataset;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/**
 * A persistence encoding that extends the common indexed encoding with the adapter-specific
 * (extended) field data beyond the common index fields.
 *
 * @since 0.9.1
 */
public abstract class AbstractAdapterPersistenceEncoding extends CommonIndexedPersistenceEncoding {
  // fields beyond the common index model, supplied by the data adapter
  protected final PersistentDataset adapterExtendedData;

  /**
   * @param internalAdapterId the internal (short) adapter ID
   * @param dataId the data ID of the entry
   * @param partitionKey the partition key of the row
   * @param sortKey the sort key of the row
   * @param duplicateCount the number of duplicate rows for this entry
   * @param commonData field data belonging to the common index model
   * @param unknownData field data not (yet) resolved against the adapter
   * @param adapterExtendedData adapter-specific field data beyond the common index fields
   */
  public AbstractAdapterPersistenceEncoding(
      final short internalAdapterId,
      final byte[] dataId,
      final byte[] partitionKey,
      final byte[] sortKey,
      final int duplicateCount,
      final PersistentDataset commonData,
      final PersistentDataset unknownData,
      final PersistentDataset adapterExtendedData) {
    super(
        internalAdapterId,
        dataId,
        partitionKey,
        sortKey,
        duplicateCount,
        commonData,
        unknownData);
    this.adapterExtendedData = adapterExtendedData;
  }

  /**
   * This returns a representation of the custom fields for the data adapter
   *
   * @return the extended data beyond the common index fields that are provided by the adapter
   */
  public PersistentDataset getAdapterExtendedData() {
    return adapterExtendedData;
  }

  /**
   * Process unknownData given adapter and model to convert to adapter extended values
   *
   * @param adapter the data adapter used to resolve unknown fields
   * @param model the common index model
   */
  public abstract void convertUnknownValues(
      final InternalDataAdapter adapter,
      final CommonIndexModel model);
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AbstractDataTypeAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.RowBuilder;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;
import org.locationtech.geowave.core.store.util.GenericTypeResolver;

/**
 * Provides an abstract implementation of the {@link DataTypeAdapter} interface that handles field
 * descriptors, data ID, and type name.
 *
 * <p>NOTE(review): generic type parameters (e.g. {@code Map<String, Integer>}) appear stripped by
 * extraction in this dump — confirm against upstream source.
 *
 * @param the entry data type
 */
public abstract class AbstractDataTypeAdapter implements DataTypeAdapter {
  private String typeName = null;
  private FieldDescriptor[] fieldDescriptors = null;
  private FieldDescriptor dataIDFieldDescriptor = null;
  // Field name -> position in fieldDescriptors; rebuilt on construction and deserialization.
  private Map fieldDescriptorIndices = new HashMap<>();
  // Lazily initialized; NOTE(review): init is not synchronized — confirm single-threaded use.
  private FieldWriter dataIDWriter = null;
  private FieldReader dataIDReader = null;
  // Maintain backwards compatibility with 2.0.x
  protected boolean serializeDataIDAsString = false;

  /** No-args constructor for {@code Persistable} deserialization via {@link #fromBinary}. */
  public AbstractDataTypeAdapter() {}

  public AbstractDataTypeAdapter(
      final String typeName,
      final FieldDescriptor[] fieldDescriptors,
      final FieldDescriptor dataIDFieldDescriptor) {
    this.typeName = typeName;
    if (fieldDescriptors == null) {
      throw new IllegalArgumentException("An array of field descriptors must be provided.");
    }
    if (dataIDFieldDescriptor == null) {
      throw new IllegalArgumentException("A data ID field descriptor must be provided.");
    }
    this.fieldDescriptors = fieldDescriptors;
    this.dataIDFieldDescriptor = dataIDFieldDescriptor;
    populateFieldDescriptorIndices();
  }

  // Index each descriptor by its field name for O(1) lookups in setField/getFieldDescriptor.
  private void populateFieldDescriptorIndices() {
    for (int i = 0; i < fieldDescriptors.length; i++) {
      fieldDescriptorIndices.put(fieldDescriptors[i].fieldName(), i);
    }
  }

  @Override
  public String getTypeName() {
    return typeName;
  }

  /**
   * Returns the value of the field with the given name from the entry. If the data ID field name is
   * passed, it is expected that this method will return the value of that field even if the data ID
   * field is not included in the set of field descriptors.
   *
   * @param entry the entry
   * @param fieldName the field name or data ID field name
   * @return the value of the field on the entry
   */
  @Override
  public abstract Object getFieldValue(T entry, String fieldName);

  @SuppressWarnings("unchecked")
  @Override
  public byte[] getDataId(T entry) {
    if (serializeDataIDAsString) {
      // Legacy 2.0.x behavior: data ID is the toString() of the field, UTF-encoded.
      return StringUtils.stringToBinary(
          getFieldValue(entry, dataIDFieldDescriptor.fieldName()).toString());
    }
    if (dataIDWriter == null) {
      dataIDWriter =
          (FieldWriter) FieldUtils.getDefaultWriterForClass(dataIDFieldDescriptor.bindingClass());
    }
    return dataIDWriter.writeField(getFieldValue(entry, dataIDFieldDescriptor.fieldName()));
  }

  @SuppressWarnings({"unchecked", "rawtypes"})
  @Override
  public Class getDataClass() {
    // Resolve the concrete T of the subclass from the generic type hierarchy.
    return (Class) GenericTypeResolver.resolveTypeArgument(
        this.getClass(),
        AbstractDataTypeAdapter.class);
  }

  @SuppressWarnings("unchecked")
  @Override
  public RowBuilder newRowBuilder(FieldDescriptor[] outputFieldDescriptors) {
    if (!serializeDataIDAsString && dataIDReader == null) {
      dataIDReader =
          (FieldReader) FieldUtils.getDefaultReaderForClass(dataIDFieldDescriptor.bindingClass());
    }
    return new RowBuilder() {
      // Accumulates field values between setField calls and buildRow; reused across rows.
      private Object[] values = new Object[outputFieldDescriptors.length];

      @Override
      public void setField(String fieldName, Object fieldValue) {
        values[fieldDescriptorIndices.get(fieldName)] = fieldValue;
      }

      @Override
      public void setFields(Map valueMap) {
        valueMap.entrySet().forEach(
            entry -> values[fieldDescriptorIndices.get(entry.getKey())] = entry.getValue());
      }

      @Override
      public T buildRow(byte[] dataId) {
        // NOTE(review): in legacy string mode the raw bytes are passed through as the data ID
        // object (not decoded to a String) — confirm buildObject implementations ignore it then.
        final Object dataIDObject =
            serializeDataIDAsString ? dataId : dataIDReader.readField(dataId);
        T obj = buildObject(dataIDObject, values);
        // Clear for reuse so stale values don't leak into the next row.
        Arrays.fill(values, null);
        return obj;
      }
    };
  }

  public abstract T buildObject(final Object dataId, final Object[] fieldValues);

  @Override
  public FieldDescriptor[] getFieldDescriptors() {
    return fieldDescriptors;
  }

  @Override
  public FieldDescriptor getFieldDescriptor(String fieldName) {
    final Integer index = fieldDescriptorIndices.get(fieldName);
    if (index == null) {
      return null;
    }
    return fieldDescriptors[index];
  }

  protected FieldDescriptor getDataIDFieldDescriptor() {
    return dataIDFieldDescriptor;
  }

  @Override
  public byte[] toBinary() {
    // Wire format: [typeName][fieldDescriptors][legacy dataID field name (2.0.x) or empty]
    // and, in the non-legacy form only, a trailing [dataIDFieldDescriptor] section.
    final byte[] typeNameBytes = StringUtils.stringToBinary(typeName);
    final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(fieldDescriptors);
    // Maintain backwards compatibility for 2.0.x
    final byte[] dataIDFieldBytes =
        serializeDataIDAsString
            ? StringUtils.stringToBinary(dataIDFieldDescriptor.fieldName())
            : new byte[0];
    final byte[] dataIDFieldDescriptorBytes = PersistenceUtils.toBinary(dataIDFieldDescriptor);
    int bufferSize =
        VarintUtils.unsignedIntByteLength(typeNameBytes.length)
            + VarintUtils.unsignedIntByteLength(fieldDescriptorBytes.length)
            + VarintUtils.unsignedIntByteLength(dataIDFieldBytes.length)
            + typeNameBytes.length
            + fieldDescriptorBytes.length
            + dataIDFieldBytes.length;
    if (!serializeDataIDAsString) {
      bufferSize +=
          VarintUtils.unsignedIntByteLength(dataIDFieldDescriptorBytes.length)
              + dataIDFieldDescriptorBytes.length;
    }
    final ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
    VarintUtils.writeUnsignedInt(typeNameBytes.length, buffer);
    buffer.put(typeNameBytes);
    VarintUtils.writeUnsignedInt(fieldDescriptorBytes.length, buffer);
    buffer.put(fieldDescriptorBytes);
    VarintUtils.writeUnsignedInt(dataIDFieldBytes.length, buffer);
    buffer.put(dataIDFieldBytes);
    if (!serializeDataIDAsString) {
      VarintUtils.writeUnsignedInt(dataIDFieldDescriptorBytes.length, buffer);
      buffer.put(dataIDFieldDescriptorBytes);
    }
    return buffer.array();
  }

  @Override
  public void fromBinary(byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final byte[] typeNameBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(typeNameBytes);
    this.typeName = StringUtils.stringFromBinary(typeNameBytes);
    final byte[] fieldDescriptorBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(fieldDescriptorBytes);
    final List fieldDescriptorList = PersistenceUtils.fromBinaryAsList(fieldDescriptorBytes);
    this.fieldDescriptors =
        fieldDescriptorList.toArray(new FieldDescriptor[fieldDescriptorList.size()]);
    final byte[] dataIDFieldBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(dataIDFieldBytes);
    final String dataIDField = StringUtils.stringFromBinary(dataIDFieldBytes);
    if (buffer.hasRemaining()) {
      // Current format: the data ID descriptor is serialized explicitly at the end.
      final byte[] dataIDFieldDescriptorBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
      buffer.get(dataIDFieldDescriptorBytes);
      this.dataIDFieldDescriptor =
          (FieldDescriptor) PersistenceUtils.fromBinary(dataIDFieldDescriptorBytes);
    } else {
      // Legacy 2.0.x format: the data ID is referenced by field name and encoded as a string.
      for (int i = 0; i < fieldDescriptors.length; i++) {
        if (fieldDescriptors[i].fieldName().equals(dataIDField)) {
          this.dataIDFieldDescriptor = fieldDescriptors[i];
        }
      }
      this.serializeDataIDAsString = true;
    }
    populateFieldDescriptorIndices();
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterIndexMappingStore.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import org.locationtech.geowave.core.store.AdapterToIndexMapping;

/**
 * This is responsible for persisting adapter/index mappings (either in memory or to disk depending
 * on the implementation).
 */
public interface AdapterIndexMappingStore {
  /**
   * Returns the indices associated with the given adapter.
   *
   * @param internalAdapterId the internal adapter ID of the adapter
   * @return the adapter to index mapping
   */
  public AdapterToIndexMapping[] getIndicesForAdapter(short internalAdapterId);

  /**
   * Returns the mapping between the given adapter and the named index.
   *
   * @param adapterId the internal adapter ID of the adapter
   * @param indexName the name of the index
   * @return the adapter to index mapping
   */
  public AdapterToIndexMapping getMapping(short adapterId, String indexName);

  /**
   * If an adapter is already associated with indices and the provided indices do not match, update
   * the mapping to include the combined set of indices.
   *
   * @param mapping the mapping to add
   */
  public void addAdapterIndexMapping(AdapterToIndexMapping mapping);

  /**
   * Remove the given adapter from the mapping store.
   *
   * @param adapterId the internal adapter ID of the adapter
   */
  public void remove(short adapterId);

  /**
   * Remove an index for the specified adapter mapping. The method should return false if the
   * adapter, or index for the adapter does not exist.
   *
   * @param adapterId the internal adapter ID of the adapter
   * @param indexName the name of the index
   */
  public boolean remove(short adapterId, String indexName);

  /**
   * Remove all mappings from the store.
   */
  public void removeAll();
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterPersistenceEncoding.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;
import org.locationtech.geowave.core.store.data.PersistentDataset;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/**
 * This is an implementation of persistence encoding that also contains all of the extended data
 * values used to form the native type supported by this adapter. It does not contain any
 * information about the entry in a particular index and is used when writing an entry, prior to its
 * existence in an index.
 */
public class AdapterPersistenceEncoding extends AbstractAdapterPersistenceEncoding {
  public AdapterPersistenceEncoding(
      final short internalAdapterId,
      final byte[] dataId,
      final PersistentDataset commonData,
      final PersistentDataset adapterExtendedData) {
    // No index placement yet, so partition/sort keys are null and duplicate count is 0.
    super(
        internalAdapterId,
        dataId,
        null,
        null,
        0,
        commonData,
        new MultiFieldPersistentDataset(),
        adapterExtendedData); // all data is identified by
    // the adapter, there is
    // inherently no unknown
    // data elements
  }

  @Override
  public void convertUnknownValues(
      final InternalDataAdapter adapter,
      final CommonIndexModel model) {
    // inherently no unknown data, nothing to do
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterStore.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * This is responsible for persisting data adapters (either in memory or to disk depending on the
 * implementation).
 *
 * <p>NOTE(review): type parameters (presumably a key type K and adapter type V) appear stripped by
 * extraction here — confirm against upstream source.
 */
public interface AdapterStore> {
  /**
   * Add the adapter to the store
   *
   * @param adapter the adapter
   */
  public void addAdapter(V adapter);

  /**
   * Get an adapter from the store by its unique ID
   *
   * @param adapterId the unique adapter ID
   * @return the adapter, null if it doesn't exist
   */
  public V getAdapter(K adapterId);

  /**
   * Check for the existence of the adapter with the given unique ID
   *
   * @param adapterId the unique ID to look up
   * @return a boolean flag indicating whether the adapter exists
   */
  public boolean adapterExists(K adapterId);

  /**
   * Get the full set of adapters within this store
   *
   * @return an array of all of the adapters in this store
   */
  public V[] getAdapters();

  /** Remove all adapters from the store. */
  public void removeAll();

  /** @param adapterId the adapter ID to remove */
  public void removeAdapter(K adapterId);
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AdapterStoreWrapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import java.util.Arrays;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * Given a transient store and a internal adapter store to use to map between internal IDs and
 * external IDs, we can wrap an implementation as a persistent adapter store
 */
public class AdapterStoreWrapper implements PersistentAdapterStore {
  private final TransientAdapterStore adapterStore;
  private final InternalAdapterStore internalAdapterStore;

  public AdapterStoreWrapper(
      final TransientAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore) {
    this.adapterStore = adapterStore;
    this.internalAdapterStore = internalAdapterStore;
  }

  @Override
  public void addAdapter(final InternalDataAdapter adapter) {
    // Only the wrapped (external) adapter is stored; the internal ID mapping is managed elsewhere.
    adapterStore.addAdapter(adapter.getAdapter());
  }

  @Override
  public InternalDataAdapter getAdapter(final Short adapterId) {
    if (adapterId == null) {
      return null;
    }
    // NOTE(review): if the type name lookup or the transient store lookup yields null, the
    // asInternalAdapter call below would NPE — confirm callers guard with adapterExists first.
    final DataTypeAdapter adapter =
        adapterStore.getAdapter(internalAdapterStore.getTypeName(adapterId));
    if (adapter instanceof InternalDataAdapter) {
      return (InternalDataAdapter) adapter;
    }
    return adapter.asInternalAdapter(adapterId);
  }

  @Override
  public boolean adapterExists(final Short adapterId) {
    if (adapterId != null) {
      return internalAdapterStore.getTypeName(adapterId) != null;
    }
    return false;
  }

  @Override
  public InternalDataAdapter[] getAdapters() {
    final DataTypeAdapter[] adapters = adapterStore.getAdapters();
    return Arrays.stream(adapters).map(adapter -> {
      if (adapter instanceof InternalDataAdapter) {
        return (InternalDataAdapter) adapter;
      }
      final Short adapterId = internalAdapterStore.getAdapterId(adapter.getTypeName());
      if (adapterId == null) {
        // An adapter with no registered internal ID maps to a null element in the result array;
        // callers must tolerate null entries.
        return null;
      }
      return adapter.asInternalAdapter(adapterId);
    }).toArray(InternalDataAdapter[]::new);
  }

  @Override
  public void removeAll() {
    adapterStore.removeAll();
  }

  @Override
  public void removeAdapter(final Short adapterId) {
    // Resolve the internal ID back to a type name before delegating the removal.
    final String typeName = internalAdapterStore.getTypeName(adapterId);
    if (typeName != null) {
      adapterStore.removeAdapter(typeName);
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/AsyncPersistenceEncoding.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import java.util.concurrent.CompletableFuture;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;

/**
 * A persistence encoding whose field values are resolved asynchronously.
 *
 * <p>NOTE(review): the future's type argument (presumably {@code GeoWaveValue[]}, given the
 * import) appears stripped by extraction — confirm against upstream source.
 */
public interface AsyncPersistenceEncoding {
  CompletableFuture getFieldValuesFuture();
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/BaseFieldDescriptor.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Set; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import com.beust.jcommander.internal.Sets; /** * Provides a base implementation for adapter field descriptors. * * @param the adapter field type */ public class BaseFieldDescriptor implements FieldDescriptor { private Class bindingClass; private String fieldName; private Set indexHints; public BaseFieldDescriptor() {} public BaseFieldDescriptor( final Class bindingClass, final String fieldName, final Set indexHints) { this.bindingClass = bindingClass; this.fieldName = fieldName; this.indexHints = indexHints; } @Override public Class bindingClass() { return bindingClass; } @Override public String fieldName() { return fieldName; } @Override public Set indexHints() { return indexHints; } @Override public byte[] toBinary() { final byte[] classBytes = StringUtils.stringToBinary(bindingClass.getName()); final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName); final String[] hintStrings = indexHints.stream().map(hint -> hint.getHintString()).toArray(String[]::new); final byte[] hintBytes = StringUtils.stringsToBinary(hintStrings); final ByteBuffer buffer = ByteBuffer.allocate( VarintUtils.unsignedShortByteLength((short) classBytes.length) + VarintUtils.unsignedShortByteLength((short) fieldNameBytes.length) + VarintUtils.unsignedShortByteLength((short) hintBytes.length) + classBytes.length + fieldNameBytes.length + 
hintBytes.length); VarintUtils.writeUnsignedShort((short) classBytes.length, buffer); buffer.put(classBytes); VarintUtils.writeUnsignedShort((short) fieldNameBytes.length, buffer); buffer.put(fieldNameBytes); VarintUtils.writeUnsignedShort((short) hintBytes.length, buffer); buffer.put(hintBytes); return buffer.array(); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final byte[] classBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(classBytes); final String className = StringUtils.stringFromBinary(classBytes); try { bindingClass = (Class) Class.forName(className); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to deserialize class for field descriptor: " + className); } final byte[] fieldNameBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(fieldNameBytes); fieldName = StringUtils.stringFromBinary(fieldNameBytes); final byte[] hintBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(hintBytes); final String[] hintStrings = StringUtils.stringsFromBinary(hintBytes); indexHints = Sets.newHashSet(); Arrays.stream(hintStrings).forEach(hint -> indexHints.add(new IndexDimensionHint(hint))); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/BasicDataTypeAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveFieldAnnotation; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; /** * A data type adapter implementation with explicit mappings for accessors and mutators. These * mappings can be automatically inferred from annotations or public properties via the static * `newAdapter` method. 
* * @param the data type */ public class BasicDataTypeAdapter extends AbstractDataTypeAdapter { private Class dataClass; private Constructor objectConstructor; private Map> accessors; private Map> mutators; public BasicDataTypeAdapter() {} public BasicDataTypeAdapter( final String typeName, final Class dataClass, final FieldDescriptor[] fieldDescriptors, final FieldDescriptor dataIDFieldDescriptor, final Map> accessors, final Map> mutators) { super(typeName, fieldDescriptors, dataIDFieldDescriptor); this.dataClass = dataClass; try { objectConstructor = dataClass.getDeclaredConstructor(); objectConstructor.setAccessible(true); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( "A no-args constructor is required for object based data adapter classes."); } this.accessors = accessors; this.mutators = mutators; } @Override public Object getFieldValue(T entry, String fieldName) { if (accessors.containsKey(fieldName)) { return accessors.get(fieldName).get(entry); } return null; } @Override public T buildObject(final Object dataId, final Object[] fieldValues) { try { final T object = objectConstructor.newInstance(); final FieldDescriptor[] fields = getFieldDescriptors(); for (int i = 0; i < fields.length; i++) { mutators.get(fields[i].fieldName()).set(object, fieldValues[i]); } if (!serializeDataIDAsString) { mutators.get(getDataIDFieldDescriptor().fieldName()).set(object, dataId); } return object; } catch (InstantiationException | IllegalAccessException | SecurityException | IllegalArgumentException | InvocationTargetException e) { throw new RuntimeException("Unable to build object."); } } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final byte[] classBytes = StringUtils.stringToBinary(dataClass.getName()); int totalBytes = VarintUtils.unsignedIntByteLength(superBinary.length) + VarintUtils.unsignedIntByteLength(classBytes.length) + superBinary.length + classBytes.length; final FieldDescriptor[] 
descriptors = getFieldDescriptors(); for (final FieldDescriptor descriptor : descriptors) { totalBytes += 1 + accessors.get(descriptor.fieldName()).byteCount(); totalBytes += 1 + mutators.get(descriptor.fieldName()).byteCount(); } totalBytes += 1 + accessors.get(getDataIDFieldDescriptor().fieldName()).byteCount(); totalBytes += 1 + mutators.get(getDataIDFieldDescriptor().fieldName()).byteCount(); final ByteBuffer buffer = ByteBuffer.allocate(totalBytes); VarintUtils.writeUnsignedInt(superBinary.length, buffer); buffer.put(superBinary); VarintUtils.writeUnsignedInt(classBytes.length, buffer); buffer.put(classBytes); for (final FieldDescriptor descriptor : descriptors) { final Accessor accessor = accessors.get(descriptor.fieldName()); final Mutator mutator = mutators.get(descriptor.fieldName()); buffer.put(accessor instanceof FieldAccessor ? (byte) 1 : (byte) 0); accessor.toBinary(buffer); buffer.put(mutator instanceof FieldMutator ? (byte) 1 : (byte) 0); mutator.toBinary(buffer); } final Accessor accessor = accessors.get(getDataIDFieldDescriptor().fieldName()); final Mutator mutator = mutators.get(getDataIDFieldDescriptor().fieldName()); buffer.put(accessor instanceof FieldAccessor ? (byte) 1 : (byte) 0); accessor.toBinary(buffer); buffer.put(mutator instanceof FieldMutator ? 
(byte) 1 : (byte) 0); mutator.toBinary(buffer); return buffer.array(); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final byte[] superBinary = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(superBinary); super.fromBinary(superBinary); final byte[] classBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(classBytes); final String className = StringUtils.stringFromBinary(classBytes); try { dataClass = (Class) Class.forName(className); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to find data class for adapter: " + className); } try { objectConstructor = dataClass.getDeclaredConstructor(); objectConstructor.setAccessible(true); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException("Unable to find no-args constructor for class: " + className); } final FieldDescriptor[] descriptors = getFieldDescriptors(); accessors = new HashMap<>(descriptors.length); mutators = new HashMap<>(descriptors.length);; for (final FieldDescriptor descriptor : descriptors) { final Accessor accessor; if (buffer.get() > 0) { accessor = new FieldAccessor<>(); } else { accessor = new MethodAccessor<>(); } accessor.fromBinary(dataClass, buffer); accessors.put(descriptor.fieldName(), accessor); final Mutator mutator; if (buffer.get() > 0) { mutator = new FieldMutator<>(); } else { mutator = new MethodMutator<>(); } mutator.fromBinary(dataClass, buffer); mutators.put(descriptor.fieldName(), mutator); } if (buffer.hasRemaining()) { final Accessor accessor; if (buffer.get() > 0) { accessor = new FieldAccessor<>(); } else { accessor = new MethodAccessor<>(); } accessor.fromBinary(dataClass, buffer); accessors.put(getDataIDFieldDescriptor().fieldName(), accessor); final Mutator mutator; if (buffer.get() > 0) { mutator = new FieldMutator<>(); } else { mutator = new MethodMutator<>(); } mutator.fromBinary(dataClass, buffer); 
mutators.put(getDataIDFieldDescriptor().fieldName(), mutator); } } @Override public Class getDataClass() { return dataClass; } /** * Create a new data type adapter from the specified class. If the class is annotated with * `@GeoWaveDataType`, all fields will be inferred from GeoWave field annotations. Otherwise * public fields and properties will be used. The data type field will also be encoded as a * regular field. * * @param the data type * @param typeName the type name for this adapter * @param dataClass the data type class * @param dataIdField the field to use for unique data IDs * @return the data adapter */ public static BasicDataTypeAdapter newAdapter( final String typeName, final Class dataClass, final String dataIdField) { return newAdapter(typeName, dataClass, dataIdField, false); } /** * Create a new data type adapter from the specified class. If the class is annotated with * `@GeoWaveDataType`, all fields will be inferred from GeoWave field annotations. Otherwise * public fields and properties will be used. 
* * @param the data type * @param typeName the type name for this adapter * @param dataClass the data type class * @param dataIdField the field to use for unique data IDs * @param removeDataIDFromFieldList if {@code true} the data ID field will not be included in the * full list of fields, useful to prevent the data from being written twice at the cost of * some querying simplicity * @return the data adapter */ public static BasicDataTypeAdapter newAdapter( final String typeName, final Class dataClass, final String dataIdField, final boolean removeDataIDFromFieldList) { final List> fieldDescriptors = new LinkedList<>(); FieldDescriptor dataIdFieldDescriptor = null; final Set addedFields = new HashSet<>(); final Map> accessors = new HashMap<>(); final Map> mutators = new HashMap<>(); if (dataClass.isAnnotationPresent(GeoWaveDataType.class)) { // infer fields from annotations Class current = dataClass; while (!current.equals(Object.class)) { for (final Field f : current.getDeclaredFields()) { for (final Annotation a : f.getDeclaredAnnotations()) { if (a.annotationType().isAnnotationPresent(GeoWaveFieldAnnotation.class)) { try { final FieldDescriptor descriptor = a.annotationType().getAnnotation( GeoWaveFieldAnnotation.class).fieldDescriptorBuilder().newInstance().buildFieldDescriptor( f); checkWriterForClass(normalizeClass(f.getType())); if (addedFields.contains(descriptor.fieldName())) { throw new RuntimeException("Duplicate field name: " + descriptor.fieldName()); } f.setAccessible(true); accessors.put(descriptor.fieldName(), new FieldAccessor<>(f)); mutators.put(descriptor.fieldName(), new FieldMutator<>(f)); addedFields.add(descriptor.fieldName()); if (descriptor.fieldName().equals(dataIdField)) { dataIdFieldDescriptor = descriptor; if (removeDataIDFromFieldList) { continue; } } fieldDescriptors.add(descriptor); } catch (InstantiationException | IllegalAccessException e) { throw new RuntimeException( "Unable to build field descriptor for field " + f.getName()); } 
} } } current = current.getSuperclass(); } } else { // Infer fields from properties and public fields try { final BeanInfo info = Introspector.getBeanInfo(dataClass); final PropertyDescriptor[] properties = info.getPropertyDescriptors(); for (final PropertyDescriptor descriptor : properties) { if (descriptor.getName().equals("class")) { continue; } if (descriptor.getWriteMethod() == null) { continue; } if (descriptor.getReadMethod() == null) { continue; } final Class type = normalizeClass(descriptor.getPropertyType()); checkWriterForClass(type); accessors.put(descriptor.getName(), new MethodAccessor<>(descriptor.getReadMethod())); mutators.put(descriptor.getName(), new MethodMutator<>(descriptor.getWriteMethod())); addedFields.add(descriptor.getName()); final FieldDescriptor fieldDescriptor = new FieldDescriptorBuilder<>(type).fieldName(descriptor.getName()).build(); if (fieldDescriptor.fieldName().equals(dataIdField)) { dataIdFieldDescriptor = fieldDescriptor; if (removeDataIDFromFieldList) { continue; } } fieldDescriptors.add(fieldDescriptor); } } catch (IntrospectionException e) { // Ignore } // Get public fields final Field[] fields = dataClass.getFields(); for (final Field field : fields) { if (addedFields.contains(field.getName())) { continue; } final Class type = normalizeClass(field.getType()); checkWriterForClass(type); accessors.put(field.getName(), new FieldAccessor<>(field)); mutators.put(field.getName(), new FieldMutator<>(field)); final FieldDescriptor fieldDescriptor = new FieldDescriptorBuilder<>(type).fieldName(field.getName()).build(); if (fieldDescriptor.fieldName().equals(dataIdField)) { dataIdFieldDescriptor = fieldDescriptor; if (removeDataIDFromFieldList) { continue; } } fieldDescriptors.add(fieldDescriptor); } } return new BasicDataTypeAdapter<>( typeName, dataClass, fieldDescriptors.toArray(new FieldDescriptor[fieldDescriptors.size()]), dataIdFieldDescriptor, accessors, mutators); } private static void checkWriterForClass(final Class type) 
{ final FieldWriter writer = FieldUtils.getDefaultWriterForClass(type); if (writer == null) { throw new RuntimeException("No field reader/writer available for type: " + type.getName()); } } private static interface Accessor { Object get(T entry); int byteCount(); void toBinary(ByteBuffer buffer); void fromBinary(final Class dataClass, ByteBuffer buffer); } private static class MethodAccessor implements Accessor { private Method accessor; public MethodAccessor() {} public MethodAccessor(final Method accessorMethod) { this.accessor = accessorMethod; } @Override public Object get(final T entry) { try { return accessor.invoke(entry); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { throw new RuntimeException("Unable to get value from entry", e); } } private byte[] nameBytes; @Override public int byteCount() { nameBytes = StringUtils.stringToBinary(accessor.getName()); return nameBytes.length + VarintUtils.unsignedIntByteLength(nameBytes.length); } @Override public void toBinary(final ByteBuffer buffer) { VarintUtils.writeUnsignedInt(nameBytes.length, buffer); buffer.put(nameBytes); } @Override public void fromBinary(final Class dataClass, final ByteBuffer buffer) { nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(nameBytes); final String name = StringUtils.stringFromBinary(nameBytes); try { accessor = dataClass.getMethod(name); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException("Unable to find accessor method: " + name); } } } private static class FieldAccessor implements Accessor { private Field field; public FieldAccessor() {} public FieldAccessor(final Field field) { this.field = field; } @Override public Object get(final T entry) { try { return field.get(entry); } catch (IllegalArgumentException | IllegalAccessException e) { throw new RuntimeException("Unable to get value from entry", e); } } private byte[] nameBytes; @Override public int byteCount() { nameBytes = 
StringUtils.stringToBinary(field.getName()); return nameBytes.length + VarintUtils.unsignedIntByteLength(nameBytes.length); } @Override public void toBinary(final ByteBuffer buffer) { VarintUtils.writeUnsignedInt(nameBytes.length, buffer); buffer.put(nameBytes); } @Override public void fromBinary(final Class dataClass, final ByteBuffer buffer) { nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(nameBytes); final String name = StringUtils.stringFromBinary(nameBytes); field = findField(dataClass, name); if (field == null) { throw new RuntimeException("Unable to find field: " + name); } field.setAccessible(true); } } private static interface Mutator { void set(T entry, Object value); int byteCount(); void toBinary(ByteBuffer buffer); void fromBinary(final Class dataClass, final ByteBuffer buffer); } private static class MethodMutator implements Mutator { private Method mutator; public MethodMutator() {} public MethodMutator(final Method mutator) { this.mutator = mutator; } @Override public void set(final T entry, final Object object) { try { mutator.invoke(entry, object); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { throw new RuntimeException("Unable to set value on entry", e); } } private byte[] nameBytes; private byte[] parameterClassBytes; @Override public int byteCount() { nameBytes = StringUtils.stringToBinary(mutator.getName()); parameterClassBytes = StringUtils.stringToBinary(mutator.getParameterTypes()[0].getName()); return nameBytes.length + parameterClassBytes.length + VarintUtils.unsignedIntByteLength(nameBytes.length) + VarintUtils.unsignedIntByteLength(parameterClassBytes.length) + 1; } @Override public void toBinary(final ByteBuffer buffer) { VarintUtils.writeUnsignedInt(nameBytes.length, buffer); buffer.put(nameBytes); VarintUtils.writeUnsignedInt(parameterClassBytes.length, buffer); buffer.put(parameterClassBytes); if (mutator.getParameterTypes()[0].isPrimitive()) { buffer.put((byte) 
1); } else { buffer.put((byte) 0); } } @Override public void fromBinary(final Class dataClass, final ByteBuffer buffer) { nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(nameBytes); final String name = StringUtils.stringFromBinary(nameBytes); parameterClassBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(parameterClassBytes); final String parameterClassName = StringUtils.stringFromBinary(parameterClassBytes); final boolean isPrimitive = buffer.hasRemaining() && buffer.get() == (byte) 1; Class parameterClass; try { if (isPrimitive) { parameterClass = getPrimitiveClass(parameterClassName); } else { parameterClass = Class.forName(parameterClassName); } } catch (ClassNotFoundException e1) { throw new RuntimeException( "Unable to find class for mutator parameter: " + parameterClassName); } try { mutator = dataClass.getMethod(name, parameterClass); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException("Unable to find mutator method: " + name); } } } private static class FieldMutator implements Mutator { private Field field; public FieldMutator() {} public FieldMutator(final Field field) { this.field = field; } @Override public void set(final T entry, final Object object) { try { field.set(entry, object); } catch (IllegalArgumentException | IllegalAccessException e) { throw new RuntimeException("Unable to set value on entry", e); } } private byte[] nameBytes; @Override public int byteCount() { nameBytes = StringUtils.stringToBinary(field.getName()); return nameBytes.length + VarintUtils.unsignedIntByteLength(nameBytes.length); } @Override public void toBinary(final ByteBuffer buffer) { VarintUtils.writeUnsignedInt(nameBytes.length, buffer); buffer.put(nameBytes); } @Override public void fromBinary(final Class dataClass, final ByteBuffer buffer) { nameBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(nameBytes); final String name = StringUtils.stringFromBinary(nameBytes); field = 
findField(dataClass, name); if (field == null) { throw new RuntimeException("Unable to find field: " + name); } field.setAccessible(true); } } private static Field findField(final Class dataClass, final String fieldName) { Class current = dataClass; while (!current.equals(Object.class)) { try { final Field field = current.getDeclaredField(fieldName); return field; } catch (SecurityException | NoSuchFieldException e) { // Do nothing } current = current.getSuperclass(); } return null; } public static Class normalizeClass(final Class sourceClass) { if (boolean.class.equals(sourceClass)) { return Boolean.class; } if (char.class.equals(sourceClass)) { return Character.class; } if (byte.class.equals(sourceClass)) { return Byte.class; } if (short.class.equals(sourceClass)) { return Short.class; } if (int.class.equals(sourceClass)) { return Integer.class; } if (long.class.equals(sourceClass)) { return Long.class; } if (float.class.equals(sourceClass)) { return Float.class; } if (double.class.equals(sourceClass)) { return Double.class; } return sourceClass; } public static Class getPrimitiveClass(final String className) throws ClassNotFoundException { switch (className) { case "boolean": return boolean.class; case "char": return char.class; case "byte": return byte.class; case "short": return short.class; case "int": return int.class; case "long": return long.class; case "float": return float.class; case "double": return double.class; default: break; } throw new ClassNotFoundException("Unknown primitive class " + className); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/BinaryDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Map; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.RowBuilder; public class BinaryDataAdapter implements DataTypeAdapter> { protected static final String SINGLETON_FIELD_NAME = "FIELD"; protected static final FieldDescriptor SINGLETON_FIELD_DESCRIPTOR = new FieldDescriptorBuilder<>(byte[].class).fieldName(SINGLETON_FIELD_NAME).build(); protected static final FieldDescriptor[] SINGLETON_FIELD_DESCRIPTOR_ARRAY = new FieldDescriptor[] {SINGLETON_FIELD_DESCRIPTOR}; private String typeName; public BinaryDataAdapter() { typeName = null; } public BinaryDataAdapter(final String typeName) { super(); this.typeName = typeName; } @Override public byte[] toBinary() { return StringUtils.stringToBinary(typeName); } @Override public void fromBinary(final byte[] bytes) { typeName = StringUtils.stringFromBinary(bytes); } @Override public String getTypeName() { return typeName; } @Override public byte[] getDataId(final Pair entry) { return entry.getKey(); } @Override public Object getFieldValue(final Pair entry, final String fieldName) { return entry.getValue(); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public Class getDataClass() { return Pair.class; } @Override public RowBuilder> newRowBuilder( final FieldDescriptor[] outputFieldDescriptors) { return new BinaryDataRowBuilder(); } @Override public FieldDescriptor[] getFieldDescriptors() { return SINGLETON_FIELD_DESCRIPTOR_ARRAY; } @Override 
public FieldDescriptor getFieldDescriptor(final String fieldName) { if (SINGLETON_FIELD_NAME.equals(fieldName)) { return SINGLETON_FIELD_DESCRIPTOR; } return null; } protected static class BinaryDataRowBuilder implements RowBuilder> { protected byte[] fieldValue; @Override public void setField(final String fieldName, final Object fieldValue) { if (SINGLETON_FIELD_NAME.equals(fieldName) && ((fieldValue == null) || (fieldValue instanceof byte[]))) { this.fieldValue = (byte[]) fieldValue; } } @Override public void setFields(final Map values) { if (values.containsKey(SINGLETON_FIELD_NAME)) { final Object obj = values.get(SINGLETON_FIELD_NAME); setField(SINGLETON_FIELD_NAME, obj); } } @Override public Pair buildRow(final byte[] dataId) { return Pair.of(dataId, fieldValue); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FieldDescriptor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Set; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.persist.Persistable; /** * Describes an adapter field, including the field name, the class of the field, and any index * hints. Each field may have one or more index hints that can be used to help GeoWave determine how * the adapter should be mapped to any arbitrary index. * * @param the adapter field type */ public interface FieldDescriptor extends Persistable { /** * @return the class of the data represented by this field */ Class bindingClass(); /** * @return the name of the field */ String fieldName(); /** * @return the set of index hints that this field contains */ Set indexHints(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FieldDescriptorBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Set; import org.locationtech.geowave.core.index.IndexDimensionHint; import com.google.common.collect.Sets; /** * A builder for adapter field descriptors. * * @param the adapter field type * @param the field descriptor class * @param the builder class */ public class FieldDescriptorBuilder, B extends FieldDescriptorBuilder> { protected final Class bindingClass; protected String fieldName; protected Set indexHints = Sets.newHashSet(); /** * Create a new `FeatureDescriptorBuilder` for a field of the given type. * * @param bindingClass the adapter field type */ public FieldDescriptorBuilder(final Class bindingClass) { this.bindingClass = bindingClass; } /** * Supply a field name for the field. * * @param fieldName the name of the field * @return this builder */ public B fieldName(final String fieldName) { this.fieldName = fieldName; return (B) this; } /** * Add an index hint to the field. Index hints are used by GeoWave to determine how an adapter * should be mapped to an index. * * @param hint the index hint to set * @return this builder */ public B indexHint(final IndexDimensionHint hint) { this.indexHints.add(hint); return (B) this; } /** * Builds the field descriptor. 
* * @return the field descriptor */ public F build() { return (F) new BaseFieldDescriptor<>(bindingClass, fieldName, indexHints); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FitToIndexPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Collections; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.PersistentDataset; public class FitToIndexPersistenceEncoding extends AdapterPersistenceEncoding { private final InsertionIds insertionIds; public FitToIndexPersistenceEncoding( final short internalAdapterId, final byte[] dataId, final PersistentDataset commonData, final PersistentDataset adapterExtendedData, final byte[] partitionKey, final byte[] sortKey) { super(internalAdapterId, dataId, commonData, adapterExtendedData); insertionIds = new InsertionIds(partitionKey, sortKey == null ? null : Collections.singletonList(sortKey)); } @Override public InsertionIds getInsertionIds(final Index index) { return insertionIds; } @Override public boolean isDeduplicationEnabled() { return false; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/FullAsyncPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.concurrent.CompletableFuture; import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.entities.GeoWaveValue; /** * This is an implementation of persistence encoding that retrieves fields asynchronously */ public class FullAsyncPersistenceEncoding extends IndexedAdapterPersistenceEncoding implements AsyncPersistenceEncoding { private final BatchDataIndexRetrieval asyncRetrieval; private CompletableFuture fieldValuesFuture = null; public FullAsyncPersistenceEncoding( final short adapterId, final byte[] dataId, final byte[] partitionKey, final byte[] sortKey, final int duplicateCount, final BatchDataIndexRetrieval asyncRetrieval) { super( adapterId, dataId, partitionKey, sortKey, duplicateCount, new MultiFieldPersistentDataset<>(), new MultiFieldPersistentDataset(), new MultiFieldPersistentDataset<>()); this.asyncRetrieval = asyncRetrieval; } /* * (non-Javadoc) * * @see * org.locationtech.geowave.core.store.adapter.AsyncPersistenceEncoding#getFieldValuesFuture() */ @Override public CompletableFuture getFieldValuesFuture() { return fieldValuesFuture; } @Override public boolean isAsync() { return fieldValuesFuture != null; } @Override public PersistentDataset getAdapterExtendedData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getAdapterExtendedData(); } @Override public PersistentDataset 
getUnknownData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getUnknownData(); } @Override public PersistentDataset getCommonData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getCommonData(); } private void deferredReadFields() { fieldValuesFuture = asyncRetrieval.getDataAsync(getInternalAdapterId(), getDataId()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/IndexDependentDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Iterator; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; public interface IndexDependentDataAdapter extends DataTypeAdapter { public Iterator convertToIndex(Index index, T originalEntry); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/IndexedAdapterPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** * This is an implements of persistence encoding that also contains all of the extended data values * used to form the native type supported by this adapter. It also contains information about the * persisted object within a particular index such as the insertion ID in the index and the number * of duplicates for this entry in the index, and is used when reading data from the index. 
*/ public class IndexedAdapterPersistenceEncoding extends AbstractAdapterPersistenceEncoding { public IndexedAdapterPersistenceEncoding( final short adapterId, final byte[] dataId, final byte[] partitionKey, final byte[] sortKey, final int duplicateCount, final PersistentDataset commonData, final PersistentDataset unknownData, final PersistentDataset adapterExtendedData) { super( adapterId, dataId, partitionKey, sortKey, duplicateCount, commonData, unknownData, adapterExtendedData); } @Override public void convertUnknownValues( final InternalDataAdapter adapter, final CommonIndexModel model) { final Set> unknownDataValues = getUnknownData().getValues().entrySet(); for (final Entry v : unknownDataValues) { final FieldReader reader = adapter.getReader(v.getKey()); final Object value = reader.readField(v.getValue()); adapterExtendedData.addValue(v.getKey(), value); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalAdapterStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; /** * This is responsible for persisting adapter/Internal Adapter mappings (either in memory or to disk * depending on the implementation). */ public interface InternalAdapterStore { public String[] getTypeNames(); public short[] getAdapterIds(); public String getTypeName(short adapterId); public Short getAdapterId(String typeName); public short getInitialAdapterId(String typeName); /** * If an adapter is already associated with an internal Adapter returns false. Adapter can only be * associated with internal adapter once. * * @param typeName the type to add * @return the internal ID */ public short addTypeName(String typeName); /** * Remove a mapping from the store by type name. * * @param typeName the type to remove */ public boolean remove(String typeName); /** * Remove a mapping from the store by internal adapter ID. * * @param adapterId the internal adapter ID of the adapter to remove * @return {@code true} if the type was removed */ public boolean remove(short adapterId); /** * Remove all mappings from the store. */ public void removeAll(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalAdapterUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.data.PersistentDataset; class InternalAdapterUtils { @SuppressWarnings("unchecked") protected static Object entryToIndexValue( final IndexFieldMapper fieldMapper, final DataTypeAdapter adapter, final T entry) { List fieldValues = (List) Arrays.stream(fieldMapper.getAdapterFields()).map( fieldName -> adapter.getFieldValue(entry, fieldName)).collect(Collectors.toList()); if (fieldValues.contains(null)) { return null; } return fieldMapper.toIndex(fieldValues); } @SuppressWarnings("unchecked") protected static Object entryToIndexValue( final IndexFieldMapper fieldMapper, final DataTypeAdapter adapter, final PersistentDataset adapterPersistenceEncoding) { final List fieldValues = (List) Arrays.stream(fieldMapper.getAdapterFields()).map( adapterPersistenceEncoding::getValue).collect(Collectors.toList()); if (fieldValues.contains(null)) { return null; } return fieldMapper.toIndex(fieldValues); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.index.CommonIndexModel; public interface InternalDataAdapter extends DataTypeAdapter { short getAdapterId(); DataTypeAdapter getAdapter(); VisibilityHandler getVisibilityHandler(); int getPositionOfOrderedField(CommonIndexModel model, String fieldName); String getFieldNameForPosition(CommonIndexModel model, int position); AdapterPersistenceEncoding encode(T entry, AdapterToIndexMapping indexMapping, final Index index); T decode( IndexedAdapterPersistenceEncoding data, AdapterToIndexMapping indexMapping, final Index index); boolean isCommonIndexField(AdapterToIndexMapping indexMapping, String fieldName); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/InternalDataAdapterImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Maps; /** * This class generically supports most of the operations necessary to implement a Data Adapter and * can be easily extended to support specific data types.
Many of the details are handled by * mapping IndexFieldHandler's based on either types or exact dimensions. These handler mappings can * be supplied in the constructor. The dimension matching handlers are used first when trying to * decode a persistence encoded value. This can be done specifically to match a field (for example * if there are multiple ways of encoding/decoding the same type). Otherwise the type matching * handlers will simply match any field with the same type as its generic field type. * * @param The type for the entries handled by this adapter */ public class InternalDataAdapterImpl implements InternalDataAdapter { private static final Logger LOGGER = LoggerFactory.getLogger(InternalDataAdapterImpl.class); // this is not thread-safe, but should be ok given the only modification is on initialization // which is a synchronized operation /** Map of Field Readers associated with a Field ID */ private final Map> mapOfFieldNameToReaders = new HashMap<>(); /** Map of Field Writers associated with a Field ID */ private final Map> mapOfFieldNameToWriters = new HashMap<>(); private transient Map fieldToPositionMap = null; private transient Map positionToFieldMap = null; private transient Map> modelToDimensionsMap = null; private transient volatile boolean positionMapsInitialized = false; private Object MUTEX = new Object(); protected DataTypeAdapter adapter; protected short adapterId; protected VisibilityHandler visibilityHandler = null; public InternalDataAdapterImpl() {} public InternalDataAdapterImpl(final DataTypeAdapter adapter, final short adapterId) { this(adapter, adapterId, null); } public InternalDataAdapterImpl( final DataTypeAdapter adapter, final short adapterId, final VisibilityHandler visibilityHandler) { this.adapter = adapter; this.adapterId = adapterId; this.visibilityHandler = visibilityHandler; } @Override public VisibilityHandler getVisibilityHandler() { return visibilityHandler; } @edu.umd.cs.findbugs.annotations.SuppressFBWarnings() 
protected List getDimensionFieldNames(final CommonIndexModel model) { if (modelToDimensionsMap == null) { synchronized (MUTEX) { if (modelToDimensionsMap == null) { modelToDimensionsMap = new ConcurrentHashMap<>(); } } } final List retVal = modelToDimensionsMap.get(model.getId()); if (retVal != null) { return retVal; } final List dimensionFieldNames = DataStoreUtils.getUniqueDimensionFields(model); modelToDimensionsMap.put(model.getId(), dimensionFieldNames); return dimensionFieldNames; } @Override public AdapterPersistenceEncoding encode( final T entry, final AdapterToIndexMapping indexMapping, final Index index) { final PersistentDataset indexData = new MultiFieldPersistentDataset<>(); final Set nativeFieldsInIndex = new HashSet<>(); final Set dimensionFieldsUsed = new HashSet<>(); if (indexMapping != null) { for (final IndexFieldMapper indexField : indexMapping.getIndexFieldMappers()) { if (dimensionFieldsUsed.add(indexField.indexFieldName())) { final Object value = InternalAdapterUtils.entryToIndexValue(indexField, adapter, entry); if (value == null) { // The field value cannot be mapped to the index (such as null field values) return null; } indexData.addValue(indexField.indexFieldName(), value); Collections.addAll(nativeFieldsInIndex, indexField.getAdapterFields()); } } } final PersistentDataset extendedData = new MultiFieldPersistentDataset<>(); // now for the other data for (final FieldDescriptor desc : adapter.getFieldDescriptors()) { final String fieldName = desc.fieldName(); if (nativeFieldsInIndex.contains(fieldName)) { continue; } extendedData.addValue(fieldName, adapter.getFieldValue(entry, fieldName)); } return new AdapterPersistenceEncoding(adapterId, getDataId(entry), indexData, extendedData); } @Override public InternalDataAdapter asInternalAdapter(final short internalAdapterId) { return adapter.asInternalAdapter(internalAdapterId); } @Override public InternalDataAdapter asInternalAdapter( final short internalAdapterId, final VisibilityHandler 
visibilityHandler) { return adapter.asInternalAdapter(internalAdapterId, visibilityHandler); } @Override public boolean isCommonIndexField( final AdapterToIndexMapping indexMapping, final String fieldName) { for (final IndexFieldMapper indexField : indexMapping.getIndexFieldMappers()) { if (Arrays.stream(indexField.getAdapterFields()).anyMatch(field -> field.equals(fieldName))) { return true; } } return false; } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public T decode( final IndexedAdapterPersistenceEncoding data, final AdapterToIndexMapping indexMapping, final Index index) { final RowBuilder builder = getRowBuilder(indexMapping); if (indexMapping != null) { for (final IndexFieldMapper fieldMapper : indexMapping.getIndexFieldMappers()) { final String fieldName = fieldMapper.indexFieldName(); final Object value = data.getCommonData().getValue(fieldName); if (value == null) { continue; } ((IndexFieldMapper) fieldMapper).toAdapter(value, builder); } } builder.setFields(data.getAdapterExtendedData().getValues()); return builder.buildRow(data.getDataId()); } @Override public byte[] toBinary() { final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); final byte[] visibilityHanlderBytes = PersistenceUtils.toBinary(visibilityHandler); final ByteBuffer buffer = ByteBuffer.allocate( Short.BYTES + VarintUtils.unsignedIntByteLength(adapterBytes.length) + adapterBytes.length + VarintUtils.unsignedIntByteLength(visibilityHanlderBytes.length) + visibilityHanlderBytes.length); buffer.putShort(adapterId); VarintUtils.writeUnsignedInt(adapterBytes.length, buffer); buffer.put(adapterBytes); VarintUtils.writeUnsignedInt(visibilityHanlderBytes.length, buffer); buffer.put(visibilityHanlderBytes); return buffer.array(); } @SuppressWarnings("unchecked") @Override public void fromBinary(final byte[] bytes) { if ((bytes == null) || (bytes.length == 0)) { LOGGER.warn("Unable to deserialize data adapter. 
Binary is incomplete."); return; } final ByteBuffer buffer = ByteBuffer.wrap(bytes); adapterId = buffer.getShort(); final byte[] adapterBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(adapterBytes); adapter = (DataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); final byte[] visibilityHandlerBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(visibilityHandlerBytes); visibilityHandler = (VisibilityHandler) PersistenceUtils.fromBinary(visibilityHandlerBytes); } @Override public FieldReader getReader(final String fieldName) { FieldReader reader = mapOfFieldNameToReaders.get(fieldName); // Check the map to see if a reader has already been found. if (reader == null) { // Reader not in Map, go to the adapter and get the reader reader = adapter.getReader(fieldName); // Add it to map for the next time mapOfFieldNameToReaders.put(fieldName, reader); } return reader; } @Override public FieldWriter getWriter(final String fieldName) { // Go to the map to get a writer for given fieldId FieldWriter writer = mapOfFieldNameToWriters.get(fieldName); // Check the map to see if a writer has already been found. 
if (writer == null) { // Writer not in Map, go to the adapter and get the writer writer = adapter.getWriter(fieldName); // Add it to map for the next time mapOfFieldNameToWriters.put(fieldName, writer); } return writer; } @Override public String getTypeName() { return adapter.getTypeName(); } @Override public byte[] getDataId(final T entry) { return adapter.getDataId(entry); } @Override public Object getFieldValue(final T entry, final String fieldName) { return adapter.getFieldValue(entry, fieldName); } @Override public Class getDataClass() { return adapter.getDataClass(); } private ThreadLocal> builder = null; public RowBuilder getRowBuilder(final AdapterToIndexMapping indexMapping) { if (builder == null) { final FieldDescriptor[] outputFieldDescriptors = adapter.getFieldDescriptors(); if (indexMapping != null) { indexMapping.getIndexFieldMappers().forEach( mapping -> mapping.transformFieldDescriptors(outputFieldDescriptors)); } builder = new ThreadLocal>() { @Override protected RowBuilder initialValue() { return adapter.newRowBuilder(outputFieldDescriptors); } }; } return builder.get(); } @Override public RowBuilder newRowBuilder(final FieldDescriptor[] outputFieldDescriptors) { return adapter.newRowBuilder(outputFieldDescriptors); } @Override public FieldDescriptor[] getFieldDescriptors() { return adapter.getFieldDescriptors(); } @Override public FieldDescriptor getFieldDescriptor(final String fieldName) { return adapter.getFieldDescriptor(fieldName); } @Override public short getAdapterId() { return adapterId; } @Override public DataTypeAdapter getAdapter() { return adapter; } @Override public int getPositionOfOrderedField(final CommonIndexModel model, final String fieldName) { int numDimensions; if (model != null) { final List dimensionFieldNames = getDimensionFieldNames(model); // first check CommonIndexModel dimensions if (dimensionFieldNames.contains(fieldName)) { return dimensionFieldNames.indexOf(fieldName); } numDimensions = dimensionFieldNames.size(); } 
else { numDimensions = 0; } if (!positionMapsInitialized) { synchronized (MUTEX) { initializePositionMaps(); } } // next check other fields // dimension fields must be first, add padding final Integer position = fieldToPositionMap.get(fieldName); if (position == null) { return -1; } return position.intValue() + numDimensions; } @Override public String getFieldNameForPosition(final CommonIndexModel model, final int position) { final List dimensionFieldNames = getDimensionFieldNames(model); if (position >= dimensionFieldNames.size()) { final int adjustedPosition = position - dimensionFieldNames.size(); if (!positionMapsInitialized) { synchronized (MUTEX) { initializePositionMaps(); } } // check other fields return positionToFieldMap.get(adjustedPosition); } // otherwise check CommonIndexModel dimensions return dimensionFieldNames.get(position); } private void initializePositionMaps() { if (positionMapsInitialized) { return; } try { fieldToPositionMap = Maps.newHashMap(); positionToFieldMap = Maps.newHashMap(); final FieldDescriptor[] fields = adapter.getFieldDescriptors(); for (int i = 0; i < fields.length; i++) { final String currFieldName = fields[i].fieldName(); fieldToPositionMap.put(currFieldName, i); positionToFieldMap.put(i, currFieldName); } positionMapsInitialized = true; } catch (final Exception e) { LOGGER.error("Unable to initialize position map, continuing anyways", e); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/LazyReadPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.List; import java.util.function.Supplier; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.entities.GeoWaveValueImpl; import org.locationtech.geowave.core.store.flatten.BitmaskUtils; import org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.util.DataStoreUtils; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * This is an implements of persistence encoding that also contains all of the extended data values * used to form the native type supported by this adapter. It also contains information about the * persisted object within a particular index such as the insertion ID in the index and the number * of duplicates for this entry in the index, and is used when reading data from the index. 
*/ public class LazyReadPersistenceEncoding extends IndexedAdapterPersistenceEncoding { private FieldValueReader deferredFieldReader; public LazyReadPersistenceEncoding( final short adapterId, final byte[] dataId, final byte[] partitionKey, final byte[] sortKey, final int duplicateCount, final InternalDataAdapter dataAdapter, final CommonIndexModel indexModel, final AdapterToIndexMapping indexMapping, final byte[] fieldSubsetBitmask, final GeoWaveValue[] fieldValues, final boolean isSecondaryIndex) { super( adapterId, dataId, partitionKey, sortKey, duplicateCount, new MultiFieldPersistentDataset<>(), new MultiFieldPersistentDataset(), new MultiFieldPersistentDataset<>()); deferredFieldReader = new InstanceFieldValueReader( fieldSubsetBitmask, dataAdapter, indexModel, indexMapping, fieldValues, isSecondaryIndex); } public LazyReadPersistenceEncoding( final short adapterId, final byte[] dataId, final byte[] partitionKey, final byte[] sortKey, final int duplicateCount, final InternalDataAdapter dataAdapter, final CommonIndexModel indexModel, final AdapterToIndexMapping indexMapping, final byte[] fieldSubsetBitmask, final Supplier fieldValues) { super( adapterId, dataId, partitionKey, sortKey, duplicateCount, new MultiFieldPersistentDataset<>(), new MultiFieldPersistentDataset(), new MultiFieldPersistentDataset<>()); deferredFieldReader = new SupplierFieldValueReader( fieldSubsetBitmask, dataAdapter, indexModel, indexMapping, fieldValues, true); } @Override public PersistentDataset getAdapterExtendedData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getAdapterExtendedData(); } @Override public PersistentDataset getUnknownData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getUnknownData(); } @Override public PersistentDataset getCommonData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getCommonData(); } @SuppressFBWarnings(justification 
= "This is intentional to avoid unnecessary sync") private void deferredReadFields() { if (deferredFieldReader != null) { // this is intentional to check for null twice to avoid extra unnecessary synchronization synchronized (this) { if (deferredFieldReader != null) { deferredFieldReader.readValues(); deferredFieldReader = null; } } } } private abstract class FieldValueReader { private final byte[] fieldSubsetBitmask; private final InternalDataAdapter dataAdapter; private final CommonIndexModel indexModel; private final AdapterToIndexMapping indexMapping; private final boolean isSecondaryIndex; public FieldValueReader( final byte[] fieldSubsetBitmask, final InternalDataAdapter dataAdapter, final CommonIndexModel indexModel, final AdapterToIndexMapping indexMapping, final boolean isSecondaryIndex) { super(); this.fieldSubsetBitmask = fieldSubsetBitmask; this.dataAdapter = dataAdapter; this.indexModel = indexModel; this.indexMapping = indexMapping; this.isSecondaryIndex = isSecondaryIndex; } protected void readValues() { for (final GeoWaveValue value : getFieldValues()) { byte[] byteValue = value.getValue(); byte[] fieldMask = value.getFieldMask(); if (fieldSubsetBitmask != null) { final byte[] newBitmask = BitmaskUtils.generateANDBitmask(fieldMask, fieldSubsetBitmask); byteValue = BitmaskUtils.constructNewValue(byteValue, fieldMask, newBitmask); if ((byteValue == null) || (byteValue.length == 0)) { continue; } fieldMask = newBitmask; } readValue(new GeoWaveValueImpl(fieldMask, value.getVisibility(), byteValue)); } } abstract protected GeoWaveValue[] getFieldValues(); private void readValue(final GeoWaveValue value) { final List fieldInfos = DataStoreUtils.decomposeFlattenedFields( value.getFieldMask(), value.getValue(), value.getVisibility(), -2).getFieldsRead(); for (final FlattenedFieldInfo fieldInfo : fieldInfos) { final String fieldName = dataAdapter.getFieldNameForPosition( isSecondaryIndex ? 
DataIndexUtils.DATA_ID_INDEX.getIndexModel() : indexModel, fieldInfo.getFieldPosition()); FieldReader indexFieldReader = null; if (!isSecondaryIndex) { indexFieldReader = indexModel.getReader(fieldName); } if (indexFieldReader != null) { final Object indexValue = indexFieldReader.readField(fieldInfo.getValue()); commonData.addValue(fieldName, indexValue); } else { final FieldReader extFieldReader = dataAdapter.getReader(fieldName); if (extFieldReader != null) { final Object objValue = extFieldReader.readField(fieldInfo.getValue()); // TODO GEOWAVE-1018, do we care about visibility adapterExtendedData.addValue(fieldName, objValue); } else { LOGGER.error("field reader not found for data entry, the value may be ignored"); unknownData.addValue(fieldName, fieldInfo.getValue()); } } } if (isSecondaryIndex) { for (IndexFieldMapper mapper : indexMapping.getIndexFieldMappers()) { final Object commonIndexValue = InternalAdapterUtils.entryToIndexValue( mapper, dataAdapter.getAdapter(), adapterExtendedData); commonData.addValue(mapper.indexFieldName(), commonIndexValue); } } } } private class InstanceFieldValueReader extends FieldValueReader { private final GeoWaveValue[] fieldValues; public InstanceFieldValueReader( final byte[] fieldSubsetBitmask, final InternalDataAdapter dataAdapter, final CommonIndexModel indexModel, final AdapterToIndexMapping indexMapping, final GeoWaveValue[] fieldValues, final boolean isSecondaryIndex) { super(fieldSubsetBitmask, dataAdapter, indexModel, indexMapping, isSecondaryIndex); this.fieldValues = fieldValues; } @Override protected GeoWaveValue[] getFieldValues() { return fieldValues; } } private class SupplierFieldValueReader extends FieldValueReader { private final Supplier fieldValues; public SupplierFieldValueReader( final byte[] fieldSubsetBitmask, final InternalDataAdapter dataAdapter, final CommonIndexModel indexModel, final AdapterToIndexMapping indexMapping, final Supplier fieldValues, final boolean isSecondaryIndex) { 
super(fieldSubsetBitmask, dataAdapter, indexModel, indexMapping, isSecondaryIndex); this.fieldValues = fieldValues; } @Override protected GeoWaveValue[] getFieldValues() { return fieldValues.get(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/MapRowBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Map; import java.util.stream.Collectors; import org.locationtech.geowave.core.store.api.RowBuilder; import com.beust.jcommander.internal.Maps; public class MapRowBuilder implements RowBuilder> { private final Map sourceMap; public MapRowBuilder() { sourceMap = Maps.newHashMap(); } public MapRowBuilder(final Map sourceMap) { this.sourceMap = sourceMap; } @Override public void setField(String fieldName, Object fieldValue) { sourceMap.put(fieldName, fieldValue); } @Override public void setFields(Map values) { sourceMap.putAll(values); } @Override public Map buildRow(byte[] dataId) { final Map returnValue = sourceMap.entrySet().stream().collect( Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); sourceMap.clear(); return returnValue; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/PartialAsyncPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.concurrent.CompletableFuture; import java.util.function.Supplier; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** * /** This is an implementation of persistence encoding that retrieves all of the extended data * values asynchronously but is supplied the common index values */ public class PartialAsyncPersistenceEncoding extends LazyReadPersistenceEncoding implements AsyncPersistenceEncoding { private final BatchDataIndexRetrieval asyncRetrieval; private CompletableFuture fieldValuesFuture = null; public PartialAsyncPersistenceEncoding( final short adapterId, final byte[] dataId, final byte[] partitionKey, final byte[] sortKey, final int duplicateCount, final BatchDataIndexRetrieval asyncRetrieval, final InternalDataAdapter dataAdapter, final CommonIndexModel indexModel, final AdapterToIndexMapping indexMapping, final byte[] fieldSubsetBitmask, final Supplier fieldValues) { super( adapterId, dataId, partitionKey, sortKey, duplicateCount, dataAdapter, indexModel, indexMapping, fieldSubsetBitmask, fieldValues); this.asyncRetrieval = asyncRetrieval; } @Override public CompletableFuture getFieldValuesFuture() { return fieldValuesFuture; } @Override public boolean isAsync() { return fieldValuesFuture != null; } @Override public PersistentDataset 
getAdapterExtendedData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getAdapterExtendedData(); } @Override public PersistentDataset getUnknownData() { // defer any reading of fieldValues until necessary deferredReadFields(); return super.getUnknownData(); } private void deferredReadFields() { fieldValuesFuture = asyncRetrieval.getDataAsync(getInternalAdapterId(), getDataId()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/PersistentAdapterStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; public interface PersistentAdapterStore extends AdapterStore> { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/RowMergingDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.io.IOException; import java.util.Collections; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; public interface RowMergingDataAdapter extends DataTypeAdapter { default RowTransform getTransform() { return new SimpleRowTransform(mergeableClassId()); } default Short mergeableClassId() { return null; } default Map getOptions( final short internalAdapterId, final Map existingOptions) { return Collections.EMPTY_MAP; } static interface RowTransform extends Persistable { void initOptions(final Map options) throws IOException; M getRowAsMergeableObject( final short internalAdapterId, final ByteArray fieldId, final byte[] rowValueBinary); byte[] getBinaryFromMergedObject(final M rowObject); String getTransformName(); int getBaseTransformPriority(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/SimpleAbstractDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistableFactory; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.PersistableReader; import org.locationtech.geowave.core.store.data.field.PersistableWriter; abstract public class SimpleAbstractDataAdapter implements DataTypeAdapter { protected static final String SINGLETON_FIELD_NAME = "FIELD"; protected FieldDescriptor singletonFieldDescriptor; private FieldReader reader = null; private FieldWriter writer = null; public SimpleAbstractDataAdapter() { super(); singletonFieldDescriptor = new FieldDescriptorBuilder<>(getDataClass()).fieldName(SINGLETON_FIELD_NAME).build(); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public Object getFieldValue(final T entry, final String fieldName) { return entry; } @Override public RowBuilder newRowBuilder(final FieldDescriptor[] outputFieldDescriptors) { return new SingletonFieldRowBuilder(); } @Override public FieldDescriptor[] getFieldDescriptors() { return new FieldDescriptor[] {singletonFieldDescriptor}; } @Override public FieldDescriptor getFieldDescriptor(final String fieldName) { return singletonFieldDescriptor; } @Override public FieldWriter getWriter(final String fieldName) { if (writer == null) { writer = new PersistableWriter(); } return writer; } @Override public FieldReader getReader(final 
String fieldName) { if (reader == null) { reader = new PersistableReader( PersistableFactory.getInstance().getClassIdMapping().get(getDataClass())); } return reader; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/SimpleRowTransform.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.persist.PersistableFactory; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform; public class SimpleRowTransform implements RowTransform { private Short classId; public SimpleRowTransform() { this(null); } public SimpleRowTransform(final Short classId) { this.classId = classId; } @Override public byte[] toBinary() { if (classId != null) { return ByteBuffer.allocate(2).putShort(classId).array(); } return new byte[0]; } @Override public void fromBinary(final byte[] bytes) { if (bytes.length > 1) { classId = ByteBuffer.wrap(bytes).getShort(); } } @Override public void initOptions(final Map options) throws IOException {} @Override public M getRowAsMergeableObject( final short internalAdapterId, final ByteArray fieldId, final byte[] rowValueBinary) { // if class ID is non-null then we can short-circuit reading it 
from the binary if (classId != null) { final M newInstance = (M) PersistableFactory.getInstance().newInstance(classId); newInstance.fromBinary(rowValueBinary); return newInstance; } return (M) PersistenceUtils.fromBinary(rowValueBinary); } @Override public byte[] getBinaryFromMergedObject(final M rowObject) { // if class ID is non-null then we can short-circuit writing it too if (classId != null) { if (rowObject != null) { return rowObject.toBinary(); } return new byte[0]; } return PersistenceUtils.toBinary(rowObject); } @Override public String getTransformName() { return "default"; } @Override public int getBaseTransformPriority() { return 0; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/SingletonFieldRowBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import java.util.Map; import org.locationtech.geowave.core.store.api.RowBuilder; public class SingletonFieldRowBuilder implements RowBuilder { private T fieldValue; @SuppressWarnings("unchecked") @Override public void setField(final String fieldName, final Object fieldValue) { this.fieldValue = (T) fieldValue; } @SuppressWarnings("unchecked") @Override public void setFields(final Map values) { if (!values.isEmpty()) { this.fieldValue = (T) values.values().iterator().next(); } } @Override public T buildRow(final byte[] dataId) { return fieldValue; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/TransientAdapterStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import org.locationtech.geowave.core.store.api.DataTypeAdapter; public interface TransientAdapterStore extends AdapterStore> { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/AnnotatedFieldDescriptorBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.annotation; import java.lang.reflect.Field; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; /** * Interface for creating field descriptors from annotated fields. */ public interface AnnotatedFieldDescriptorBuilder { FieldDescriptor buildFieldDescriptor(Field field); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/BaseAnnotatedFieldDescriptorBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.annotation; import java.lang.reflect.Field; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; /** * Base implementation for annotated field descriptor builders. This builder is used by the * `@GeoWaveField` annotation. */ public class BaseAnnotatedFieldDescriptorBuilder implements AnnotatedFieldDescriptorBuilder { @Override public FieldDescriptor buildFieldDescriptor(Field field) { if (field.isAnnotationPresent(GeoWaveField.class)) { final GeoWaveField fieldAnnotation = field.getAnnotation(GeoWaveField.class); final String fieldName; if (fieldAnnotation.name().isEmpty()) { fieldName = field.getName(); } else { fieldName = fieldAnnotation.name(); } final String[] indexHints = fieldAnnotation.indexHints(); final FieldDescriptorBuilder builder = new FieldDescriptorBuilder<>(BasicDataTypeAdapter.normalizeClass(field.getType())); for (final String hint : indexHints) { builder.indexHint(new IndexDimensionHint(hint)); } return builder.fieldName(fieldName).build(); } throw new RuntimeException("Field is missing GeoWaveField annotation."); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/GeoWaveDataType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marker annotation to indicate that GeoWave should use annotations to determine fields and their
 * properties for basic data adapters. Applied to a data type class; the individual fields to
 * include are then marked with {@code @GeoWaveField}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface GeoWaveDataType {
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/GeoWaveField.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Annotation to mark data type fields for inclusion in the data type adapter. Processed at runtime
 * by {@code BaseAnnotatedFieldDescriptorBuilder} (via the {@code @GeoWaveFieldAnnotation}
 * meta-annotation below).
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@GeoWaveFieldAnnotation(fieldDescriptorBuilder = BaseAnnotatedFieldDescriptorBuilder.class)
public @interface GeoWaveField {
  /**
   * The name to use for the field. When left empty, the Java field name is used.
   */
  String name() default "";

  /**
   * Index hints to use for the field. Each entry is wrapped in an {@code IndexDimensionHint} by the
   * descriptor builder.
   */
  String[] indexHints() default {};
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/annotation/GeoWaveFieldAnnotation.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.annotation; import java.lang.annotation.Inherited; import java.lang.annotation.Target; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; /** * Annotation for GeoWave field annotations. This annotation provides a way to convert the annotated * field into a field descriptor. */ @Inherited @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.ANNOTATION_TYPE) public @interface GeoWaveFieldAnnotation { Class fieldDescriptorBuilder(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/exceptions/AdapterException.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.exceptions; public class AdapterException extends Exception { /** * */ private static final long serialVersionUID = 1L; public AdapterException(final String msg) { super(msg); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/ByteUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.statistics.histogram; /** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * *

http://www.apache.org/licenses/LICENSE-2.0 * *

Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
import java.math.BigDecimal;
import java.math.BigInteger;
import org.locationtech.geowave.core.index.lexicoder.Lexicoders;

/**
 * Utility methods for converting numeric values to and from fixed-width (8 byte) sortable byte
 * arrays via the long lexicoder, used by histogram statistics.
 */
public class ByteUtils {
  // Sentinel (all bits set) used when a value does not fit in 8 bytes.
  private static final byte[] INFINITY_BYTE =
      new byte[] {
          (byte) 0xff,
          (byte) 0xff,
          (byte) 0xff,
          (byte) 0xff,
          (byte) 0xff,
          (byte) 0xff,
          (byte) 0xff,
          (byte) 0xff};

  /**
   * Converts a double to an 8-byte lexicoded array. The value is truncated to its integral part
   * (via BigDecimal/BigInteger) before encoding.
   */
  public static byte[] toBytes(final double val) {
    final BigInteger tmp = new BigDecimal(val).toBigInteger();
    byte[] arr = Lexicoders.LONG.toByteArray(tmp.longValue());
    if ((arr[0] == (byte) 0) && (arr.length > 1) && (arr[1] == (byte) 0xff)) {
      // to represent {0xff, 0xff}, big integer uses {0x00, 0xff, 0xff}
      // due to the one's complement representation; drop the leading zero byte.
      final byte[] clipped = new byte[arr.length - 1];
      System.arraycopy(arr, 1, clipped, 0, arr.length - 1);
      arr = clipped;
    }
    if (arr.length > 8) {
      // value overflows 8 bytes; clamp to the "infinity" sentinel
      arr = INFINITY_BYTE;
    }
    return toPaddedBytes(arr);
  }

  /** Converts a long to an 8-byte lexicoded array (same clipping rules as the double variant). */
  public static byte[] toBytes(final long val) {
    byte[] arr = Lexicoders.LONG.toByteArray(val);
    if ((arr[0] == (byte) 0) && (arr.length > 1) && (arr[1] == (byte) 0xff)) {
      // to represent {0xff, 0xff}, big integer uses {0x00, 0xff, 0xff}
      // due to the one's complement representation; drop the leading zero byte.
      final byte[] clipped = new byte[arr.length - 1];
      System.arraycopy(arr, 1, clipped, 0, arr.length - 1);
      arr = clipped;
    }
    if (arr.length > 8) {
      arr = INFINITY_BYTE;
    }
    return toPaddedBytes(arr);
  }

  /** Decodes an (up to 8 byte) array back into a long, zero-padding short input on the right. */
  public static long toLong(final byte[] data) {
    return Lexicoders.LONG.fromByteArray(toPaddedBytes(data));
  }

  /**
   * Decodes an array into a double. Note the decoded long is simply widened to double, so only
   * integral values round-trip.
   */
  public static double toDouble(final byte[] data) {
    return Lexicoders.LONG.fromByteArray(toPaddedBytes(data));
  }

  /** Decodes the value immediately preceding the given byte prefix (see padding helper below). */
  public static double toDoubleAsPreviousPrefix(final byte[] data) {
    return Lexicoders.LONG.fromByteArray(toPreviousPrefixPaddedBytes(data));
  }

  /** Decodes the value immediately following the given byte prefix (see padding helper below). */
  public static double toDoubleAsNextPrefix(final byte[] data) {
    return Lexicoders.LONG.fromByteArray(toNextPrefixPaddedBytes(data));
  }

  /** Right-pads (or truncates) the input to exactly 8 bytes with zero bytes. */
  public static byte[] toPaddedBytes(final byte[] b) {
    if (b.length == 8) {
      return b;
    }
    final byte[] newD = new byte[8];
    System.arraycopy(b, 0, newD, 0, Math.min(b.length, 8));
    return newD;
  }

  /**
   * Returns the 8-byte array sorting immediately before the given prefix: trailing 0x00 bytes are
   * skipped and the last non-zero byte is decremented. All-zero input yields all zeros (no
   * predecessor exists).
   */
  public static byte[] toPreviousPrefixPaddedBytes(final byte[] b) {
    int offset = Math.min(8, b.length);
    while (offset > 0) {
      if (b[offset - 1] != (byte) 0x00) {
        break;
      }
      offset--;
    }
    final byte[] newD = new byte[8];
    if (offset == 0) {
      // entire prefix is zero bytes; there is no previous prefix
      return new byte[8];
    }
    System.arraycopy(b, 0, newD, 0, offset);
    newD[offset - 1]--;
    return newD;
  }

  /**
   * Returns the 8-byte array sorting immediately after the given prefix: trailing 0xFF bytes are
   * skipped and the last non-0xFF byte is incremented; a short all-0xFF prefix is extended with
   * 0xFF padding instead.
   */
  public static byte[] toNextPrefixPaddedBytes(final byte[] b) {
    final byte[] newD = new byte[8];
    System.arraycopy(b, 0, newD, 0, Math.min(8, b.length));
    int offset = Math.min(8, b.length);
    while (offset > 0) {
      if (b[offset - 1] != (byte) 0xFF) {
        break;
      }
      offset--;
    }
    if (offset == 0 && b.length < 8) {
      // prefix is all 0xFF but shorter than 8 bytes; pad out with 0xFF
      for (int i = b.length; i < 8; i++) {
        newD[i] = (byte) 0xFF;
      }
    } else if (offset > 0) {
      newD[offset - 1]++;
    }
    return newD;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/FixedBinNumericHistogram.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.statistics.histogram; /** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * *

http://www.apache.org/licenses/LICENSE-2.0 * *

Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.FloatCompareUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * Fixed number of bins for a histogram. Unless configured, the range will expand dynamically, * redistributing the data as necessary into the wider bins. * *

The advantage of constraining the range of the statistic is to ignore values outside the
 * range, such as erroneous values. Erroneous values force extremes in the histogram. For example,
 * if the expected range of values falls between 0 and 1 and a value of 10000 occurs, then a single
 * bin contains the entire population between 0 and 1, a single bin represents the single value of
 * 10000.
 */
public class FixedBinNumericHistogram implements NumericHistogram {
  private static final Logger LOGGER =
      LoggerFactory.getLogger(FixedBinNumericHistogram.class.getName());
  // Per-bin counts; length is fixed after construction (default 32 bins).
  private long count[] = new long[32];
  // Total number of values consumed across all bins.
  private long totalCount = 0;
  // Sentinel extremes until the first value is added (unless a constrained range is supplied).
  private double minValue = Double.MAX_VALUE;
  private double maxValue = -Double.MAX_VALUE;
  // When true, values outside [minValue, maxValue] are silently ignored by add().
  private boolean constrainedRange = false;

  /** Creates a new histogram object. */
  public FixedBinNumericHistogram() {
    totalCount = 0;
  }

  /** Creates a new histogram object. */
  public FixedBinNumericHistogram(final int size) {
    count = new long[size];
  }

  /**
   * Creates a histogram constrained to a fixed range; values outside the range are ignored.
   *
   * @throws IllegalArgumentException if either bound is infinite
   */
  public FixedBinNumericHistogram(final int bins, final double minValue, final double maxValue) {
    count = new long[bins];
    if (Double.isInfinite(minValue) || Double.isInfinite(maxValue)) {
      throw new IllegalArgumentException("Histogram cannot use infinity as min or max value");
    }
    this.minValue = minValue;
    this.maxValue = maxValue;
    constrainedRange = true;
  }

  /** @return quantile boundaries for the requested number of equal-probability bins */
  public double[] quantile(final int bins) {
    return NumericHistogram.binQuantiles(this, bins);
  }

  // NOTE(review): returns NaN when totalCount == 0 (0.0/0) — callers appear to tolerate this;
  // confirm before relying on it.
  @Override
  public double cdf(final double val) {
    return sum(val, false) / totalCount;
  }

  /**
   * Estimate number of values consumed up to provided value.
   *
   * @param val
   * @return the number of estimated points
   */
  @Override
  public double sum(final double val, final boolean inclusive) {
    if (val < minValue) {
      return 0.0;
    }
    final double range = maxValue - minValue;
    if ((range <= 0.0) || (totalCount == 0)) {
      return totalCount;
    }
    // locate the bin containing val, clamped to the last bin
    final int bin =
        Math.min((int) Math.floor((((val - minValue) / range) * count.length)), count.length - 1);
    double c = 0;
    final double perBinSize = binSize();
    for (int i = 0; i < bin; i++) {
      c += count[i];
    }
    // linearly interpolate within the final (partial) bin
    final double percentageOfLastBin =
        Math.min(1.0, (val - ((perBinSize * (bin)) + minValue)) / perBinSize);
    c += (percentageOfLastBin * count[bin]);
    return c > 0 ? c : (inclusive ? 1.0 : c);
  }

  // Width of each bin; defaults to 1.0 when the range has collapsed to a point to avoid
  // divide-by-zero in interpolation.
  private double binSize() {
    final double v = (maxValue - minValue) / count.length;
    return (FloatCompareUtils.checkDoublesEqual(v, 0.0)) ? 1.0 : v;
  }

  /**
   * Approximate value at the given cumulative fraction (0..1), interpolated within the bin where
   * the requested mass is reached.
   */
  @Override
  public double quantile(final double percentage) {
    final double fractionOfTotal = percentage * totalCount;
    double countThisFar = 0;
    int bin = 0;
    for (; (bin < count.length) && (countThisFar < fractionOfTotal); bin++) {
      countThisFar += count[bin];
    }
    if (bin == 0) {
      return minValue;
    }
    final double perBinSize = binSize();
    final double countUptoLastBin = countThisFar - count[bin - 1];
    return minValue
        + ((perBinSize * bin)
            + (perBinSize * ((fractionOfTotal - countUptoLastBin) / count[bin - 1])));
  }

  /** @return fraction of the population falling between the two values (cdf difference) */
  public double percentPopulationOverRange(final double start, final double stop) {
    return cdf(stop) - cdf(start);
  }

  public long totalSampleSize() {
    return totalCount;
  }

  public long[] count(final int bins) {
    return NumericHistogram.binCounts(this, bins);
  }

  /**
   * Merges another FixedBinNumericHistogram into this one: both histograms are first redistributed
   * onto the union range, then bin counts are summed.
   */
  @Override
  public void merge(final NumericHistogram mergeable) {
    final FixedBinNumericHistogram myTypeOfHist = (FixedBinNumericHistogram) mergeable;
    final double newMinValue = Math.min(minValue, myTypeOfHist.minValue);
    final double newMaxValue = Math.max(maxValue, myTypeOfHist.maxValue);
    try {
      this.redistribute(newMinValue, newMaxValue);
      myTypeOfHist.redistribute(newMinValue, newMaxValue);
    } catch (final IllegalArgumentException e) {
      // best-effort: on failure the bins are still summed over the original ranges
      LOGGER.error("Failed to redistribute values during merge", e);
    }
    for (int i = 0; i < count.length; i++) {
      count[i] += myTypeOfHist.count[i];
    }
    maxValue = newMaxValue;
    minValue = newMinValue;
    totalCount += myTypeOfHist.totalCount;
  }

  /** @return serialized size in bytes (varint counts + two fixed 8-byte doubles) */
  @Override
  public int bufferSize() {
    int bufferSize =
        VarintUtils.unsignedLongByteLength(totalCount)
            + VarintUtils.unsignedIntByteLength(count.length)
            + 16;
    for (int i = 0; i < count.length; i++) {
      bufferSize += VarintUtils.unsignedLongByteLength(count[i]);
    }
    return bufferSize;
  }

  @Override
  public void toBinary(final ByteBuffer buffer) {
    VarintUtils.writeUnsignedLong(totalCount, buffer);
    buffer.putDouble(minValue);
    buffer.putDouble(maxValue);
    VarintUtils.writeUnsignedInt(count.length, buffer);
    for (int i = 0; i < count.length; i++) {
      VarintUtils.writeUnsignedLong(count[i], buffer);
    }
  }

  @Override
  public void fromBinary(final ByteBuffer buffer) {
    totalCount = VarintUtils.readUnsignedLong(buffer);
    minValue = buffer.getDouble();
    maxValue = buffer.getDouble();
    final int s = VarintUtils.readUnsignedInt(buffer);
    count = new long[s];
    for (int i = 0; i < s; i++) {
      count[i] = VarintUtils.readUnsignedLong(buffer);
    }
  }

  @Override
  public String toString() {
    return NumericHistogram.histogramToString(this);
  }

  /** @return the total number of consumed values */
  @Override
  public long getTotalCount() {
    return totalCount;
  }

  /** @return the number of bins used */
  public int getNumBins() {
    return count.length;
  }

  @Override
  public void add(final double num) {
    add(1L, num);
  }

  /**
   * Adds {@code amount} occurrences of {@code num}, expanding (and redistributing) the range when
   * the value falls outside the current bounds, unless the range is constrained.
   */
  public void add(final long amount, final double num) {
    if (constrainedRange && ((num < minValue) || (num > maxValue))) {
      return;
    }
    // entry of the same value or first entry
    if ((totalCount == 0L) || FloatCompareUtils.checkDoublesEqual(minValue, num)) {
      count[0] += amount;
      minValue = num;
      maxValue = Math.max(num, maxValue);
    }
    // else if entry has a different value while all mass so far sits at a single point
    else if (FloatCompareUtils.checkDoublesEqual(maxValue, minValue)) {
      // num is neither the min nor the max yet; split into first/last bins
      if (num < minValue) {
        count[count.length - 1] = count[0];
        count[0] = amount;
        minValue = num;
      } else if (num > maxValue) {
        count[count.length - 1] = amount;
        // count[0] is unchanged
        maxValue = num;
      }
    } else {
      if (num < minValue) {
        try {
          redistribute(num, maxValue);
        } catch (final IllegalArgumentException e) {
          LOGGER.error("Failed to redistribute values during add", e);
        }
        minValue = num;
      } else if (num > maxValue) {
        try {
          redistribute(minValue, num);
        } catch (final IllegalArgumentException e) {
          LOGGER.error("Failed to redistribute values during add", e);
        }
        maxValue = num;
      }
      final double range = maxValue - minValue;
      final double b = (((num - minValue) / range) * count.length);
      final int bin = Math.min((int) Math.floor(b), count.length - 1);
      count[bin] += amount;
    }
    totalCount += amount;
  }

  private void redistribute(final double newMinValue, final double newMaxValue)
      throws IllegalArgumentException {
    redistribute(new long[count.length], newMinValue, newMaxValue);
  }

  /**
   * Re-bins the current counts onto a new (wider) range, proportionally splitting each old bin's
   * mass across the new bins it overlaps. Replaces {@code count} with {@code newCount} when done.
   */
  private void redistribute(
      final long[] newCount,
      final double newMinValue,
      final double newMaxValue) {
    if (Double.isInfinite(minValue) || Double.isInfinite(maxValue)) {
      throw new IllegalArgumentException(
          "Histogram cannot redistribute with min or max value set to infinity");
    }
    if (Double.isInfinite(newMinValue) || Double.isInfinite(newMaxValue)) {
      throw new IllegalArgumentException(
          "Histogram cannot redistribute with new min or max value set to infinity");
    }
    final double perBinSize = binSize();
    final double newRange = (newMaxValue - newMinValue);
    final double newPerBinsSize = newRange / count.length;
    // sliding window over the OLD bin boundaries
    double currentWindowStart = minValue;
    double currentWindowStop = minValue + perBinSize;
    for (int bin = 0; bin < count.length; bin++) {
      long distributionCount = 0;
      // first NEW bin overlapping the current old bin, clamped to the last bin
      int destinationBin =
          Math.min(
              (int) Math.floor((((currentWindowStart - newMinValue) / newRange) * count.length)),
              count.length - 1);
      double destinationWindowStart = newMinValue + (destinationBin * newPerBinsSize);
      double destinationWindowStop = destinationWindowStart + newPerBinsSize;
      // drain the old bin's count into overlapping destination bins
      while (count[bin] > 0) {
        if (currentWindowStart < destinationWindowStart) {
          // take whatever is left over
          distributionCount = count[bin];
        } else {
          // fraction of the old bin covered by the destination bin
          final double diff =
              Math.min(Math.max(currentWindowStop - destinationWindowStop, 0.0), perBinSize);
          distributionCount = Math.round(count[bin] * (1.0 - (diff / perBinSize)));
        }
        newCount[destinationBin] += distributionCount;
        count[bin] -= distributionCount;
        if (destinationWindowStop < currentWindowStop) {
          destinationWindowStart = destinationWindowStop;
          destinationWindowStop += newPerBinsSize;
          destinationBin += 1;
          if ((destinationBin == count.length) && (count[bin] > 0)) {
            // ran off the end of the destination bins; dump the remainder
            newCount[bin] += count[bin];
            count[bin] = 0;
          }
        }
      }
      currentWindowStart = currentWindowStop;
      currentWindowStop += perBinSize;
    }
    count = newCount;
  }

  @Override
  public double getMaxValue() {
    return maxValue;
  };

  @Override
  public double getMinValue() {
    return minValue;
  };

  /** Factory producing FixedBinNumericHistogram instances. */
  public static class FixedBinNumericHistogramFactory implements NumericHistogramFactory {
    @Override
    public NumericHistogram create(final int bins) {
      return new FixedBinNumericHistogram(bins);
    }

    @Override
    public NumericHistogram create(final int bins, final double minValue, final double maxValue) {
      return new FixedBinNumericHistogram(bins, minValue, maxValue);
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/MinimalBinDistanceHistogram.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.statistics.histogram; /** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * *

http://www.apache.org/licenses/LICENSE-2.0 * *

Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.Random; import org.locationtech.geowave.core.index.VarintUtils; /** * Dynamic Histogram: * *

Derived from work for Hive and based on Yael Ben-Haim and Elad Tom-Tov, "A streaming parallel * decision tree algorithm", J. Machine Learning Research 11 (2010), pp. 849--872. * *

Note: the paper refers to a bins as a pair (p,m) where p = lower bound and m = count. Some of * the interpolation treats the pair as a coordinate. * *

Although there are no approximation guarantees, it appears to work well with adequate data * and a large number of histogram bins. */ public class MinimalBinDistanceHistogram implements NumericHistogram { // Class variables private int nbins = 1024; // the fix maximum number of bins to maintain private long totalCount; // cache to avoid counting all the bins private ArrayList bins; private final Random prng; private double maxValue; // the maximum value consumed /** Creates a new histogram object. */ public MinimalBinDistanceHistogram() { totalCount = 0; // init the RNG for breaking ties in histogram merging. prng = new Random(System.currentTimeMillis()); bins = new ArrayList<>(nbins); } /** Creates a new histogram object. */ public MinimalBinDistanceHistogram(final int size) { totalCount = 0; // init the RNG for breaking ties in histogram merging. prng = new Random(System.currentTimeMillis()); bins = new ArrayList<>(size); nbins = size; } /** Resets a histogram object to its initial state. */ public void reset() { bins.clear(); totalCount = 0; } /** @return the total number of consumed values */ @Override public long getTotalCount() { return totalCount; } /** @return the number of bins used */ public int getNumBins() { return bins.size(); } /** * @param other A serialized histogram created by the serialize() method * @see #merge */ @Override public void merge(final NumericHistogram other) { if (other == null) { return; } final MinimalBinDistanceHistogram myTypeOfHist = (MinimalBinDistanceHistogram) other; totalCount += myTypeOfHist.totalCount; maxValue = Math.max(myTypeOfHist.maxValue, maxValue); if ((nbins == 0) || (bins.size() == 0)) { // Just make a copy bins = new ArrayList<>(myTypeOfHist.bins.size()); for (final Bin coord : myTypeOfHist.bins) { bins.add(coord); } // the constrained bin sizes may not match trim(); } else { // The aggregation buffer already contains a partial histogram. // Merge using Algorithm #2 from the Ben-Haim and // Tom-Tov paper. 
final ArrayList mergedBins = new ArrayList<>(getNumBins() + myTypeOfHist.getNumBins()); mergedBins.addAll(bins); for (final Bin oldBin : myTypeOfHist.bins) { mergedBins.add(new Bin(oldBin.lowerBound, oldBin.count)); } Collections.sort(mergedBins); bins = mergedBins; // Now trim the overstuffed histogram down to the correct number of // bins trim(); } } /** * Adds a new data point to the histogram approximation. Make sure you have called either * allocate() or merge() first. This method implements Algorithm #1 from Ben-Haim and Tom-Tov, "A * Streaming Parallel Decision Tree Algorithm", JMLR 2010. * * @param v The data point to add to the histogram approximation. */ @Override public void add(final double v) { this.add(1, v); } public void add(final long count, final double v) { // Binary search to find the closest bucket that v should go into. // 'bin' should be interpreted as the bin to shift right in order to // accomodate // v. As a result, bin is in the range [0,N], where N means that the // value v is // greater than all the N bins currently in the histogram. It is also // possible that // a bucket centered at 'v' already exists, so this must be checked in // the next step. totalCount++; maxValue = Math.max(maxValue, v); int bin = 0; for (int l = 0, r = bins.size(); l < r;) { bin = (l + r) / 2; if (bins.get(bin).lowerBound > v) { r = bin; } else { if (bins.get(bin).lowerBound < v) { l = ++bin; } else { break; // break loop on equal comparator } } } // If we found an exact bin match for value v, then just increment that // bin's count. // Otherwise, we need to insert a new bin and trim the resulting // histogram back to size. // A possible optimization here might be to set some threshold under // which 'v' is just // assumed to be equal to the closest bin -- if fabs(v-bins[bin].x) < // THRESHOLD, then // just increment 'bin'. This is not done now because we don't want to // make any // assumptions about the range of numeric data being analyzed. 
if ((bin < bins.size()) && (Math.abs(bins.get(bin).lowerBound - v) < 1E-12)) { bins.get(bin).count += count; } else { bins.add(bin, new Bin(v, count)); // Trim the bins down to the correct number of bins. if (bins.size() > nbins) { trim(); } } } /** * Trims a histogram down to 'nbins' bins by iteratively merging the closest bins. If two pairs of * bins are equally close to each other, decide uniformly at random which pair to merge, based on * a PRNG. */ private void trim() { while (bins.size() > nbins) { // Find the closest pair of bins in terms of x coordinates. Break // ties randomly. double smallestdiff = bins.get(1).lowerBound - bins.get(0).lowerBound; int smallestdiffloc = 0, smallestdiffcount = 1; final int s = bins.size() - 1; for (int i = 1; i < s; i++) { final double diff = bins.get(i + 1).lowerBound - bins.get(i).lowerBound; if (diff < smallestdiff) { smallestdiff = diff; smallestdiffloc = i; smallestdiffcount = 1; } else { // HP Fortify "Insecure Randomness" false positive // This random number is not used for any purpose // related to security or cryptography if (((diff - smallestdiff) < 1E-12) && (prng.nextDouble() <= (1.0 / ++smallestdiffcount))) { smallestdiffloc = i; } } } // Merge the two closest bins into their average x location, // weighted by their heights. // The height of the new bin is the sum of the heights of the old // bins. final Bin smallestdiffbin = bins.get(smallestdiffloc); final double d = smallestdiffbin.count + bins.get(smallestdiffloc + 1).count; smallestdiffbin.lowerBound *= smallestdiffbin.count / d; smallestdiffbin.lowerBound += (bins.get(smallestdiffloc + 1).lowerBound / d) * bins.get(smallestdiffloc + 1).count; smallestdiffbin.count = d; // Shift the remaining bins left one position bins.remove(smallestdiffloc + 1); } } /** @return The quantiles over the given number of bins. 
*/ public double[] quantile(final int bins) { final double increment = 1.0 / bins; final double[] result = new double[bins]; double val = increment; for (int i = 0; i < bins; i++, val += increment) { result[i] = quantile(val); } return result; } /** * Gets an approximate quantile value from the current histogram. Some popular quantiles are 0.5 * (median), 0.95, and 0.98. * * @param q The requested quantile, must be strictly within the range (0,1). * @return The quantile value. */ @Override public double quantile(final double q) { double csum = 0; final int binsCount = bins.size(); for (int b = 0; b < binsCount; b++) { csum += bins.get(b).count; if ((csum / totalCount) >= q) { if (b == 0) { return bins.get(b).lowerBound; } csum -= bins.get(b).count; final double r = bins.get(b - 1).lowerBound + ((((q * totalCount) - csum) * (bins.get(b).lowerBound - bins.get(b - 1).lowerBound)) / (bins.get(b).count)); return r; } } return maxValue; // should not get here } /** * Estimate number of values consumed up to provided value. * * @param val * @return the number of estimated points */ @Override public double sum(final double val, final boolean inclusive) { if (bins.isEmpty()) { return 0.0; } final double minValue = bins.get(0).lowerBound; final double range = maxValue - minValue; // one value if ((range <= 0.0) || (val > maxValue)) { return totalCount; } else if (val < minValue) { return 0.0; } double foundCount = 0; int i = 0; for (final Bin coord : bins) { if (coord.lowerBound < val) { foundCount += coord.count; } else { break; } i++; } final double upperBoundary = (i < getNumBins()) ? bins.get(i).lowerBound : maxValue; final double lowerBoundary = i > 0 ? bins.get(i - 1).lowerBound : 0.0; final double upperCount = (i < getNumBins()) ? bins.get(i).count : 0; final double lowerCount = i > 0 ? 
bins.get(i - 1).count : 0;
    foundCount -= lowerCount;
    // from paper 'sum' procedure
    // the paper treats Bins like coordinates, taking the area of histogram
    // (lowerBoundary,0) (lowerBoundary,lowerCount)
    // (upperBoundary,upperCount) (upperBoundary,0)
    // divided by (upperBoundary - lowerBoundary).
    final double mb =
        lowerCount
            + (((upperCount - lowerCount) / (upperBoundary - lowerBoundary))
                * (val - lowerBoundary));
    final double s =
        (((lowerCount + mb) / 2.0) * (val - lowerBoundary)) / (upperBoundary - lowerBoundary);
    final double r = foundCount + s + (lowerCount / 2.0);
    // NOTE(review): when 'inclusive' is set, estimates below 1.0 are clamped up to 1.0 (the
    // queried value itself counts as one point); otherwise the raw estimate is returned.
    return r > 1.0 ? r : (inclusive ? 1.0 : r);
  }

  /** Fraction of all added points that are <= val: the estimated running sum over the total. */
  @Override
  public double cdf(final double val) {
    return sum(val, false) / totalCount;
  }

  /**
   * Splits the value range [min, max] into the requested number of equal-width intervals and
   * returns the estimated number of points in each, computed as successive differences of the
   * running sum.
   *
   * @param bins the number of equal-width intervals
   * @return per-interval estimated counts
   */
  public long[] count(final int bins) {
    final long[] result = new long[bins];
    double start = getMinValue();
    final double range = maxValue - start;
    final double increment = range / bins;
    start += increment;
    long last = 0;
    for (int bin = 0; bin < bins; bin++, start += increment) {
      final long aggSum = (long) Math.ceil(sum(start, false));
      result[bin] = aggSum - last;
      last = aggSum;
    }
    return result;
  }

  @Override
  public String toString() {
    return NumericHistogram.histogramToString(this);
  }

  /**
   * Serialized size: varint totalCount + varint nbins + varint used-bin count + the bins
   * themselves + 8 bytes for maxValue.
   */
  @Override
  public int bufferSize() {
    return VarintUtils.unsignedLongByteLength(totalCount)
        + VarintUtils.unsignedIntByteLength(nbins)
        + VarintUtils.unsignedIntByteLength(bins.size())
        + (bins.size() * Bin.bufferSize())
        + 8;
  }

  @Override
  public void toBinary(final ByteBuffer buffer) {
    VarintUtils.writeUnsignedLong(totalCount, buffer);
    buffer.putDouble(maxValue);
    VarintUtils.writeUnsignedInt(nbins, buffer);
    VarintUtils.writeUnsignedInt(bins.size(), buffer);
    for (final Bin bin : bins) {
      bin.toBuffer(buffer);
    }
  }

  // Must read fields in the exact order that toBinary wrote them.
  @Override
  public void fromBinary(final ByteBuffer buffer) {
    totalCount = VarintUtils.readUnsignedLong(buffer);
    maxValue = buffer.getDouble();
    nbins = VarintUtils.readUnsignedInt(buffer);
    final int usedBinCount = VarintUtils.readUnsignedInt(buffer);
    bins.clear();
    bins.ensureCapacity(nbins);
    for (int i = 0; i < usedBinCount; i++) {
      bins.add(new Bin().fromBuffer(buffer));
    }
  }

  /** The Bin class defines a histogram bin, which is just an (x,y) pair. */
  // NOTE(review): the generic parameter appears lost in extraction; this is presumably
  // Comparable<Bin> in the original source, since compareTo takes a Bin — confirm upstream.
  static class Bin implements Comparable {
    double lowerBound;
    // Counts can be split fractionally
    double count;

    public Bin() {}

    public Bin(final double lowerBound, final double count) {
      super();
      this.lowerBound = lowerBound;
      this.count = count;
    }

    // Orders bins by their lower boundary only; the count plays no part in ordering.
    @Override
    public int compareTo(final Bin other) {
      return Double.compare(lowerBound, other.lowerBound);
    }

    public void toBuffer(final ByteBuffer buffer) {
      buffer.putDouble(lowerBound);
      buffer.putDouble(count);
    }

    public Bin fromBuffer(final ByteBuffer buffer) {
      lowerBound = buffer.getDouble();
      count = buffer.getDouble();
      return this;
    }

    // Two doubles at 8 bytes each.
    static int bufferSize() {
      return 16;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      long temp;
      temp = Double.doubleToLongBits(count);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      temp = Double.doubleToLongBits(lowerBound);
      result = (prime * result) + (int) (temp ^ (temp >>> 32));
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final Bin other = (Bin) obj;
      if (Double.doubleToLongBits(count) != Double.doubleToLongBits(other.count)) {
        return false;
      }
      if (Double.doubleToLongBits(lowerBound) != Double.doubleToLongBits(other.lowerBound)) {
        return false;
      }
      return true;
    }
  }

  @Override
  public double getMaxValue() {
    return maxValue;
  };

  // The first bin's lower boundary; 0.0 when the histogram is empty.
  @Override
  public double getMinValue() {
    return !bins.isEmpty() ? bins.get(0).lowerBound : 0.0;
  };

  /** Factory producing MinimalBinDistanceHistogram instances. */
  public static class MinimalBinDistanceHistogramFactory implements NumericHistogramFactory {

    @Override
    public NumericHistogram create(final int bins) {
      return new MinimalBinDistanceHistogram(bins);
    }

    // NOTE(review): minValue/maxValue are ignored here — this histogram adapts its bins to the
    // data rather than being bounded up front.
    @Override
    public NumericHistogram create(final int bins, final double minValue, final double maxValue) {
      return new MinimalBinDistanceHistogram(bins);
    }
  }
}


================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/NumericHistogram.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter.statistics.histogram;

import java.nio.ByteBuffer;

/**
 * A streaming approximation of the distribution of a numeric attribute: implementations accept
 * values one at a time and answer quantile/cdf/sum queries, and can be merged and serialized.
 */
public interface NumericHistogram {

  /**
   * Fold another histogram's contents into this one.
   *
   * @param other the histogram to merge into this one
   */
  void merge(final NumericHistogram other);

  /** @param v The data point to add to the histogram approximation. */
  void add(final double v);

  /**
   * Gets an approximate quantile value from the current histogram. Some popular quantiles are 0.5
   * (median), 0.95, and 0.98.
   *
   * @param q The requested quantile, must be strictly within the range (0,1).
   * @return The quantile value.
   */
  double quantile(final double q);

  /**
   * Returns the fraction of all points added which are <= x.
   *
   * @param val the value to compute the cumulative distribution for
   * @return the cumulative distribution function (cdf) result
   */
  double cdf(final double val);

  /**
   * Estimate number of values consumed up to provided value.
   *
   * @param val the upper value through which to estimate the count
   * @param inclusive whether the provided value itself should be counted
   * @return the number of estimated points
   */
  double sum(final double val, boolean inclusive);

  /** @return the amount of byte buffer space to serialize this histogram */
  int bufferSize();

  /** Serialize this histogram's state into the buffer (at most {@link #bufferSize()} bytes). */
  void toBinary(final ByteBuffer buffer);

  /** Restore this histogram's state from a buffer written by {@link #toBinary}. */
  void fromBinary(final ByteBuffer buffer);

  /** @return the maximum value tracked by this histogram */
  double getMaxValue();

  /** @return the minimum value tracked by this histogram */
  double getMinValue();

  /** @return the total number of values added */
  long getTotalCount();

  /** Human-readable summary (min, max, and median) of any histogram. */
  static String histogramToString(final NumericHistogram histogram) {
    return "Numeric Histogram[Min: "
        + histogram.getMinValue()
        + ", Max: "
        + histogram.getMaxValue()
        + ", Median: "
        + histogram.quantile(0.5)
        + "]";
  }

  /** Values at evenly spaced quantiles (1/bins, 2/bins, ..., 1). */
  static double[] binQuantiles(final NumericHistogram histogram, final int bins) {
    final double[] result = new double[bins];
    final double binSize = 1.0 / bins;
    for (int bin = 0; bin < bins; bin++) {
      result[bin] = histogram.quantile(binSize * (bin + 1));
    }
    return result;
  }

  /**
   * Estimated counts for equal-width intervals spanning [min, max], computed as successive
   * differences of the running sum.
   */
  static long[] binCounts(final NumericHistogram histogram, final int bins) {
    final long[] result = new long[bins];
    double start = histogram.getMinValue();
    final double range = histogram.getMaxValue() - start;
    final double increment = range / bins;
    start += increment;
    long last = 0;
    for (int bin = 0; bin < bins; bin++, start += increment) {
      final long aggSum = (long) Math.ceil(histogram.sum(start, false));
      result[bin] = aggSum - last;
      last = aggSum;
    }
    return result;
  }
}


================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/NumericHistogramFactory.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.statistics.histogram; public interface NumericHistogramFactory { public NumericHistogram create(int bins); public NumericHistogram create(int bins, double minValue, double maxValue); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/TDigestNumericHistogram.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter.statistics.histogram; import java.nio.ByteBuffer; import com.tdunning.math.stats.MergingDigest; import com.tdunning.math.stats.TDigest; public class TDigestNumericHistogram implements NumericHistogram { private static final double DEFAULT_COMPRESSION = 100; private TDigest tdigest; public TDigestNumericHistogram() { this(DEFAULT_COMPRESSION); } public TDigestNumericHistogram(final double compression) { super(); tdigest = TDigest.createMergingDigest(DEFAULT_COMPRESSION); } @Override public void merge(final NumericHistogram other) { if ((other instanceof TDigestNumericHistogram) && (other.getTotalCount() > 0)) { tdigest.add(((TDigestNumericHistogram) other).tdigest); } } @Override public void add(final double v) { tdigest.add(v); } @Override public double quantile(final double q) { return tdigest.quantile(q); } @Override public double cdf(final double val) { return tdigest.cdf(val); } @Override public int bufferSize() { return tdigest.smallByteSize(); } @Override public void toBinary(final ByteBuffer buffer) { tdigest.asSmallBytes(buffer); } @Override public void fromBinary(final ByteBuffer buffer) { tdigest = MergingDigest.fromBytes(buffer); } @Override public double getMaxValue() { return tdigest.getMax(); } @Override public double getMinValue() { return tdigest.getMin(); } @Override public long getTotalCount() { return tdigest.size(); } @Override public double sum(final double val, final boolean inclusive) { return tdigest.cdf(val) * tdigest.size(); } @Override public String toString() { return NumericHistogram.histogramToString(this); } public TDigest getTdigest() { 
return tdigest; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/Aggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.persist.Persistable; /** * An Aggregation function that mathematically represents any commutative monoid (ie. a function * that is both commutative and associative). For some data stores Aggregations will be run * distributed on the server within the scope of iterating through the results for maximum * efficiency. A third party Aggregation can be used, but if serverside processing is enabled, the * third party Aggregation implementation must also be on the server classpath. * * @param

Parameters for the aggregation. What is needed to configure it correctly * @param Result type for the aggregation, the output when given an entry of type T. * @param Data type of the entries for the aggregation. */ public interface Aggregation

extends Persistable { /** * Returns a persistable object for any parameters that must be persisted to properly compute the * aggregation * * @return A persistable object for any parameters that must be persisted to properly compute the * aggregation */ P getParameters(); /** * Sets the parameters based on what has been persisted * * @param parameters the persisted parameters for this aggregation function */ void setParameters(P parameters); /** * Get the current result of the aggregation. This must be mergeable and it is the responsibility * of the caller to merge separate results if desired. It is the responsibility of the aggregation * to start with a new instance of the result at the beginning of any aggregation. * * @return the current result of the aggregation */ R getResult(); /** * Merge two aggregation results into a single result * * @param result1 the first result * @param result2 the second result * @return the merged result */ default R merge(final R result1, final R result2) { if (result1 == null) { return result2; } else if (result2 == null) { return result1; } else if ((result1 instanceof Mergeable) && (result2 instanceof Mergeable)) { ((Mergeable) result1).merge((Mergeable) result2); return result1; } return null; } /** * This is responsible for writing the result to binary * * @param result the result value * @return the binary representing this value */ byte[] resultToBinary(R result); /** * This is responsible for reading the result from binary * * @param binary the binary representing this result * @return the result value */ R resultFromBinary(byte[] binary); /** this will be called if the result should be reset to its default value */ void clearResult(); /** * Update the aggregation result using the new entry provided * * @param adapter the adapter for this entry * @param entry the new entry to compute an updated aggregation result on */ void aggregate(DataTypeAdapter adapter, T entry); /** * Because the serialization of aggregation is just 
the function without the parameters or the * result, its expected that this is empty */ @Override default byte[] toBinary() { return new byte[0]; } /** * Because the serialization of aggregation is just the function without the parameters or the * result, its expected that there's nothing to deserialize */ @Override default void fromBinary(final byte[] bytes) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/AggregationQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.query.BaseQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; /** * As the name suggests, an aggregation query is a special-purposed query for performing an * aggregation on your dataset. The same set of query criteria can be applied as the input of the * aggregation. Typical use should be to use * * @param

input type for the aggregation * @param result type for the aggregation * @param data type of the entries for the aggregation */ public class AggregationQuery

extends BaseQuery> { /** default constructor useful only for serialization and deserialization */ public AggregationQuery() { super(); } /** * This constructor should generally not be used directly. Instead use AggregationQueryBuilder to * construct this object. * * @param commonQueryOptions basic query options * @param dataTypeQueryOptions query options related to data type * @param indexQueryOptions query options related to index * @param queryConstraints constraints defining the range of data to query */ public AggregationQuery( final CommonQueryOptions commonQueryOptions, final AggregateTypeQueryOptions dataTypeQueryOptions, final IndexQueryOptions indexQueryOptions, final QueryConstraints queryConstraints) { super(commonQueryOptions, dataTypeQueryOptions, indexQueryOptions, queryConstraints); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/AggregationQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.query.BaseQueryBuilder; import org.locationtech.geowave.core.store.query.aggregate.AggregationQueryBuilderImpl; import org.locationtech.geowave.core.store.query.aggregate.BinningAggregationOptions; /** * This and its extensions should be used to create an AggregationQuery. * * @param

input type for the aggregation * @param result type for the aggregation * @param data type of the entries for the aggregation * @param the type of the builder, useful for extending this builder and maintaining the builder * type */ public interface AggregationQueryBuilder

> extends BaseQueryBuilder, A> { /** * get a new default implementation of the builder * * @return an AggregationQueryBuilder */ static

> AggregationQueryBuilder newBuilder() { return new AggregationQueryBuilderImpl<>(); } /** * Instead of having a scalar aggregation, bin the results by a given strategy. * * Calling this produces a 'meta aggregation', which uses the current aggregation along with the * binning strategy to perform aggregations. * * entries of type {@link T} are binned using the strategy. When a new bin is required, it is * created by instantiating a fresh aggregation (based on the current aggregation) * * @param binningStrategy The strategy to bin the hashes of given data. * @param maxBins The maximum bins to allow in the aggregation. -1 for no limit. * @return A complete aggregation query, ready to consume data. */ AggregationQuery, Map, T> buildWithBinningStrategy( BinningStrategy binningStrategy, int maxBins); /** * Provide the Aggregation function and the type name to apply the aggregation on * * @param typeName the type name of the dataset * @param aggregation the aggregation function * @return an aggregation */ A aggregate(String typeName, Aggregation aggregation); /** * this is a convenience method to set the count aggregation if no type names are given it is * assumed to count every type * * @param typeNames the type names to count results * @return a count of how many entries match the query criteria */ A count(String... typeNames); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/AttributeIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

/**
 * An index on a single field of a data type.
 */
public interface AttributeIndex extends Index {
  /**
   * @return the attribute that is being indexed
   */
  String getAttributeName();

  /**
   * Provides a default name for an attribute index, of the form
   * {@code <typeName>_<attributeName>_idx}.
   *
   * @param typeName the data type that the attribute belongs to
   * @param attributeName the attribute that is being indexed
   * @return the default index name
   */
  public static String defaultAttributeIndexName(
      final String typeName,
      final String attributeName) {
    return typeName + "_" + attributeName + "_idx";
  }
}


================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/BinConstraints.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl;

/**
 * This is used by the DataStore to represent constraints on any statistics with binning strategies
 * to only return a certain set of the statistic's bins.
 */
public interface BinConstraints {
  /**
   * Unconstrained, a query will return all of the bins.
   *
   * @return a bin constraint representing all bins
   */
  static BinConstraints allBins() {
    return new BinConstraintsImpl(true);
  }

  /**
   * Sets the bins of the query explicitly. If a queried statistic uses a binning strategy, only
   * values contained in one of the given bins will be returned.
   *
   * @param exactMatchBins the bins to match
   * @return a bin constraint representing exact matches of the provided bins
   */
  static BinConstraints of(final ByteArray... exactMatchBins) {
    return new BinConstraintsImpl(exactMatchBins, false);
  }

  /**
   * Sets the bins of the query by prefix. If a queried statistic uses a binning strategy, only
   * values matching the bin prefix will be returned.
   *
   * @param prefixBins the prefixes used to match the bins
   * @return a bin constraint representing the set of bin prefixes
   */
  static BinConstraints ofPrefix(final ByteArray... prefixBins) {
    return new BinConstraintsImpl(prefixBins, true);
  }

  /**
   * Sets the bins of the query by range. If a queried statistic uses a binning strategy, only
   * values matching the range will be returned.
   *
   * @param binRanges the ranges used to match the bins
   * @return a bin constraint representing the set of bin ranges
   */
  static BinConstraints ofRange(final ByteArrayRange... binRanges) {
    return new BinConstraintsImpl(binRanges);
  }

  /**
   * Sets the bins of the query using an object type that is supported by the binning strategy. The
   * result will be constrained to only statistics that use binning strategies that support this
   * type of constraint and the resulting bins will be constrained according to that strategy's
   * usage of this object. For example, spatial binning strategies may use a spatial Envelope as
   * constraints, or another example might be a numeric field binning strategy using a numeric
   * Range as constraints. If a queried statistic uses a binning strategy, only values contained in
   * one of the given bins will be returned.
   *
   * @param binningStrategyConstraint an object of any type supported by the binning strategy. It
   *        will be interpreted as appropriate by the binning strategy and binning strategies that
   *        do not support this object type will not return any results.
   * @return bin constraints representing the Object
   */
  static BinConstraints ofObject(final Object binningStrategyConstraint) {
    return new BinConstraintsImpl(binningStrategyConstraint);
  }

  /**
   * Used primarily internally to get the explicit bins for this constraint but can be used if
   * there is a need to understand the bins being queried.
   *
   * @param stat the statistic being queried
   * @return the explicit bins being queried
   */
  // NOTE(review): the parameter is presumably Statistic<?> in the original — the generic appears
  // lost in extraction; confirm upstream.
  ByteArrayConstraints constraints(Statistic stat);

  /**
   * Represents more explicit bins than BinConstraints as Objects must be resolved to ByteArrays
   */
  static interface ByteArrayConstraints {
    /**
     * is this a prefix query
     *
     * @return a flag indicating if it is intended to query by bin prefix (otherwise its an exact
     *         match)
     */
    boolean isPrefix();

    /**
     * get the bins to query for
     *
     * @return the bins to query for
     */
    ByteArray[] getBins();

    /**
     * get the bin ranges to query for
     *
     * @return the bin ranges to query for
     */
    ByteArrayRange[] getBinRanges();

    /**
     * is this meant to query all bins
     *
     * @return a flag indicating if it is meant to query all bins
     */
    boolean isAllBins();
  }
}


================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/BinningStrategy.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * * A binning strategy is used to bin data in an aggregation query or in a statistic. * */ public interface BinningStrategy extends Persistable { /** * Get the bins used by the given entry. Each bin will have a separate value. * * @param type the data type * @param entry the entry * @param rows the rows created for the entry * @return a set of bins used by the given entry * * @param The type that will be used to bin on and the weight for a particular bin (if * multiple bins sometimes they can be weighted, a supplier is used to defer evaluation). * This could be anything, but you may see things like {@code SimpleFeature}, or * {@code CommonIndexedPersistenceEncoding} used mostly. */ ByteArray[] getBins(DataTypeAdapter type, T entry, GeoWaveRow... rows); /** * This computes a weight for the bin of a given entry. This can be useful for binning strategies * that produce multiple bins for a single entry to be able to weight/scale statistics by the * percent of coverage that the bounds of the bin covers the overall entry. For example, a time * range may cover multiple bins and the weight would likely be the percent of coverage that each * bin overlaps the ingested time range (and therefore something like a count statistic or any * summing statistic could scale the contribution by the weight). 
* * @param The type that will be used to bin on and the weight for a particular bin (if * multiple bins sometimes they can be weighted, a supplier is used to defer evaluation). * This could be anything, but you may see things like {@code SimpleFeature}, or * {@code CommonIndexedPersistenceEncoding} used mostly. * @param bin the bin used for the given entry for which to get a weighting factor * @param type the data type * @param entry the entry * @param rows the rows created for the entry * @return the weighting factor for this bin */ default double getWeight( final ByteArray bin, final DataTypeAdapter type, final T entry, final GeoWaveRow... rows) { return 1; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/DataStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.util.List; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * A DataStore can both ingest and query data based on persisted indices and data type adapters. * When the data is ingested it is explicitly given an index and a data type adapter which is then * persisted to be used in subsequent queries. Also, implicitly statistics are maintained associated * with all data ingested. These statistics can be queried. Furthermore, aggregations can be applied * directly to the data which are similar to statistics, but are more dynamic in that any query * criteria can be applied as the input of the aggregation. Data stores that support server-side * processing will run the aggregation within the scope of iterating through the results for * additional efficiency. * *

Here is a simple snippet of pseudocode showing how a data store can be used to store and * retrieve your data. * *

 * {@code
 *  DataStore store = DataStoreFactory.createDataStore(dataStoreOptions);
 *  store.addType(myDataType, myIndex);
 *  try (Writer writer = store.createWriter(myDataType.getTypeName())) {
 *    // write data
 *    writer.write(myEntry);
 *  }
 *
 */
public interface DataStore {

  /**
   * Ingest from path. If this is a directory, this method will recursively search for valid files
   * to ingest in the directory. This will iterate through registered IngestFormatPlugins to find
   * one that works for a given file. The applicable ingest format plugin will choose the
   * DataTypeAdapter and may use additional indices beyond the ones provided.
   *
   * @param inputPath The path for data to read and ingest into this data store
   * @param index The indexing approach to use (one or more indices may be provided).
   */
   void ingest(String inputPath, Index... index);

  /**
   * Ingest from path with options. If this is a directory, this method will recursively search for
   * valid files to ingest in the directory. The applicable ingest format plugin will choose the
   * DataTypeAdapter and may use additional indices beyond the ones provided.
   *
   * @param <T> the type of entries being ingested
   * @param inputPath The path for data to read and ingest into this data store
   * @param options a set of available options for ingesting from a URL
   * @param index The configuration information for the primary index to use.
   */
  // NOTE(review): the generic parameter on IngestOptions was mangled in extraction; <T> is
  // reconstructed here — confirm against the original source.
  <T> void ingest(String inputPath, IngestOptions<T> options, Index... index);

  /**
   * Returns all data in this data store that matches the query parameter. All data that matches
   * the query will be returned as an instance of the native data type. The Iterator must be closed
   * when it is no longer needed - this wraps the underlying scanner implementation and closes
   * underlying resources.
   *
   * @param <T> the native data type of the entries being queried
   * @param query data constraints for the query and additional options for processing the query
   * @return An iterator on all results that match the query. The iterator implements Closeable and
   *         it is best practice to close the iterator after it is no longer needed.
   */
  // NOTE(review): the generic parameters on CloseableIterator/Query were mangled in extraction;
  // <T> is reconstructed here — confirm against the original source.
  <T> CloseableIterator<T> query(final Query<T> query);

  /**
   * Perform a query using the GeoWave Query Language (GWQL).
   *
   * NOTE(review): it is unclear from this declaration whether the returned ResultSet holds open
   * scanner resources that the caller must close — confirm against the ResultSet contract.
   *
   * @param queryStr the GWQL query to perform
   * @param authorizations the authorizations to use for the query
   * @return the set of results that match the given query string
   */
  ResultSet query(final String queryStr, final String... authorizations);

  /**
   * Perform an aggregation on the data and just return the aggregated result. The query criteria is
   * very similar to querying the individual entries except in this case it defines the input to the
   * aggregation function, and the aggregation function produces a single result. Examples of this
   * might be simply counting matched entries, producing a bounding box or other range/extent for
   * matched entries, or producing a histogram.
   *
   * @param query the Aggregation Query, use AggregationQueryBuilder or its extensions to create
   * @return the single result of the aggregation
   */
  

R aggregate(final AggregationQuery query); /** * Get the data type adapter with the given type name from the data store. * * @param typeName the name of the type to get * @return The data type adapter with the given name, or {@code null} if it couldn't be found */ DataTypeAdapter getType(final String typeName); /** * Get all the data type adapters that have been used within this data store * * @return An array of the types used within this datastore. */ DataTypeAdapter[] getTypes(); /** * Add a statistic to the data store. The initial value of the statistic will not be calculated * and if there is existing relevant data, this statistic will not be accurate without forcing a * calculation. If instead it is not desire-able to calculate on add use {@code addStatistic} * instead. * * @param statistics the statistics to add */ void addEmptyStatistic(Statistic... statistic); /** * Add a statistic to the data store. The initial value of the statistic will be calculated after * being added. If this calculation is not desired use {@code addEmptyStatistic} instead. * * @param statistics the statistics to add */ void addStatistic(Statistic... statistic); /** * Remove statistics from the data store. * * @param statistic the statistics to remove */ void removeStatistic(final Statistic... statistic); /** * Force a recomputation of the stats * * @param statistic the statistics to recompute */ void recalcStatistic(Statistic... statistic); /** * Gets all of the statistics that are being tracked on the provided data type adapter. * * @param typeName the data type adapter to get the statistics for * @return An array of all the statistics that are being tracked on the provided data type * adapter. Note this is the descriptors of the statistics, not the values. */ DataTypeStatistic[] getDataTypeStatistics(final String typeName); /** * Gets the statistic that is being tracked for the data type, statistic type, and tag specified. 
* * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param statisticType the statistic type for the statistic to get * @param typeName the data type name to get the statistic for * @param tag the tag of the statistic, if not specified, a tag will be inferred * @return the statistic, or null if no statistic matches the criteria */ , R> DataTypeStatistic getDataTypeStatistic( final StatisticType statisticType, final String typeName, @Nullable final String tag); /** * Gets all of the statistics that are being tracked on the provided index. * * @param indexName the index name to retrieve statistics for * @return An array of all the statistics that are being tracked on the provided index. Note this * is the descriptors of the statistics, not the values. */ IndexStatistic[] getIndexStatistics(final String indexName); /** * Gets the statistic that is being tracked for the index, statistic type, and tag specified. * * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param statisticType the statistic type for the statistic to get * @param indexName * @param tag the tag of the statistic, if not specified, a tag will be inferred * @return the statistic, or null if no statistic matches the criteria */ , R> IndexStatistic getIndexStatistic( final StatisticType statisticType, final String indexName, @Nullable final String tag); /** * Gets all of the statistics that are being tracked on the provided type/field pair. * * @param typeName the data type name to get the statistics for * @param fieldName the field name to get the statistics for * @return An array of all the statistics that are being tracked on the provided field. Note this * is the descriptors of the statistics, not the values. 
*/ FieldStatistic[] getFieldStatistics(final String typeName, final String fieldName); /** * Gets the statistic that is being tracked for the data type, field, statistic type, and tag * specified. * * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param statisticType the statistic type for the statistic to get * @param typeName the data type name to get the statistic for * @param fieldName * @param tag the tag of the statistic, if not specified, a tag will be inferred * @return the statistic, or null if no statistic matches the criteria */ , R> FieldStatistic getFieldStatistic( final StatisticType statisticType, final String typeName, final String fieldName, @Nullable final String tag); /** * The statistic value of this stat (if multiple bins match, it will automatically aggregate the * resulting values together). For statistics with bins, it will always aggregate all bins. * * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param stat the statistic to get the value for * @return the statistic's value, aggregated together if there are multiple matching values. */ default , R> R getStatisticValue(final Statistic stat) { return getStatisticValue(stat, BinConstraints.allBins()); } /** * The statistic value of this stat (if multiple bins match, it will automatically aggregate the * resulting values together). * * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param stat the statistic to get the value for * @param binConstraints the bin(s) to get the value for based on the constraints * @return the statistic's value, aggregated together if there are multiple matching values. */ , R> R getStatisticValue( Statistic stat, BinConstraints binConstraints); /** * Returns all of the statistic values of this stat as well as the associated bin. It will return * each individual match as a bin-value pair. 
* * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param stat the statistic to get the value for * @return the statistic bin-value pairs, if there are multiple matching values which should only * be the case for different bins it will return each individual value. It will return an * empty iterator if there are no matching values. */ default , R> CloseableIterator> getBinnedStatisticValues( final Statistic stat) { return getBinnedStatisticValues(stat, BinConstraints.allBins()); } /** * The statistic values of this stat as well as the associated bin. If multiple bins match, it * will return each individual match as a bin-value pair. * * @param the StatisticValue implementation of the statistic * @param the raw value type of the statistic * @param stat the statistic to get the value for * @param binConstraints the bin(s) to get the value for based on the constraints * @return the statistic bin-value pairs, if there are multiple matching values which should only * be the case for different bins it will return each individual value. It will return an * empty iterator if there are no matching values. 
*/ , R> CloseableIterator> getBinnedStatisticValues( Statistic stat, BinConstraints binConstraints); /** * Get data statistics that match the given query criteria * * @param query the query criteria, use StatisticQueryBuilder or its extensions and if you're * interested in a particular common statistics type use StatisticsQueryBuilder.factory() * @return An array of statistics that result from the query */ , R> CloseableIterator queryStatistics(StatisticQuery query); /** * Get a single statistical result that matches the given query criteria * * @param query the query criteria, use StatisticQueryBuilder or its extensions and if you're * interested in a particular common statistics type use StatisticsQueryBuilder.factory() * @return If the query does not define that statistics type it will return null as aggregation * only makes sense within a single type, otherwise aggregates the results of the query * into a single result that is returned */ , R> V aggregateStatistics(StatisticQuery query); /** * Add an index to the data store. * * @param index the index to add */ void addIndex(Index index); /** * Get the indices that have been used within this data store. * * @return all indices used within this datastore */ Index[] getIndices(); /** * Get the indices that have been used within this data store for a particular type. If data type * name is null it will return all indices. * * @param the data type name * * @return An array of the indices for a given data type. */ Index[] getIndices(String typeName); /** * Get a particular index by its index name. If one doesn't exist it will return null. 
* * @param indexName the index name for which to retrieve an index * @return The index matching the specified index name or null if it doesn't exist */ Index getIndex(String indexName); /** * copy all data from this store into a specified other store * * @param other the other store to copy data into */ void copyTo(DataStore other); /** * copy the subset of data matching this query from this store into a specified other store * * @param other the other store to copy data into * @param query a query to select which data to copy - use QueryBuilder or its extension to create */ void copyTo(DataStore other, Query query); /** * Add new indices for the given type. If there is data in other indices for this type, for * consistency it will need to copy all of the data into the new indices, which could be a long * process for lots of data. * * @param typeName the type * @param indices the new indices to add */ void addIndex(String typeName, Index... indices); /** * remove an index completely for all types. If this is the last index for any type it throws an * illegal state exception, expecting the user to remove the type before removing the index to * protect a user from losing any reference to their data unknowingly for a type. * * @param indexName the index * @throws IllegalStateException if this is the last index for a type, remove the type first */ void removeIndex(String indexName) throws IllegalStateException; /** * remove an index for the given type. If this is the last index for that type it throws an * illegal state exception, expecting the user to remove the type before removing the index to * protect a user from losing any reference to their data unknowingly for a type. 
* * @param typeName the type * @param indexName the index * @throws IllegalStateException if this is the last index for a type, remove the type first */ void removeIndex(String typeName, String indexName) throws IllegalStateException; /** * Remove all data and statistics associated with the given type. * * @param typeName the type */ void removeType(String typeName); /** * Delete all data in this data store that matches the query parameter. * *

Statistics are updated as required. * * @param query the query criteria to use for deletion * @return true on success */ boolean delete(final Query query); /** * Delete ALL data and ALL metadata for this datastore. This is provided for convenience as a * simple way to wipe a datastore cleanly, but don't be surprised if everything is gone. */ void deleteAll(); /** * Add this type to the data store. This only needs to be called one time ever per type. * * @param dataTypeAdapter the data type adapter for this type that is used to read and write * GeoWave entries * @param initialIndices the initial indexing for this type, in the future additional indices can * be added */ void addType(DataTypeAdapter dataTypeAdapter, Index... initialIndices); /** * Add this type to the data store with the given statistics. This only needs to be called one * time ever per type. * * @param dataTypeAdapter the data type adapter for this type that is used to read and write * GeoWave entries * @param statistics the initial set of statistics that will be used with this adapter * @param initialIndices the initial indexing for this type, in the future additional indices can * be added */ void addType( DataTypeAdapter dataTypeAdapter, List> statistics, Index... initialIndices); /** * Add this type to the data store with the given statistics and visibility handler. This only * needs to be called one time ever per type. * * @param dataTypeAdapter the data type adapter for this type that is used to read and write * GeoWave entries * @param visibilityHandler the visibility handler for the adapter entries * @param statistics the initial set of statistics that will be used with this adapter * @param initialIndices the initial indexing for this type, in the future additional indices can * be added */ void addType( DataTypeAdapter dataTypeAdapter, VisibilityHandler visibilityHandler, List> statistics, Index... 
initialIndices); /** * Returns an index writer to perform batched write operations for the given data type name. It * assumes the type has already been used previously or added using addType and assumes one or * more indices have been provided for this type. * * @param typeName the type * @return a writer which can be used to write entries into this datastore of the given type */ Writer createWriter(String typeName); /** * Returns an index writer to perform batched write operations for the given data type name. It * assumes the type has already been used previously or added using addType and assumes one or * more indices have been provided for this type. * * @param typeName the type * @param visibilityHandler the visibility handler for newly written entries * @return a writer which can be used to write entries into this datastore of the given type */ Writer createWriter(String typeName, VisibilityHandler visibilityHandler); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/DataStoreFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; /** * This is a very simple way to create a data store given an instance of that particular data * store's options */ public class DataStoreFactory { /** * Create a data store given that particular datastore implementation's options. The options * usually define connection parameters as well as other useful configuration particular to that * datastore. * * @param requiredOptions the options for the desired data store * @return the data store */ public static DataStore createDataStore(final StoreFactoryOptions requiredOptions) { return new DataStorePluginOptions(requiredOptions).createDataStore(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/DataTypeAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.util.Map; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl; import org.locationtech.geowave.core.store.data.DataReader; import org.locationtech.geowave.core.store.data.DataWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import com.beust.jcommander.internal.Maps; /** * This interface should be implemented by any custom data type that must be stored in the GeoWave * index. It enables storing and retrieving the data, as well as translating the data into values * and queries that can be used to index. Additionally, each entry is responsible for providing * visibility if applicable. * * @param The type of entries that this adapter works on. */ public interface DataTypeAdapter extends DataReader, DataWriter, Persistable { /** * Return the data adapter's type name. This also must be unique within a datastore. * * @return the type name which serves as a unique identifier for this adapter */ String getTypeName(); /** * Get a data ID for the entry. This should uniquely identify the entry in the data set. 
* * @param entry the entry * @return the data ID */ byte[] getDataId(T entry); default InternalDataAdapter asInternalAdapter(final short internalAdapterId) { return new InternalDataAdapterImpl<>(this, internalAdapterId); } default InternalDataAdapter asInternalAdapter( final short internalAdapterId, final VisibilityHandler visibilityHandler) { return new InternalDataAdapterImpl<>(this, internalAdapterId, visibilityHandler); } @SuppressWarnings("unchecked") @Override default FieldWriter getWriter(final String fieldName) { final FieldDescriptor descriptor = getFieldDescriptor(fieldName); if (descriptor == null) { throw new IllegalArgumentException("'" + fieldName + "' does not exist for field writer"); } return (FieldWriter) FieldUtils.getDefaultWriterForClass(descriptor.bindingClass()); } @SuppressWarnings("unchecked") @Override default FieldReader getReader(final String fieldName) { final FieldDescriptor descriptor = getFieldDescriptor(fieldName); if (descriptor == null) { throw new IllegalArgumentException("'" + fieldName + "' does not exist for field reader"); } return (FieldReader) FieldUtils.getDefaultReaderForClass(descriptor.bindingClass()); } /** * Returns the value of the field with the given name from the entry. * * @param entry the entry * @param fieldName the field name * @return the value of the field on the entry */ Object getFieldValue(T entry, String fieldName); /** * Return the class that represents the data stored by this adapter. 
* * @return the class of the data */ Class getDataClass(); RowBuilder newRowBuilder(FieldDescriptor[] outputFieldDescriptors); FieldDescriptor[] getFieldDescriptors(); FieldDescriptor getFieldDescriptor(String fieldName); default Map describe() { return Maps.newHashMap(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/DataTypeStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.statistics.StatisticId; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import com.beust.jcommander.Parameter; /** * Base class for data type statistics. These statistics are generally updated without looking at * individual fields on the data type. */ public abstract class DataTypeStatistic> extends Statistic { @Parameter( names = "--typeName", required = true, description = "The data type for the statistic.") private String typeName = null; public DataTypeStatistic(final DataTypeStatisticType statisticsType) { super(statisticsType); } public DataTypeStatistic(final DataTypeStatisticType statisticsType, final String typeName) { super(statisticsType); this.typeName = typeName; } public void setTypeName(final String name) { this.typeName = name; } public final String getTypeName() { return typeName; } @Override public boolean isCompatibleWith(final Class adapterClass) { return true; } @Override public final StatisticId getId() { if (cachedStatisticId == null) { cachedStatisticId = generateStatisticId(typeName, (DataTypeStatisticType) getStatisticType(), getTag()); } return cachedStatisticId; } @Override protected int byteLength() { return super.byteLength() + VarintUtils.unsignedShortByteLength((short) typeName.length()) + typeName.length(); } @Override protected void writeBytes(final ByteBuffer buffer) 
{ super.writeBytes(buffer); VarintUtils.writeUnsignedShort((short) typeName.length(), buffer); buffer.put(StringUtils.stringToBinary(typeName)); } @Override protected void readBytes(final ByteBuffer buffer) { super.readBytes(buffer); final byte[] nameBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(nameBytes); typeName = StringUtils.stringFromBinary(nameBytes); } @Override public String toString() { final StringBuffer buffer = new StringBuffer(); buffer.append(getStatisticType().getString()).append("[type=").append(typeName).append("]"); return buffer.toString(); } public static > StatisticId generateStatisticId( final String typeName, final DataTypeStatisticType statisticType, final String tag) { return new StatisticId<>(generateGroupId(typeName), statisticType, tag); } public static ByteArray generateGroupId(final String typeName) { return new ByteArray("A" + typeName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/FieldStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.statistics.StatisticId; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticId; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import com.beust.jcommander.Parameter; /** * Base class for field statistics. These statistics are generally updated by using a specific field * on a data type. */ public abstract class FieldStatistic> extends Statistic { @Parameter( names = "--typeName", required = true, description = "The data type that contains the field for the statistic.") private String typeName = null; @Parameter( names = "--fieldName", required = true, description = "The field name to use for statistics.") private String fieldName = null; public FieldStatistic(final FieldStatisticType statisticsType) { this(statisticsType, null, null); } public FieldStatistic( final FieldStatisticType statisticsType, final String typeName, final String fieldName) { super(statisticsType); this.typeName = typeName; this.fieldName = fieldName; } public void setTypeName(final String name) { this.typeName = name; } public final String getTypeName() { return typeName; } public void setFieldName(final String fieldName) { this.fieldName = fieldName; } public String getFieldName() { return this.fieldName; } @Override public abstract boolean isCompatibleWith(Class fieldClass); @Override public final StatisticId 
getId() { if (cachedStatisticId == null) { cachedStatisticId = generateStatisticId( typeName, (FieldStatisticType) getStatisticType(), fieldName, getTag()); } return cachedStatisticId; } @Override protected int byteLength() { return super.byteLength() + VarintUtils.unsignedShortByteLength((short) typeName.length()) + VarintUtils.unsignedShortByteLength((short) fieldName.length()) + typeName.length() + fieldName.length(); } @Override protected void writeBytes(final ByteBuffer buffer) { super.writeBytes(buffer); VarintUtils.writeUnsignedShort((short) typeName.length(), buffer); buffer.put(StringUtils.stringToBinary(typeName)); VarintUtils.writeUnsignedShort((short) fieldName.length(), buffer); buffer.put(StringUtils.stringToBinary(fieldName)); } @Override protected void readBytes(final ByteBuffer buffer) { super.readBytes(buffer); final byte[] typeBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(typeBytes); final byte[] nameBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(nameBytes); typeName = StringUtils.stringFromBinary(typeBytes); fieldName = StringUtils.stringFromBinary(nameBytes); } @Override public String toString() { final StringBuffer buffer = new StringBuffer(); buffer.append(getStatisticType().getString()).append("[type=").append(typeName).append( ", field=").append(fieldName).append("]"); return buffer.toString(); } public static > StatisticId generateStatisticId( final String typeName, final FieldStatisticType statisticType, final String fieldName, final String tag) { return new FieldStatisticId<>(generateGroupId(typeName), statisticType, fieldName, tag); } public static ByteArray generateGroupId(final String typeName) { return new ByteArray("F" + typeName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/Index.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/**
 * An index represents how to efficiently store and retrieve data. The common index model allows for
 * easily searching certain fields across all types within an index. The numeric index strategy maps
 * real-world values to insertion keys and query ranges for efficient range scans within a key-value
 * store.
 *
 * <p> Extends {@link Persistable} so that index definitions can be serialized and stored alongside
 * the data they index.
 */
public interface Index extends Persistable {
  /**
   * get the name of the index
   *
   * @return the name of the index (should be unique per data store)
   */
  String getName();

  /**
   * get the index strategy which maps real-world values to insertion keys and query ranges for
   * efficient range scans within a key-value store.
   *
   * @return the numeric index strategy
   */
  NumericIndexStrategy getIndexStrategy();

  /**
   * The common index model allows for easily searching certain fields across all types within an
   * index. For example, if geometry is a common index field, one could ubiquitously search all
   * types within this index spatially. This could apply to any field type desired.
   *
   * @return the common index model
   */
  CommonIndexModel getIndexModel();
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/IndexFieldMapper.java
================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.nio.ByteBuffer; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.index.IndexFieldMapperRegistry; import com.google.common.collect.Sets; /** * Abstract base class for mapping one or more adapter fields to a single index field. These field * mappers are registered and discovered via SPI through the {@link IndexFieldMapperRegistry}. * * @param the adapter field type * @param the index field type */ public abstract class IndexFieldMapper implements Persistable { protected String indexFieldName = null; protected String[] adapterFields = null; public final void init( final String indexFieldName, final List> inputFieldDescriptors, final IndexFieldOptions options) { this.indexFieldName = indexFieldName; this.adapterFields = inputFieldDescriptors.stream().map(FieldDescriptor::fieldName).toArray(String[]::new); initFromOptions(inputFieldDescriptors, options); } /** * Initialize the field mapper with the given field descriptors and index field options. 
* * @param inputFieldDescriptors the adapter field descriptors to use in the mapping * @param options the index field options provided by the index */ protected void initFromOptions( final List> inputFieldDescriptors, final IndexFieldOptions options) {}; /** * As a performance measure, sometimes the queried data will vary from the data that was ingested. * For example querying a spatial index with a custom CRS will return data in that CRS, even if * the data was originally in a different CRS. This method transforms the adapter field * descriptors to appropriately represent the queried data. * * @param fieldDescriptors the output field descriptors */ public void transformFieldDescriptors(final FieldDescriptor[] fieldDescriptors) {} /** * @return the adapter field names used in the mapping */ public String[] getAdapterFields() { return adapterFields; } /** * @return the adapter field names used in the mapping, ordered by the index dimensions they are * associated with */ public String[] getIndexOrderedAdapterFields() { return adapterFields; } /** * @return the index field used in the mapping */ public String indexFieldName() { return indexFieldName; } /** * Converts native field values to the value expected by the index. * * @param nativeFieldValues the native field values * @return the value to use in the index */ public abstract I toIndex(final List nativeFieldValues); /** * Converts an index value back to the fields used by the adapter. 
* * @param indexFieldValue the index value * @return the adapter values */ public abstract void toAdapter(I indexFieldValue, RowBuilder rowBuilder); /** * @return the index field type */ public abstract Class indexFieldType(); /** * @return the adapter field type */ public abstract Class adapterFieldType(); /** * @return a set of suggested adapter field names that might be associated with this field mapper */ public Set getLowerCaseSuggestedFieldNames() { return Sets.newHashSet(); } public boolean isCompatibleWith(final Class fieldClass) { // The logic here is that if the index field type is the same as the adapter field type, most // likely the field value will be directly used by the index, so the child class would be // preserved. If they don't match, a transformation will occur, in which case an exact match // would be needed to be able to transform the index value back to the appropriate adapter field // type. if (indexFieldType().equals(adapterFieldType())) { return adapterFieldType().isAssignableFrom(fieldClass); } return adapterFieldType().equals(fieldClass); } /** * @return the number of adapter fields used in the index field mapping */ public abstract short adapterFieldCount(); private byte[] indexFieldBytes = null; private byte[] adapterFieldsBytes = null; protected int byteLength() { indexFieldBytes = StringUtils.stringToBinary(indexFieldName); adapterFieldsBytes = StringUtils.stringsToBinary(adapterFields); return VarintUtils.unsignedShortByteLength((short) indexFieldBytes.length) + indexFieldBytes.length + VarintUtils.unsignedShortByteLength((short) adapterFieldsBytes.length) + adapterFieldsBytes.length; } protected void writeBytes(final ByteBuffer buffer) { VarintUtils.writeUnsignedShort((short) indexFieldBytes.length, buffer); buffer.put(indexFieldBytes); VarintUtils.writeUnsignedShort((short) adapterFieldsBytes.length, buffer); buffer.put(adapterFieldsBytes); } protected void readBytes(final ByteBuffer buffer) { indexFieldBytes = new 
byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(indexFieldBytes); this.indexFieldName = StringUtils.stringFromBinary(indexFieldBytes); adapterFieldsBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(adapterFieldsBytes); this.adapterFields = StringUtils.stringsFromBinary(adapterFieldsBytes); indexFieldBytes = null; adapterFieldsBytes = null; } @Override public final byte[] toBinary() { final ByteBuffer buffer = ByteBuffer.allocate(byteLength()); writeBytes(buffer); return buffer.array(); } @Override public final void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); readBytes(buffer); } /** * Provides an open-ended interface so that custom index fields can provide any information to the * mapper that may be needed. One example is that spatial index fields provide CRS information to * spatial field mappers. */ public static interface IndexFieldOptions { } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/IndexStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;
import com.beust.jcommander.Parameter;

/**
 * Base class for index statistics. These statistics are generally updated without using specific
 * details of the entry or the data type.
 *
 * @param <V> the statistic value type
 */
public abstract class IndexStatistic<V extends StatisticValue<?>> extends Statistic<V> {

  @Parameter(names = "--indexName", required = true, description = "The index for the statistic.")
  private String indexName = null;

  public IndexStatistic(final IndexStatisticType<V> statisticsType) {
    this(statisticsType, null);
  }

  public IndexStatistic(final IndexStatisticType<V> statisticsType, final String indexName) {
    super(statisticsType);
    this.indexName = indexName;
  }

  public void setIndexName(final String name) {
    this.indexName = name;
  }

  public String getIndexName() {
    return indexName;
  }

  /** Index statistics are index-agnostic, so any index class is acceptable. */
  @Override
  public boolean isCompatibleWith(final Class<?> indexClass) {
    return true;
  }

  @SuppressWarnings("unchecked")
  @Override
  public final StatisticId<V> getId() {
    if (cachedStatisticId == null) {
      // Lazily compute and cache; the id is derived from index name, type, and tag.
      cachedStatisticId =
          generateStatisticId(indexName, (IndexStatisticType<V>) getStatisticType(), getTag());
    }
    return cachedStatisticId;
  }

  @Override
  protected int byteLength() {
    // Measure the serialized (UTF-8) byte length rather than the char count so the
    // buffer is sized correctly for non-ASCII index names (writeBytes emits the
    // binary form produced by StringUtils.stringToBinary).
    final byte[] nameBytes = StringUtils.stringToBinary(indexName);
    return super.byteLength()
        + VarintUtils.unsignedShortByteLength((short) nameBytes.length)
        + nameBytes.length;
  }

  @Override
  protected void writeBytes(final ByteBuffer buffer) {
    super.writeBytes(buffer);
    // Write the byte count (not char count) so readBytes can recover the exact bytes.
    final byte[] nameBytes = StringUtils.stringToBinary(indexName);
    VarintUtils.writeUnsignedShort((short) nameBytes.length, buffer);
    buffer.put(nameBytes);
  }

  @Override
  protected void readBytes(final ByteBuffer buffer) {
    super.readBytes(buffer);
    final byte[] nameBytes = new byte[VarintUtils.readUnsignedShort(buffer)];
    buffer.get(nameBytes);
    indexName = StringUtils.stringFromBinary(nameBytes);
  }

  @Override
  public String toString() {
    final StringBuffer buffer = new StringBuffer();
    buffer.append(getStatisticType().getString()).append("[index=").append(indexName).append("]");
    return buffer.toString();
  }

  /**
   * Generate a statistic id for an index statistic.
   *
   * @param <V> the statistic value type
   * @param indexName the index the statistic belongs to
   * @param statisticType the statistic type
   * @param tag the statistic tag
   * @return the statistic id
   */
  public static <V extends StatisticValue<?>> StatisticId<V> generateStatisticId(
      final String indexName,
      final IndexStatisticType<V> statisticType,
      final String tag) {
    return new StatisticId<>(generateGroupId(indexName), statisticType, tag);
  }

  /**
   * Generate the group id for statistics of the given index ("I" prefix distinguishes index
   * groups from other statistic groups).
   *
   * @param indexName the index name
   * @return the group id
   */
  public static ByteArray generateGroupId(final String indexName) {
    return new ByteArray("I" + indexName);
  }
}

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.util.Properties;
import java.util.function.Function;
import java.util.function.Predicate;
import org.locationtech.geowave.core.store.ingest.IngestOptionsBuilderImpl;
import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;

/**
 * When ingesting into a DataStore from a URL, this is a set of available options that can be
 * provided. Use the Builder to construct IngestOptions.
 *
 * @param <T> the type for entries that are being ingested
 */
public class IngestOptions<T> {
  /**
   * A Builder to create IngestOptions
   *
   * @param <T> the type for entries that are being ingested
   */
  public static interface Builder<T> {
    /**
     * the ingest format plugin which does the actual parsing of the files and converting to GeoWave
     * entries
     *
     * @param format the format
     * @return this builder
     */
    Builder<T> format(LocalFileIngestPlugin<T> format);

    /**
     * Number of threads to use for ingest
     *
     * @param threads the number of threads
     * @return this builder
     */
    Builder<T> threads(int threads);

    /**
     * Set a visibility handler that will be applied to all data ingested
     *
     * @param visibilityHandler the visibility handler to use
     * @return this builder
     */
    Builder<T> visibility(VisibilityHandler visibilityHandler);

    /**
     * Set an array of acceptable file extensions. If this is empty, all files will be accepted
     * regardless of extension. Additionally each format plugin may only accept certain file
     * extensions.
     *
     * @param fileExtensions the array of acceptable file extensions
     * @return this builder
     */
    Builder<T> extensions(String[] fileExtensions);

    /**
     * Add a new file extension to the array of acceptable file extensions
     *
     * @param fileExtension the file extension to add
     * @return this builder
     */
    Builder<T> addExtension(String fileExtension);

    /**
     * Filter data prior to being ingesting using a Predicate (if transform is provided, transform
     * will be applied before the filter)
     *
     * @param filter the filter
     * @return this builder
     */
    Builder<T> filter(Predicate<T> filter);

    /**
     * Transform the data prior to ingestion
     *
     * @param transform the transform function
     * @return this builder
     */
    Builder<T> transform(Function<T, T> transform);

    /**
     * register a callback to get notifications of the data and its insertion ID(s) within the
     * indices after it has been ingested.
     *
     * @param callback the callback
     * @return this builder
     */
    Builder<T> callback(IngestCallback<T> callback);

    /**
     * provide properties used for particular URL handlers
     *
     * @param properties for URL handlers such as s3.endpoint.url=s3.amazonaws.com or
     *        hdfs.defaultFS.url=sandbox.mydomain.com:8020
     * @return this builder
     */
    Builder<T> properties(Properties properties);

    /**
     * Construct the IngestOptions with the provided values from this builder
     *
     * @return the IngestOptions
     */
    IngestOptions<T> build();
  }

  /**
   * get a default implementation of this builder
   *
   * @return a new builder
   */
  public static <T> Builder<T> newBuilder() {
    return new IngestOptionsBuilderImpl<>();
  }

  /**
   * An interface to get callbacks of ingest
   *
   * @param <T> the type of data ingested
   */
  public static interface IngestCallback<T> {
    void dataWritten(WriteResults insertionIds, T data);
  }

  private final LocalFileIngestPlugin<T> format;
  private final int threads;
  private final VisibilityHandler visibilityHandler;
  private final String[] fileExtensions;
  private final Predicate<T> filter;
  private final Function<T, T> transform;
  private final IngestCallback<T> callback;
  private final Properties properties;

  /**
   * Use the Builder to construct instead of this constructor.
   *
   * @param format the ingest format plugin
   * @param threads number of threads
   * @param visibilityHandler visibility handler applied to all entries
   * @param fileExtensions an array of acceptable file extensions
   * @param filter a function to filter entries prior to ingest
   * @param transform a function to transform entries prior to ingest
   * @param callback a callback to get entries ingested and their insertion ID(s) in GeoWave
   * @param properties properties used for particular URL handlers
   */
  public IngestOptions(
      final LocalFileIngestPlugin<T> format,
      final int threads,
      final VisibilityHandler visibilityHandler,
      final String[] fileExtensions,
      final Predicate<T> filter,
      final Function<T, T> transform,
      final IngestCallback<T> callback,
      final Properties properties) {
    super();
    this.format = format;
    this.threads = threads;
    this.visibilityHandler = visibilityHandler;
    this.fileExtensions = fileExtensions;
    this.filter = filter;
    this.transform = transform;
    this.callback = callback;
    this.properties = properties;
  }

  public LocalFileIngestPlugin<T> getFormat() {
    return format;
  }

  public int getThreads() {
    return threads;
  }

  public VisibilityHandler getVisibilityHandler() {
    return visibilityHandler;
  }

  public String[] getFileExtensions() {
    return fileExtensions;
  }

  public Predicate<T> getFilter() {
    return filter;
  }

  public Function<T, T> getTransform() {
    return transform;
  }

  public IngestCallback<T> getCallback() {
    return callback;
  }

  public Properties getProperties() {
    return properties;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.store.query.BaseQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; /** * This represent all the constraints and options available in a geowave query. Use QueryBuilder or * one of its extensions to construct this object. * * @param the type of data being retrieved */ public class Query extends BaseQuery> { public Query() { super(); } /** * This is better built through QueryBuilder or one of its extensions. * * @param commonQueryOptions * @param dataTypeQueryOptions * @param indexQueryOptions * @param queryConstraints */ public Query( final CommonQueryOptions commonQueryOptions, final FilterByTypeQueryOptions dataTypeQueryOptions, final IndexQueryOptions indexQueryOptions, final QueryConstraints queryConstraints) { super(commonQueryOptions, dataTypeQueryOptions, indexQueryOptions, queryConstraints); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/QueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.util.Arrays;
import org.locationtech.geowave.core.store.query.BaseQueryBuilder;
import org.locationtech.geowave.core.store.query.QueryBuilderImpl;

/**
 * A QueryBuilder can be used to easily construct a query which can be used to retrieve data from a
 * GeoWave datastore.
 *
 * @param <T> the data type
 * @param <R> the type of the builder so that extensions of this builder can maintain type
 */
public interface QueryBuilder<T, R extends QueryBuilder<T, R>> extends
    BaseQueryBuilder<T, Query<T>, R> {
  /**
   * retrieve all data types (this is the default behavior)
   *
   * @return this builder
   */
  R allTypes();

  /**
   * add a type name to filter by
   *
   * @param typeName the type name
   * @return this builder
   */
  R addTypeName(String typeName);

  /**
   * set the type names to filter by - an empty array will filter by all types.
   *
   * @param typeNames the type names
   * @return this builder
   */
  R setTypeNames(String[] typeNames);

  /**
   * Subset fields by field names. If empty it will get all fields.
   *
   * @param typeName the type name
   * @param fieldNames the field names to subset
   * @return this builder
   */
  R subsetFields(String typeName, String... fieldNames);

  /**
   * retrieve all fields (this is the default behavior)
   *
   * @return this builder
   */
  R allFields();

  /**
   * get a default query builder
   *
   * @return the new builder
   */
  static <T> QueryBuilder<T, ?> newBuilder() {
    return new QueryBuilderImpl<>();
  }

  /**
   * get a default query builder typed by the given class
   *
   * @param clazz the class of the data type (used only for type inference)
   * @return the new builder
   */
  static <T> QueryBuilder<T, ?> newBuilder(Class<T> clazz) {
    return new QueryBuilderImpl<>();
  }

  /**
   * get a query builder pre-filtered to the given adapters' type names
   *
   * @param adapter the primary adapter to filter by
   * @param otherAdapters any additional adapters to filter by
   * @return the new builder
   */
  @SafeVarargs
  static <T> QueryBuilder<T, ?> newBuilder(
      DataTypeAdapter<T> adapter,
      DataTypeAdapter<T>... otherAdapters) {
    final QueryBuilder<T, ?> queryBuilder = new QueryBuilderImpl<>();
    queryBuilder.addTypeName(adapter.getTypeName());
    if ((otherAdapters != null) && (otherAdapters.length > 0)) {
      Arrays.stream(otherAdapters).forEach(a -> queryBuilder.addTypeName(a.getTypeName()));
    }
    return queryBuilder;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.query.constraints.Constraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation; /** This is a simple mechanism to create existing supported query constraints. */ public interface QueryConstraintsFactory { /** * constrain a query by data IDs * * @param dataIds the data IDs to constrain to * @return the constraints */ QueryConstraints dataIds(final byte[]... 
dataIds); /** * constrain a query using a range of data IDs, assuming big endian ordering * * @param startDataIdInclusive the start of data ID range (inclusive) * @param endDataIdInclusive the end of data ID range (inclusive) * @return the constraints */ QueryConstraints dataIdsByRange( final byte[] startDataIdInclusive, final byte[] endDataIdInclusive); /** * constrain a query using a range of data IDs, assuming big endian ordering * * RocksDB and HBase are currently the only two that will support this, but allows for reverse * iteration from "end" to "start" data ID * * All other datastores will throw an UnsupportedOperationException and the forward scan should be * preferred for those datastores * * @param startDataIdInclusive the start of data ID range (inclusive) * @param endDataIdInclusive the end of data ID range (inclusive) * @return the constraints */ QueryConstraints dataIdsByRangeReverse( final byte[] startDataIdInclusive, final byte[] endDataIdInclusive); /** * constrain a query by prefix * * @param partitionKey the prefix * @param sortKeyPrefix the sort prefix * @return the constraints */ QueryConstraints prefix(final byte[] partitionKey, final byte[] sortKeyPrefix); /** * constrain by coordinate ranges * * @param indexStrategy the index strategy * @param coordinateRanges the coordinate ranges * @return the constraints */ QueryConstraints coordinateRanges( final NumericIndexStrategy indexStrategy, final MultiDimensionalCoordinateRangesArray[] coordinateRanges); /** * constrain generally by constraints * * @param constraints the constraints * @return the query constraints */ QueryConstraints constraints(final Constraints constraints); /** * constrain generally by constraints with a compare operation * * @param constraints the constraints * @param compareOp the relationship to use for comparison * @return the query constraints */ QueryConstraints constraints( final Constraints constraints, final BasicQueryCompareOperation compareOp); /** * constrain 
using a custom persistable object NOTE: this only applies to an index that is a * {@link CustomIndex} and the instance of these custom constraints must match the generic of the * custom index's strategy * * @param customConstraints the instance of custom constraints * @return the query constraints */ QueryConstraints customConstraints(final Persistable customConstraints); /** * no query constraints, meaning wide open query (this is the default) * * @return the query constraints */ QueryConstraints noConstraints(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/RowBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.util.Map;

/**
 * Interface for building data type rows from a set of field values.
 *
 * @param <T> the data type
 */
public interface RowBuilder<T> {
  /**
   * Set a field name/value pair
   *
   * @param fieldName the name of the field to set
   * @param fieldValue the value for the field
   */
  void setField(String fieldName, Object fieldValue);

  /**
   * Sets a set of fields on the row builder
   *
   * @param values the field name to value mapping to set
   */
  void setFields(Map<String, Object> values);

  /**
   * Create a row with the previously set fields
   *
   * @param dataId the unique data ID for the row
   * @return the row
   */
  T buildRow(byte[] dataId);
}
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import org.locationtech.geowave.core.store.statistics.StatisticType;
import com.beust.jcommander.Parameter;

/**
 * Base class for all GeoWave statistics. A statistic has a type, a tag, and an optional binning
 * strategy, and knows how to serialize/deserialize itself via {@link Persistable}.
 *
 * @param <V> the statistic value type
 */
public abstract class Statistic<V extends StatisticValue<?>> implements Persistable {
  /**
   * Statistics that are used by internal GeoWave systems use this tag.
   */
  public static final String INTERNAL_TAG = "internal";

  /**
   * Statistics that are not explicitly tagged and do not have a binning strategy will use this tag.
   */
  public static final String DEFAULT_TAG = "default";

  @Parameter(
      names = "--tag",
      description = "A tag for the statistic. If one is not provided, a default will be set.")
  private String tag = null;

  private StatisticBinningStrategy binningStrategy = null;

  private final StatisticType<V> statisticType;

  protected StatisticId<V> cachedStatisticId = null;

  // Transient cache for the serialized binning strategy, populated by byteLength() so
  // writeBytes() does not have to serialize it twice.
  private byte[] binningStrategyBytesCache = null;

  public Statistic(final StatisticType<V> statisticType) {
    this.statisticType = statisticType;
  }

  /**
   * Get a human-readable description of this statistic.
   *
   * @return a description of the statistic
   */
  public abstract String getDescription();

  /**
   * Create a new value for this statistic, initialized to a base state (no entries ingested).
   *
   * @return the new value
   */
  public abstract V createEmpty();

  /**
   * @return {@code true} if the statistic is an internal statistic
   */
  public boolean isInternal() {
    return INTERNAL_TAG.equals(getTag());
  }

  /**
   * Determine if the statistic is compatible with the given class.
   *
   * @param clazz the class to check
   * @return {@code true} if the statistic is compatible
   */
  public abstract boolean isCompatibleWith(final Class<?> clazz);

  /**
   * Return the unique identifier for the statistic.
   *
   * @return the statistic id
   */
  public abstract StatisticId<V> getId();

  public void setTag(final String tag) {
    this.tag = tag;
  }

  public void setInternal() {
    this.tag = INTERNAL_TAG;
  }

  /**
   * Get the tag for the statistic. Falls back to the binning strategy's default tag, then to
   * {@link #DEFAULT_TAG}, when no explicit tag was set.
   *
   * @return the tag
   */
  public final String getTag() {
    if (tag == null) {
      return binningStrategy != null ? binningStrategy.getDefaultTag() : DEFAULT_TAG;
    }
    return tag;
  }

  public void setBinningStrategy(final StatisticBinningStrategy binningStrategy) {
    this.binningStrategy = binningStrategy;
  }

  /**
   * Returns the binning strategy used by the statistic.
   *
   * @return the binning strategy, or {@code null} if there is none
   */
  public StatisticBinningStrategy getBinningStrategy() {
    return binningStrategy;
  }

  /**
   * Get the statistic type associated with the statistic.
   *
   * @return the statistic type
   */
  public final StatisticType<V> getStatisticType() {
    return statisticType;
  }

  protected int byteLength() {
    binningStrategyBytesCache = PersistenceUtils.toBinary(binningStrategy);
    // Measure the tag in serialized (UTF-8) bytes rather than chars so the buffer is
    // sized correctly for non-ASCII tags (writeBytes emits StringUtils.stringToBinary).
    final byte[] tagBytes = StringUtils.stringToBinary(getTag());
    return VarintUtils.unsignedShortByteLength((short) binningStrategyBytesCache.length)
        + binningStrategyBytesCache.length
        + VarintUtils.unsignedShortByteLength((short) tagBytes.length)
        + tagBytes.length;
  }

  protected void writeBytes(final ByteBuffer buffer) {
    if (binningStrategyBytesCache == null) {
      binningStrategyBytesCache = PersistenceUtils.toBinary(binningStrategy);
    }
    VarintUtils.writeUnsignedShort((short) binningStrategyBytesCache.length, buffer);
    buffer.put(binningStrategyBytesCache);
    binningStrategyBytesCache = null;
    final byte[] stringBytes = StringUtils.stringToBinary(getTag());
    VarintUtils.writeUnsignedShort((short) stringBytes.length, buffer);
    buffer.put(stringBytes);
  }

  protected void readBytes(final ByteBuffer buffer) {
    short length = VarintUtils.readUnsignedShort(buffer);
    binningStrategyBytesCache = new byte[length];
    buffer.get(binningStrategyBytesCache);
    binningStrategy =
        (StatisticBinningStrategy) PersistenceUtils.fromBinary(binningStrategyBytesCache);
    binningStrategyBytesCache = null;
    length = VarintUtils.readUnsignedShort(buffer);
    final byte[] tagBytes = new byte[length];
    buffer.get(tagBytes);
    tag = StringUtils.stringFromBinary(tagBytes);
  }

  @Override
  public final byte[] toBinary() {
    final ByteBuffer buffer = ByteBuffer.allocate(byteLength());
    writeBytes(buffer);
    return buffer.array();
  }

  @Override
  public final void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    readBytes(buffer);
  }
}

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.util.Arrays;
import java.util.Set;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;

/**
 * Base interface for statistic binning strategies. These strategies allow a statistic's values to
 * be split up by an arbitrary strategy. This allows a simple statistic to be used in many different
 * ways.
 */
public interface StatisticBinningStrategy extends Persistable, BinningStrategy {
  /**
   * Get the name of the binning strategy.
   *
   * @return the binning strategy name
   */
  String getStrategyName();

  /**
   * Get a human-readable description of the binning strategy.
   *
   * @return a description of the binning strategy
   */
  String getDescription();

  /**
   * Get a human-readable string of a bin.
   *
   * @param bin the bin
   * @return the string value of the bin
   */
  String binToString(final ByteArray bin);

  /**
   * Get a default tag for statistics that use this binning strategy.
   *
   * @return the default tag
   */
  String getDefaultTag();

  /**
   * Adds all of the field names used by the binning strategy to the provided set.
   *
   * @param fieldsUsed the set to add field names to
   */
  default void addFieldsUsed(final Set<String> fieldsUsed) {}

  /**
   * The constraint classes that {@link #constraints(Object)} knows how to interpret.
   *
   * @return the supported constraint classes
   */
  default Class<?>[] supportedConstraintClasses() {
    return new Class<?>[] {
        ByteArray[].class,
        ByteArray.class,
        ByteArrayRange[].class,
        ByteArrayRange.class,
        String.class,
        String[].class,
        BinConstraints.class,
        ByteArrayConstraints.class};
  }

  /**
   * Convert an arbitrary constraint object (one of {@link #supportedConstraintClasses()}) into
   * explicit byte array constraints. Unsupported inputs yield unconstrained (empty) constraints.
   *
   * @param constraints the raw constraint object
   * @return the interpreted byte array constraints
   */
  default ByteArrayConstraints constraints(final Object constraints) {
    if (constraints instanceof ByteArray[]) {
      return new ExplicitConstraints((ByteArray[]) constraints);
    } else if (constraints instanceof ByteArray) {
      return new ExplicitConstraints(new ByteArray[] {(ByteArray) constraints});
    } else if (constraints instanceof String) {
      return new ExplicitConstraints(new ByteArray[] {new ByteArray((String) constraints)});
    } else if (constraints instanceof String[]) {
      return new ExplicitConstraints(
          Arrays.stream((String[]) constraints).map(ByteArray::new).toArray(ByteArray[]::new));
    } else if (constraints instanceof ByteArrayRange) {
      return new ExplicitConstraints(new ByteArrayRange[] {(ByteArrayRange) constraints});
    } else if (constraints instanceof ByteArrayRange[]) {
      return new ExplicitConstraints((ByteArrayRange[]) constraints);
    } else if (constraints instanceof ByteArrayConstraints) {
      return (ByteArrayConstraints) constraints;
    } else if (constraints instanceof BinConstraints) {
      return ((BinConstraints) constraints).constraints(null);
    }
    return new ExplicitConstraints();
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Base interface for statistic queries. * * @param the statistic value type * @param the return type of the statistic value */ public interface StatisticQuery, R> { /** * @return the statistic type for the query */ public StatisticType statisticType(); /** * @return the tag filter */ public String tag(); /** * @return the bin filter */ public BinConstraints binConstraints(); /** * @return the authorizations for the query */ public String[] authorizations(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/StatisticQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.Range;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.IndexMetaData;
import org.locationtech.geowave.core.store.adapter.statistics.histogram.FixedBinNumericHistogram;
import org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;
import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;
import org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic;
import org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic.BloomFilterValue;
import org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic;
import org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic.CountMinSketchValue;
import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;
import org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue;
import org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic;
import org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic.HyperLogLogPlusValue;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;
import org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic.NumericMeanValue;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;
import org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic.NumericStatsValue;
import org.locationtech.geowave.core.store.statistics.field.Stats;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic;
import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;
import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;
import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;
import org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic;
import org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic.MaxDuplicatesValue;
import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;
import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;
import org.locationtech.geowave.core.store.statistics.query.DataTypeStatisticQueryBuilder;
import org.locationtech.geowave.core.store.statistics.query.FieldStatisticQueryBuilder;
import org.locationtech.geowave.core.store.statistics.query.IndexStatisticQueryBuilder;
import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;
import com.clearspring.analytics.stream.frequency.CountMinSketch;
import com.google.common.hash.BloomFilter;

/**
 * Base interface for constructing statistic queries.
 *
 * @param <V> the statistic value type
 * @param <R> the return type of the statistic value
 * @param <B> the builder type
 */
public interface StatisticQueryBuilder<V extends StatisticValue<R>, R, B extends StatisticQueryBuilder<V, R, B>> {
  /**
   * Set the tag for the query. Only statistics that match the given tag will be queried.
   *
   * @param tag the tag to use
   * @return {@code this}
   */
  B tag(final String tag);

  /**
   * Set the tag filter to internal statistics. If this is set, only internal statistics will be
   * queried.
   *
   * @return {@code this}
   */
  B internal();

  /**
   * Add an authorization to the query.
   *
   * @param authorization the authorization to add
   * @return {@code this}
   */
  B addAuthorization(final String authorization);

  /**
   * Set the query authorizations to the given set.
   *
   * @param authorizations the authorizations to use
   * @return {@code this}
   */
  B authorizations(final String[] authorizations);

  /**
   * Sets the bins of the query. If a queried statistic uses a binning strategy, only values
   * contained in one of the bins matching {@code BinConstraints} will be returned.
   *
   * @param binConstraints the binConstraints object to use which will be appropriately interpreted
   *        for this query
   * @return {@code this}
   */
  B binConstraints(final BinConstraints binConstraints);

  /**
   * Build the statistic query.
   *
   * @return the statistic query
   */
  StatisticQuery<V, R> build();

  /**
   * Create a new index statistic query builder for the given statistic type.
   *
   * @param statisticType the index statistic type to query
   * @return the index statistic query builder
   */
  static <V extends StatisticValue<R>, R> IndexStatisticQueryBuilder<V, R> newBuilder(
      final IndexStatisticType<V> statisticType) {
    return new IndexStatisticQueryBuilder<>(statisticType);
  }

  /**
   * Create a new data type statistic query builder for the given statistic type.
   *
   * @param statisticType the data type statistic type to query
   * @return the data type statistic query builder
   */
  static <V extends StatisticValue<R>, R> DataTypeStatisticQueryBuilder<V, R> newBuilder(
      final DataTypeStatisticType<V> statisticType) {
    return new DataTypeStatisticQueryBuilder<>(statisticType);
  }

  /**
   * Create a new field statistic query builder for the given statistic type.
   *
   * @param statisticType the field statistic type to query
   * @return the field statistic query builder
   */
  static <V extends StatisticValue<R>, R> FieldStatisticQueryBuilder<V, R> newBuilder(
      final FieldStatisticType<V> statisticType) {
    return new FieldStatisticQueryBuilder<>(statisticType);
  }

  /**
   * Create a new index statistic query builder for a differing visibility count statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<DifferingVisibilityCountValue, Long> differingVisibilityCount() {
    return newBuilder(DifferingVisibilityCountStatistic.STATS_TYPE);
  }

  /**
   * Create a new index statistic query builder for a duplicate entry count statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<DuplicateEntryCountValue, Long> duplicateEntryCount() {
    return newBuilder(DuplicateEntryCountStatistic.STATS_TYPE);
  }

  /**
   * Create a new index statistic query builder for a field visibility count statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<FieldVisibilityCountValue, Map<ByteArray, Long>> fieldVisibilityCount() {
    return newBuilder(FieldVisibilityCountStatistic.STATS_TYPE);
  }

  /**
   * Create a new index statistic query builder for an index metadata set statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<IndexMetaDataSetValue, List<IndexMetaData>> indexMetaDataSet() {
    return newBuilder(IndexMetaDataSetStatistic.STATS_TYPE);
  }

  /**
   * Create a new index statistic query builder for a max duplicates statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<MaxDuplicatesValue, Long> maxDuplicates() {
    return newBuilder(MaxDuplicatesStatistic.STATS_TYPE);
  }

  /**
   * Create a new index statistic query builder for a partitions statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<PartitionsValue, Set<ByteArray>> partitions() {
    return newBuilder(PartitionsStatistic.STATS_TYPE);
  }

  /**
   * Create a new index statistic query builder for a row range histogram statistic.
   *
   * @return the index statistic query builder
   */
  static IndexStatisticQueryBuilder<RowRangeHistogramValue, NumericHistogram> rowRangeHistogram() {
    return newBuilder(RowRangeHistogramStatistic.STATS_TYPE);
  }

  /**
   * Create a new data type statistic query builder for a count statistic.
   *
   * @return the data type statistic query builder
   */
  static DataTypeStatisticQueryBuilder<CountValue, Long> count() {
    return newBuilder(CountStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a bloom filter statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<BloomFilterValue, BloomFilter<CharSequence>> bloomFilter() {
    return newBuilder(BloomFilterStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a count min sketch statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<CountMinSketchValue, CountMinSketch> countMinSketch() {
    return newBuilder(CountMinSketchStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a fixed bin numeric histogram statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<FixedBinNumericHistogramValue, FixedBinNumericHistogram> fixedBinNumericHistogram() {
    return newBuilder(FixedBinNumericHistogramStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a hyper log log statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<HyperLogLogPlusValue, HyperLogLogPlus> hyperLogLog() {
    return newBuilder(HyperLogLogStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a numeric histogram statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<NumericHistogramValue, NumericHistogram> numericHistogram() {
    return newBuilder(NumericHistogramStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a numeric mean statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<NumericMeanValue, Double> numericMean() {
    return newBuilder(NumericMeanStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a numeric range statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<NumericRangeValue, Range<Double>> numericRange() {
    return newBuilder(NumericRangeStatistic.STATS_TYPE);
  }

  /**
   * Create a new field statistic query builder for a numeric stats statistic.
   *
   * @return the field statistic query builder
   */
  static FieldStatisticQueryBuilder<NumericStatsValue, Stats> numericStats() {
    return newBuilder(NumericStatsStatistic.STATS_TYPE);
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.api;

import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import com.google.common.primitives.Bytes;

/**
 * Base class for values of a statistic. This class is responsible for the updates, serialization,
 * and merging of statistic values.
 *
 * @param <R> the return type of the statistic value
 */
public abstract class StatisticValue<R> implements Mergeable {
  /** Sentinel bin used when the statistic has no binning strategy. */
  public static final ByteArray NO_BIN = new ByteArray();

  // May be null during server-side merging; see getStatistic().
  protected final Statistic<?> statistic;
  protected ByteArray bin = NO_BIN;

  /**
   * Construct a new value with the given parent statistic.
   *
   * @param statistic the parent statistic
   */
  public StatisticValue(final Statistic<?> statistic) {
    this.statistic = statistic;
  }

  /**
   * Get the parent statistic. Note, this may be null in cases of server-side statistic merging.
   *
   * @return the parent statistic
   */
  public Statistic<?> getStatistic() {
    return statistic;
  }

  /**
   * Sets the bin for this value. Only used if the underlying statistic uses a binning strategy.
   *
   * @param bin the bin for this value
   */
  public void setBin(final ByteArray bin) {
    this.bin = bin;
  }

  /**
   * Gets the bin for this value. If the underlying statistic does not use a binning strategy, an
   * empty byte array will be returned.
   *
   * @return the bin for this value
   */
  public ByteArray getBin() {
    return bin;
  }

  /**
   * Merge another statistic value into this one.
   *
   * IMPORTANT: This function cannot guarantee that the Statistic will be available. Any variables
   * needed from the statistic for merging must be serialized with the value.
   */
  @Override
  public abstract void merge(Mergeable merge);

  /**
   * Get the raw value of the statistic value.
   *
   * @return the raw value
   */
  public abstract R getValue();

  @Override
  public String toString() {
    // String.valueOf avoids an NPE if the raw value has not been set yet
    return String.valueOf(getValue());
  }

  /**
   * Get a unique identifier for a value given a statistic id and bin.
   *
   * @param statisticId the statistic id
   * @param bin the bin
   * @return a unique identifier for the value
   */
  public static byte[] getValueId(final StatisticId<?> statisticId, final ByteArray bin) {
    return getValueId(statisticId, bin == null ? null : bin.getBytes());
  }

  /**
   * Get a unique identifier for a value given a statistic id and bin.
   *
   * @param statisticId the statistic id
   * @param bin the bin
   * @return a unique identifier for the value
   */
  public static byte[] getValueId(final StatisticId<?> statisticId, final byte[] bin) {
    if (bin != null) {
      // unique id is <statistic unique id><separator><bin>
      return Bytes.concat(
          statisticId.getUniqueId().getBytes(),
          StatisticId.UNIQUE_ID_SEPARATOR,
          bin);
    }
    return statisticId.getUniqueId().getBytes();
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/VisibilityHandler.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.locationtech.geowave.core.index.persist.Persistable; /** * This interface serves to provide visibility information for a given field of an adapter entry. */ public interface VisibilityHandler extends Persistable { /** * Determine visibility of the field. * * @param adapter the adapter for the entry * @param entry the entry * @param fieldName the field to determine visibility for * @return The visibility for the field */ public String getVisibility(DataTypeAdapter adapter, T entry, String fieldName); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/WriteResults.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.InsertionIds; public class WriteResults { private final Map insertionIdsPerIndex; public WriteResults() { insertionIdsPerIndex = new HashMap<>(); } public WriteResults(final String indexName, final InsertionIds insertionIds) { insertionIdsPerIndex = Collections.singletonMap(indexName, insertionIds); } public WriteResults(final Map insertionIdsPerIndex) { super(); this.insertionIdsPerIndex = insertionIdsPerIndex; } public Set getWrittenIndexNames() { return insertionIdsPerIndex.keySet(); } public InsertionIds getInsertionIdsWritten(final String indexName) { return insertionIdsPerIndex.get(indexName); } public boolean isEmpty() { return insertionIdsPerIndex.isEmpty(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/api/Writer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import java.io.Closeable; public interface Writer extends Closeable { /** * Writes an entry using default visibilities set elsewhere. * * @param entry the entry to write * @return the Insertion IDs representing where this entry was written */ WriteResults write(final T entry); /** * Writes an entry using the provided visibility handler. * * @param entry the entry to write * @param visibilityHandler the handler for determining field visibility * @return the Insertion IDs representing where this entry was written */ WriteResults write(final T entry, final VisibilityHandler visibilityHandler); /** * Get the indices that are being written to. * * @return the indices that are being written to */ Index[] getIndices(); /** * Flush the underlying row writer to ensure entries queued for write are fully written. This is * particularly useful for streaming data as an intermittent mechanism to ensure periodic updates * are being stored. */ void flush(); /** Flush all entries enqueued and close all resources for this writer. */ @Override void close(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/AbstractBaseRowQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.RowReader;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;
import org.locationtech.geowave.core.store.util.NativeEntryTransformer;

/**
 * Represents a query operation by an Accumulo row. This abstraction is re-usable for both exact row
 * ID queries and row prefix queries.
 *
 * @param <T> the native entry type produced by the query
 */
abstract class AbstractBaseRowQuery<T> extends BaseQuery {
  public AbstractBaseRowQuery(
      final Index index,
      final String[] authorizations,
      final ScanCallback<T, ?> scanCallback,
      final DifferingVisibilityCountValue differingVisibilityCounts,
      final FieldVisibilityCountValue visibilityCounts,
      final DataIndexRetrieval dataIndexRetrieval) {
    super(
        index,
        scanCallback,
        differingVisibilityCounts,
        visibilityCounts,
        dataIndexRetrieval,
        authorizations);
  }

  /**
   * Execute the query, returning an iterator over the decoded native entries.
   *
   * @param delete if true, matched rows are deleted as they are read
   * @return a closeable iterator of decoded entries; callers are responsible for closing it
   */
  public CloseableIterator<T> query(
      final DataStoreOperations operations,
      final DataStoreOptions options,
      final double[] maxResolutionSubsamplingPerDimension,
      final double[] targetResolutionPerDimensionForHierarchicalIndex,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Integer limit,
      final Integer queryMaxRangeDecomposition,
      final boolean delete) {
    // the transformer decodes raw GeoWave rows into native entries as they are read
    final RowReader<T> reader =
        getReader(
            operations,
            options,
            adapterStore,
            mappingStore,
            internalAdapterStore,
            maxResolutionSubsamplingPerDimension,
            targetResolutionPerDimensionForHierarchicalIndex,
            limit,
            queryMaxRangeDecomposition,
            new NativeEntryTransformer<>(
                adapterStore,
                mappingStore,
                index,
                getClientFilters(options),
                (ScanCallback) scanCallback,
                getFieldBitmask(),
                maxResolutionSubsamplingPerDimension,
                !isCommonIndexAggregation(),
                getDataIndexRetrieval()),
            delete);
    return reader;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseConstraintsQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import 
org.locationtech.geowave.core.store.operations.RowReader; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.CoordinateRangeQueryFilter; import org.locationtech.geowave.core.store.query.filter.DedupeFilter; import org.locationtech.geowave.core.store.query.filter.FilterList; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue; import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue; import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue; import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Iterators; /** This class represents basic numeric contraints applied to a datastore query */ public class BaseConstraintsQuery extends BaseFilteredIndexQuery { private static final Logger LOGGER = LoggerFactory.getLogger(BaseConstraintsQuery.class); private boolean queryFiltersEnabled; public final Pair, Aggregation> aggregation; public final List constraints; public List distributableFilters; public final IndexMetaData[] indexMetaData; private final Index index; public BaseConstraintsQuery( final short[] adapterIds, final Index index, final QueryConstraints query, final DedupeFilter clientDedupeFilter, final ScanCallback scanCallback, final Pair, Aggregation> aggregation, final Pair> fieldIdsAdapterPair, final IndexMetaDataSetValue indexMetaData, final DuplicateEntryCountValue duplicateCounts, final DifferingVisibilityCountValue differingVisibilityCounts, final FieldVisibilityCountValue visibilityCounts, final DataIndexRetrieval 
dataIndexRetrieval, final String[] authorizations) { this( adapterIds, index, query != null ? query.getIndexConstraints(index) : null, query != null ? query.createFilters(index) : null, clientDedupeFilter, scanCallback, aggregation, fieldIdsAdapterPair, indexMetaData, duplicateCounts, differingVisibilityCounts, visibilityCounts, dataIndexRetrieval, authorizations); } public BaseConstraintsQuery( final short[] adapterIds, final Index index, final List constraints, final List queryFilters, DedupeFilter clientDedupeFilter, final ScanCallback scanCallback, final Pair, Aggregation> aggregation, final Pair> fieldIdsAdapterPair, final IndexMetaDataSetValue indexMetaData, final DuplicateEntryCountValue duplicateCounts, final DifferingVisibilityCountValue differingVisibilityCounts, final FieldVisibilityCountValue visibilityCounts, final DataIndexRetrieval dataIndexRetrieval, final String[] authorizations) { super( adapterIds, index, scanCallback, fieldIdsAdapterPair, differingVisibilityCounts, visibilityCounts, dataIndexRetrieval, authorizations); this.constraints = constraints; this.aggregation = aggregation; this.indexMetaData = indexMetaData != null ? indexMetaData.toArray() : new IndexMetaData[] {}; this.index = index; if ((duplicateCounts != null) && !duplicateCounts.isAnyEntryHaveDuplicates()) { clientDedupeFilter = null; } if (clientDedupeFilter != null) { clientFilters = new ArrayList<>(Collections.singleton(clientDedupeFilter)); } else { clientFilters = new ArrayList<>(); } distributableFilters = queryFilters; queryFiltersEnabled = true; } @Override public QueryFilter getServerFilter(final DataStoreOptions options) { // TODO GEOWAVE-1018 is options necessary? is this correct? 
if ((distributableFilters == null) || distributableFilters.isEmpty()) { return null; } else if (distributableFilters.size() > 1) { return new FilterList(distributableFilters); } else { return distributableFilters.get(0); } } public boolean isQueryFiltersEnabled() { return queryFiltersEnabled; } public void setQueryFiltersEnabled(final boolean queryFiltersEnabled) { this.queryFiltersEnabled = queryFiltersEnabled; } @SuppressWarnings("unchecked") @Override public CloseableIterator query( final DataStoreOperations datastoreOperations, final DataStoreOptions options, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final double[] maxResolutionSubsamplingPerDimension, final double[] targetResolutionPerDimensionForHierarchicalIndex, final Integer limit, final Integer queryMaxRangeDecomposition, final boolean delete) { if (isAggregation()) { if ((options == null) || !options.isServerSideLibraryEnabled()) { // Aggregate client-side final CloseableIterator it = super.query( datastoreOperations, options, adapterStore, mappingStore, internalAdapterStore, maxResolutionSubsamplingPerDimension, targetResolutionPerDimensionForHierarchicalIndex, limit, queryMaxRangeDecomposition, false); return BaseDataStoreUtils.aggregate( it, (Aggregation) aggregation.getRight(), (DataTypeAdapter) aggregation.getLeft()); } else { // the aggregation is run server-side use the reader to // aggregate to a single value here // should see if there is a client dedupe filter thats been // added and run it serverside // also if so and duplicates cross partitions, the dedupe filter // still won't be effective and the aggregation will return // incorrect results if (!clientFilters.isEmpty()) { final QueryFilter f = clientFilters.get(clientFilters.size() - 1); if (f instanceof DedupeFilter) { // in case the list is immutable or null we need to create a new mutable list if (distributableFilters != null) { 
distributableFilters = new ArrayList<>(distributableFilters); } else { distributableFilters = new ArrayList<>(); } distributableFilters.add(f); LOGGER.warn( "Aggregating results when duplicates exist in the table may result in duplicate aggregation"); } } try (final RowReader reader = getReader( datastoreOperations, options, adapterStore, mappingStore, internalAdapterStore, maxResolutionSubsamplingPerDimension, targetResolutionPerDimensionForHierarchicalIndex, limit, queryMaxRangeDecomposition, GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER, false)) { Object mergedAggregationResult = null; final Aggregation agg = (Aggregation) aggregation.getValue(); if ((reader == null) || !reader.hasNext()) { return new CloseableIterator.Empty(); } else { while (reader.hasNext()) { final GeoWaveRow row = reader.next(); for (final GeoWaveValue value : row.getFieldValues()) { if ((value.getValue() != null) && (value.getValue().length > 0)) { if (mergedAggregationResult == null) { mergedAggregationResult = agg.resultFromBinary(value.getValue()); } else { mergedAggregationResult = agg.merge(mergedAggregationResult, agg.resultFromBinary(value.getValue())); } } } } return new CloseableIterator.Wrapper<>( Iterators.singletonIterator(mergedAggregationResult)); } } catch (final Exception e) { LOGGER.warn("Unable to close reader for aggregation", e); } } } return super.query( datastoreOperations, options, adapterStore, mappingStore, internalAdapterStore, maxResolutionSubsamplingPerDimension, targetResolutionPerDimensionForHierarchicalIndex, limit, queryMaxRangeDecomposition, delete); } @Override protected List getClientFiltersList(final DataStoreOptions options) { // Since we have custom filters enabled, this list should only return // the client filters if ((options != null) && options.isServerSideLibraryEnabled()) { return clientFilters; } // add a index filter to the front of the list if there isn't already a // filter if (distributableFilters.isEmpty() || ((distributableFilters.size() 
== 1) && (distributableFilters.get(0) instanceof DedupeFilter))) { final List coords = getCoordinateRanges(); if (!coords.isEmpty() && !(coords.size() == 1 && coords.get(0).getRangesArray().length == 0)) { clientFilters.add( 0, new CoordinateRangeQueryFilter( index.getIndexStrategy(), coords.toArray(new MultiDimensionalCoordinateRangesArray[] {}))); } } else { // Without custom filters, we need all the filters on the client // side for (final QueryFilter distributable : distributableFilters) { if (!clientFilters.contains(distributable)) { clientFilters.add(distributable); } } } return clientFilters; } @Override protected boolean isCommonIndexAggregation() { return BaseDataStoreUtils.isCommonIndexAggregation(aggregation); } @Override protected Pair, Aggregation> getAggregation() { return aggregation; } @Override public List getConstraints() { return constraints; } @Override public List getCoordinateRanges() { if ((constraints == null) || constraints.isEmpty()) { return new ArrayList<>(); } else { final NumericIndexStrategy indexStrategy = index.getIndexStrategy(); final List ranges = new ArrayList<>(); for (final MultiDimensionalNumericData nd : constraints) { final MultiDimensionalCoordinateRanges[] indexStrategyCoordRanges = indexStrategy.getCoordinateRangesPerDimension(nd, indexMetaData); if (indexStrategyCoordRanges != null) { ranges.add(new MultiDimensionalCoordinateRangesArray(indexStrategyCoordRanges)); } } return ranges; } } @Override protected QueryRanges getRanges( final int maxRangeDecomposition, final double[] targetResolutionPerDimensionForHierarchicalIndex) { return DataStoreUtils.constraintsToQueryRanges( constraints, index, targetResolutionPerDimensionForHierarchicalIndex, maxRangeDecomposition, indexMetaData); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseDataIndexWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors 
to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.WriteResults; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import org.locationtech.geowave.core.store.callback.IngestCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.RowWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; class BaseDataIndexWriter implements Writer { private static final Logger LOGGER = LoggerFactory.getLogger(BaseIndexWriter.class); protected final DataStoreOperations operations; protected final DataStoreOptions options; protected final IngestCallback callback; protected RowWriter writer; protected final InternalDataAdapter adapter; protected final AdapterToIndexMapping indexMapping; protected final VisibilityHandler visibilityHandler; final Closeable closable; protected BaseDataIndexWriter( final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final 
VisibilityHandler visibilityHandler, final DataStoreOperations operations, final DataStoreOptions options, final IngestCallback callback, final Closeable closable) { this.operations = operations; this.options = options; this.callback = callback; this.adapter = adapter; this.closable = closable; this.indexMapping = indexMapping; this.visibilityHandler = visibilityHandler; } @Override public Index[] getIndices() { return new Index[] {DataIndexUtils.DATA_ID_INDEX}; } @Override public WriteResults write(final T entry) { return write(entry, visibilityHandler); } @Override public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) { IntermediaryWriteEntryInfo entryInfo; ensureOpen(); if (writer == null) { LOGGER.error("Null writer - empty list returned"); return new WriteResults(); } entryInfo = BaseDataStoreUtils.getWriteInfo( entry, adapter, indexMapping, DataIndexUtils.DATA_ID_INDEX, visibilityHandler, options.isSecondaryIndexing(), true, options.isVisibilityEnabled()); final GeoWaveRow[] rows = entryInfo.getRows(); writer.write(rows); callback.entryIngested(entry, rows); return new WriteResults(); } @Override public void close() { try { closable.close(); } catch (final IOException e) { LOGGER.error("Cannot close callbacks", e); } // thread safe close closeInternal(); } @Override public synchronized void flush() { // thread safe flush of the writers if (writer != null) { writer.flush(); } if (this.callback instanceof Flushable) { try { ((Flushable) callback).flush(); } catch (final IOException e) { LOGGER.error("Cannot flush callbacks", e); } } } protected synchronized void closeInternal() { if (writer != null) { try { writer.close(); writer = null; } catch (final Exception e) { LOGGER.warn("Unable to close writer", e); } } } @SuppressFBWarnings(justification = "This is intentional to avoid unnecessary sync") protected void ensureOpen() { if (writer == null) { synchronized (this) { if (writer == null) { try { writer = 
operations.createDataIndexWriter(adapter); } catch (final Exception e) { LOGGER.error("Unable to open writer", e); } } } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseDataStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.DataStoreProperty; import org.locationtech.geowave.core.store.PropertyStore; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.IndexDependentDataAdapter; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.AggregationQuery; import org.locationtech.geowave.core.store.api.BinConstraints; import 
org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.IngestOptions; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticQuery; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import org.locationtech.geowave.core.store.callback.DeleteCallbackList; import org.locationtech.geowave.core.store.callback.DeleteOtherIndicesCallback; import org.locationtech.geowave.core.store.callback.DuplicateDeletionCallback; import org.locationtech.geowave.core.store.callback.IngestCallback; import org.locationtech.geowave.core.store.callback.IngestCallbackList; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.entities.GeoWaveRowMergingTransform; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.index.writer.IndependentAdapterIndexWriter; import org.locationtech.geowave.core.store.index.writer.IndexCompositeWriter; import 
org.locationtech.geowave.core.store.ingest.BaseDataStoreIngestDriver; import org.locationtech.geowave.core.store.memory.MemoryAdapterIndexMappingStore; import org.locationtech.geowave.core.store.memory.MemoryPersistentAdapterStore; import org.locationtech.geowave.core.store.operations.DataIndexReaderParamsBuilder; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataQuery; import org.locationtech.geowave.core.store.operations.MetadataReader; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.core.store.operations.MetadataWriter; import org.locationtech.geowave.core.store.operations.ReaderParamsBuilder; import org.locationtech.geowave.core.store.operations.RowReader; import org.locationtech.geowave.core.store.operations.RowWriter; import org.locationtech.geowave.core.store.query.aggregate.AdapterAndIndexBasedAggregation; import org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.DataIdQuery; import org.locationtech.geowave.core.store.query.constraints.DataIdRangeQuery; import org.locationtech.geowave.core.store.query.constraints.EverythingQuery; import org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery; import org.locationtech.geowave.core.store.query.constraints.PrefixIdQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.constraints.TypeConstraintQuery; import org.locationtech.geowave.core.store.query.filter.DedupeFilter; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import 
org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider; import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper; import org.locationtech.geowave.core.store.statistics.StatisticId; import org.locationtech.geowave.core.store.statistics.StatisticType; import org.locationtech.geowave.core.store.statistics.StatisticUpdateCallback; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue; import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue; import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType; import org.locationtech.geowave.core.store.statistics.query.DataTypeStatisticQuery; import org.locationtech.geowave.core.store.statistics.query.FieldStatisticQuery; import org.locationtech.geowave.core.store.statistics.query.IndexStatisticQuery; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.geowave.core.store.util.NativeEntryIteratorWrapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.internal.Maps; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Sets; public class BaseDataStore implements DataStore { private static final Logger LOGGER = LoggerFactory.getLogger(BaseDataStore.class); protected final IndexStore indexStore; protected final PersistentAdapterStore adapterStore; protected final DataStatisticsStore statisticsStore; protected final AdapterIndexMappingStore indexMappingStore; protected final DataStoreOperations baseOperations; protected final DataStoreOptions baseOptions; protected final InternalAdapterStore internalAdapterStore; protected final PropertyStore 
// NOTE(review): this extract is word-wrapped and HTML-stripped — generic type parameters
// (e.g. Writer<T>, List<IngestCallback<...>>) are missing from the text below, so the code is
// documented in place rather than rewritten; verify any change against the upstream source.
// This span contains: the tail of the injected-store fields and the DeletionMode enum; the
// constructor (pure field assignment, no I/O); store(Index) / store(InternalDataAdapter),
// which register an index/adapter only if it does not already exist and, when it implements
// DefaultStatisticsProvider, add its default statistics; checkNewDataStore(), which stamps the
// data-version property only when the property is unset AND no ADAPTER/INDEX metadata exists
// (i.e. a pristine store); and the statistics-store accessor.
propertyStore; protected enum DeletionMode { DONT_DELETE, DELETE, DELETE_WITH_DUPLICATES; } public BaseDataStore( final IndexStore indexStore, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final AdapterIndexMappingStore indexMappingStore, final DataStoreOperations operations, final DataStoreOptions options, final InternalAdapterStore internalAdapterStore, final PropertyStore propertyStore) { this.indexStore = indexStore; this.adapterStore = adapterStore; this.statisticsStore = statisticsStore; this.indexMappingStore = indexMappingStore; this.internalAdapterStore = internalAdapterStore; this.propertyStore = propertyStore; baseOperations = operations; baseOptions = options; } public void store(final Index index) { checkNewDataStore(); if (!indexStore.indexExists(index.getName())) { indexStore.addIndex(index); if (index instanceof DefaultStatisticsProvider) { ((DefaultStatisticsProvider) index).getDefaultStatistics().forEach( stat -> statisticsStore.addStatistic(stat)); } } } protected synchronized void store(final InternalDataAdapter adapter) { checkNewDataStore(); if (!adapterStore.adapterExists(adapter.getAdapterId())) { adapterStore.addAdapter(adapter); if (adapter.getAdapter() instanceof DefaultStatisticsProvider) { ((DefaultStatisticsProvider) adapter.getAdapter()).getDefaultStatistics().forEach( stat -> statisticsStore.addStatistic(stat)); } } } private void checkNewDataStore() { if ((propertyStore.getProperty(BaseDataStoreUtils.DATA_VERSION_PROPERTY) == null) && !BaseDataStoreUtils.hasMetadata(baseOperations, MetadataType.ADAPTER) && !BaseDataStoreUtils.hasMetadata(baseOperations, MetadataType.INDEX)) { // Only set the data version if no adapters and indices have already been added propertyStore.setProperty( new DataStoreProperty( BaseDataStoreUtils.DATA_VERSION_PROPERTY, BaseDataStoreUtils.DATA_VERSION)); } } public DataStatisticsStore getStatisticsStore() { return statisticsStore; } public Short getAdapterId(final
// (review) getAdapterId delegates to internalAdapterStore — presumably returns null for an
// unknown type name (boxed Short), verify against InternalAdapterStore's contract.
// resolveVisibilityHandler precedence (grounded in the checks below): explicit handler arg >
// adapter's own handler > global-visibility property > UNCONSTRAINED_VISIBILITY fallback.
// createWriter builds one writer per index, prepending a data-index writer when writing
// original data with secondary indexing enabled and the adapter supports the data index.
String typeName) { return internalAdapterStore.getAdapterId(typeName); } private VisibilityHandler resolveVisibilityHandler( final InternalDataAdapter adapter, final VisibilityHandler visibilityHandler) { if (visibilityHandler != null) { return visibilityHandler; } if (adapter.getVisibilityHandler() != null) { return adapter.getVisibilityHandler(); } final DataStoreProperty globalVis = propertyStore.getProperty(BaseDataStoreUtils.GLOBAL_VISIBILITY_PROPERTY); if (globalVis != null) { return (VisibilityHandler) globalVis.getValue(); } return DataStoreUtils.UNCONSTRAINED_VISIBILITY; } @SuppressWarnings("unchecked") private Writer createWriter( final InternalDataAdapter adapter, final VisibilityHandler visibilityHandler, final boolean writingOriginalData, final Index... indices) { final boolean secondaryIndex = writingOriginalData && baseOptions.isSecondaryIndexing() && DataIndexUtils.adapterSupportsDataIndex(adapter); final Writer[] writers = new Writer[secondaryIndex ? indices.length + 1 : indices.length]; final VisibilityHandler resolvedVisibilityHandler = resolveVisibilityHandler(adapter, visibilityHandler); int i = 0; if (secondaryIndex) { final DataStoreCallbackManager callbackManager = new DataStoreCallbackManager(statisticsStore, true); final AdapterToIndexMapping indexMapping = indexMappingStore.getMapping( adapter.getAdapterId(), DataIndexUtils.DATA_ID_INDEX.getName()); final List> callbacks = Collections.singletonList( callbackManager.getIngestCallback( adapter, indexMapping, DataIndexUtils.DATA_ID_INDEX)); final IngestCallbackList callbacksList = new IngestCallbackList<>(callbacks); writers[i++] = createDataIndexWriter( adapter, indexMapping, resolvedVisibilityHandler, baseOperations, baseOptions, callbacksList, callbacksList); } for (final Index index : indices) { final DataStoreCallbackManager callbackManager = new DataStoreCallbackManager(statisticsStore, i == 0); callbackManager.setPersistStats(baseOptions.isPersistDataStatistics()); final
// (review) per-index writer wiring: mapping lookup, ingest callbacks only when writing
// original data, and an IndependentAdapterIndexWriter wrapper for index-dependent adapters.
// All writers are composed into a single IndexCompositeWriter. The query overloads that follow
// all funnel into internalQuery with DeletionMode.DONT_DELETE; the String overload parses a
// GWQL statement and executes it.
AdapterToIndexMapping indexMapping = indexMappingStore.getMapping(adapter.getAdapterId(), index.getName()); final List> callbacks = writingOriginalData ? Collections.singletonList( callbackManager.getIngestCallback(adapter, indexMapping, index)) : Collections.emptyList(); final IngestCallbackList callbacksList = new IngestCallbackList<>(callbacks); writers[i] = createIndexWriter( adapter, indexMapping, index, resolvedVisibilityHandler, baseOperations, baseOptions, callbacksList, callbacksList); if (adapter.getAdapter() instanceof IndexDependentDataAdapter) { writers[i] = new IndependentAdapterIndexWriter<>( (IndexDependentDataAdapter) adapter.getAdapter(), index, resolvedVisibilityHandler, writers[i]); } i++; } return new IndexCompositeWriter<>(writers); } public CloseableIterator query( final Query query, final ScanCallback scanCallback) { return internalQuery(query, DeletionMode.DONT_DELETE, scanCallback); } @Override public CloseableIterator query(final Query query) { return internalQuery(query, DeletionMode.DONT_DELETE); } @Override public ResultSet query(final String queryStr, final String... authorizations) { final Statement statement = GWQLParser.parseStatement(this, queryStr); return statement.execute(authorizations); } protected CloseableIterator internalQuery( final Query query, final DeletionMode delete) { return internalQuery(query, delete, null); } /* * Since this general-purpose method crosses multiple adapters, the type of result cannot be * assumed. * * (non-Javadoc) * * @see org.locationtech.geowave.core.store.DataStore#query(org.locationtech. geowave.
* core.store.query.QueryOptions, org.locationtech.geowave.core.store.query.Query) */ protected CloseableIterator internalQuery( Query query, final DeletionMode delete, final ScanCallback scanCallback) { if (query == null) { query = (Query) QueryBuilder.newBuilder().build(); } final BaseQueryOptions queryOptions = new BaseQueryOptions(query, adapterStore, internalAdapterStore, scanCallback); return internalQuery(query.getQueryConstraints(), queryOptions, delete); } protected CloseableIterator internalQuery( final QueryConstraints constraints, final BaseQueryOptions queryOptions, final DeletionMode deleteMode) { // Note: The DeletionMode option is provided to avoid recursively // adding DuplicateDeletionCallbacks when actual duplicates are removed // via the DuplicateDeletionCallback. The callback should only be added // during the initial deletion query. final boolean delete = ((deleteMode == DeletionMode.DELETE) || (deleteMode == DeletionMode.DELETE_WITH_DUPLICATES)); final List> results = new ArrayList<>(); // If CQL filter is set if (constraints instanceof TypeConstraintQuery) { final String constraintTypeName = ((TypeConstraintQuery) constraints).getTypeName(); if ((queryOptions.getAdapterIds() == null) || (queryOptions.getAdapterIds().length == 0)) { queryOptions.setAdapterId(internalAdapterStore.getAdapterId(constraintTypeName)); } else if (queryOptions.getAdapterIds().length == 1) { final Short adapterId = internalAdapterStore.getAdapterId(constraintTypeName); if ((adapterId == null) || (queryOptions.getAdapterIds()[0] != adapterId.shortValue())) { LOGGER.error("Constraint Query Type name does not match Query Options Type Name"); throw new RuntimeException( "Constraint Query Type name does not match Query Options Type Name"); } } else { // Throw exception when QueryOptions has more than one adapter // and CQL Adapter is set.
// (review) Both mismatch branches above/below log and then throw a RuntimeException with the
// same message: a type-constrained query must resolve to the single adapter ID already set on
// the options. After sanitizing null constraints to EverythingQuery, dataIdsToDelete is only
// allocated (ConcurrentHashMap) for DELETE_WITH_DUPLICATES + secondary indexing; the
// "dataIdIndexIsBest" fast path reads directly from the data index for DataId/DataIdRange/
// Everything queries when not deleting, with an optional aggregation short-circuit.
// NOTE(review): the exception message and a nearby line comment are split across extract
// lines below ("reverse / iteration", "// all / queries ...") — artifacts of the extraction,
// left untouched; do not edit inside those spans.
LOGGER.error("Constraint Query Type name does not match Query Options Type Name"); throw new RuntimeException( "Constraint Query Type name does not match Query Options Type Name"); } } final QueryConstraints sanitizedConstraints = (constraints == null) ? new EverythingQuery() : constraints; final List deleteCallbacks = new ArrayList<>(); final Map> dataIdsToDelete; if (DeletionMode.DELETE_WITH_DUPLICATES.equals(deleteMode) && (baseOptions.isSecondaryIndexing())) { dataIdsToDelete = new ConcurrentHashMap<>(); } else { dataIdsToDelete = null; } final boolean dataIdIndexIsBest = baseOptions.isSecondaryIndexing() && ((sanitizedConstraints instanceof DataIdQuery) || (sanitizedConstraints instanceof DataIdRangeQuery) || (sanitizedConstraints instanceof EverythingQuery)); if (!delete && dataIdIndexIsBest) { try { // just grab the values directly from the Data Index InternalDataAdapter[] adapters = queryOptions.getAdaptersArray(adapterStore); if (!queryOptions.isAllIndices()) { final Set adapterIds = new HashSet<>( Arrays.asList( ArrayUtils.toObject( queryOptions.getValidAdapterIds( internalAdapterStore, indexMappingStore)))); adapters = Arrays.stream(adapters).filter(a -> adapterIds.contains(a.getAdapterId())).toArray( i -> new InternalDataAdapter[i]); } // TODO test whether aggregations work in this case for (final InternalDataAdapter adapter : adapters) { RowReader rowReader; if (sanitizedConstraints instanceof DataIdQuery) { rowReader = DataIndexUtils.getRowReader( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getFieldIdsAdapterPair(), queryOptions.getAggregation(), queryOptions.getAuthorizations(), adapter.getAdapterId(), ((DataIdQuery) sanitizedConstraints).getDataIds()); } else if (sanitizedConstraints instanceof DataIdRangeQuery) { if (((DataIdRangeQuery) sanitizedConstraints).isReverse() && !isReverseIterationSupported()) { throw new UnsupportedOperationException( "Currently the underlying datastore does not support reverse
iteration"); } rowReader = DataIndexUtils.getRowReader( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getFieldIdsAdapterPair(), queryOptions.getAggregation(), queryOptions.getAuthorizations(), adapter.getAdapterId(), ((DataIdRangeQuery) sanitizedConstraints).getStartDataIdInclusive(), ((DataIdRangeQuery) sanitizedConstraints).getEndDataIdInclusive(), ((DataIdRangeQuery) sanitizedConstraints).isReverse()); } else { rowReader = DataIndexUtils.getRowReader( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getFieldIdsAdapterPair(), queryOptions.getAggregation(), queryOptions.getAuthorizations(), adapter.getAdapterId()); } results.add( new CloseableIteratorWrapper( rowReader, new NativeEntryIteratorWrapper( adapterStore, indexMappingStore, DataIndexUtils.DATA_ID_INDEX, rowReader, null, queryOptions.getScanCallback(), BaseDataStoreUtils.getFieldBitmask( queryOptions.getFieldIdsAdapterPair(), DataIndexUtils.DATA_ID_INDEX), queryOptions.getMaxResolutionSubsamplingPerDimension(), !BaseDataStoreUtils.isCommonIndexAggregation(queryOptions.getAggregation()), null))); } if (BaseDataStoreUtils.isAggregation(queryOptions.getAggregation())) { return BaseDataStoreUtils.aggregate(new CloseableIteratorWrapper(new Closeable() { @Override public void close() throws IOException { for (final CloseableIterator result : results) { result.close(); } } }, Iterators.concat(results.iterator())), (Aggregation) queryOptions.getAggregation().getRight(), (DataTypeAdapter) queryOptions.getAggregation().getLeft()); } } catch (final IOException e1) { LOGGER.error("Failed to resolve adapter or index for query", e1); } } else { final boolean isConstraintsAdapterIndexSpecific = sanitizedConstraints instanceof AdapterAndIndexBasedQueryConstraints; final boolean isAggregationAdapterIndexSpecific = (queryOptions.getAggregation() != null) && (queryOptions.getAggregation().getRight() instanceof AdapterAndIndexBasedAggregation); // all
queries will use the same instance of the dedupe filter for // client side filtering because the filter needs to be applied across // indices DedupeFilter dedupeFilter = new DedupeFilter(); MemoryPersistentAdapterStore tempAdapterStore = new MemoryPersistentAdapterStore(queryOptions.getAdaptersArray(adapterStore)); MemoryAdapterIndexMappingStore memoryMappingStore = new MemoryAdapterIndexMappingStore(); // keep a list of adapters that have been queried, to only load an // adapter to be queried once final Set queriedAdapters = new HashSet<>(); // if its an ordered constraints then it is dependent on the index selected, if its // secondary indexing its inefficient to delete by constraints final boolean deleteAllIndicesByConstraints = ((delete && ((constraints == null) || !constraints.indexMustBeSpecified()) && !baseOptions.isSecondaryIndexing())); final List>>> indexAdapterPairList = (deleteAllIndicesByConstraints) ? queryOptions.getIndicesForAdapters(tempAdapterStore, indexMappingStore, indexStore) : queryOptions.getBestQueryIndices( tempAdapterStore, indexMappingStore, indexStore, statisticsStore, sanitizedConstraints); Map> additionalIndicesToDelete = null; if (DeletionMode.DELETE_WITH_DUPLICATES.equals(deleteMode) && !deleteAllIndicesByConstraints) { additionalIndicesToDelete = new HashMap<>(); // we have to make sure to delete from the other indices if they exist final List>>> allIndices = queryOptions.getIndicesForAdapters(tempAdapterStore, indexMappingStore, indexStore); for (final Pair>> allPair : allIndices) { for (final Pair>> constraintPair : indexAdapterPairList) { if (((constraintPair.getKey() == null) && (allPair.getKey() == null)) || constraintPair.getKey().equals(allPair.getKey())) { allPair.getRight().removeAll(constraintPair.getRight()); break; } } for (final InternalDataAdapter adapter : allPair.getRight()) { List indices = additionalIndicesToDelete.get(adapter.getAdapterId()); if (indices == null) { indices = new ArrayList<>();
// (review) additionalIndicesToDelete maps adapterId -> indices NOT covered by the chosen
// "best" query index, so DELETE_WITH_DUPLICATES can clean those up later through
// DeleteOtherIndicesCallback (wired further below). A null index in an indexAdapterPair
// means no index satisfies those adapters: when the data-ID index is usable this must be a
// deletion (the !delete case was handled in the fast path above) and is serviced via
// DataIndexUtils.delete; otherwise a warning lists the unsatisfiable type names.
additionalIndicesToDelete.put(adapter.getAdapterId(), indices); } indices.add(allPair.getLeft()); } } } final Pair, Aggregation> aggregation = queryOptions.getAggregation(); final ScanCallback callback = queryOptions.getScanCallback(); for (final Pair>> indexAdapterPair : indexAdapterPairList) { if (indexAdapterPair.getKey() == null) { // this indicates there are no indices that satisfy this set of adapters // we can still satisfy it with the data ID index if its available for certain types of // queries if (dataIdIndexIsBest) { // and in fact this must be a deletion operation otherwise it would have been caught in // prior logic for !delete for (final InternalDataAdapter adapter : indexAdapterPair.getRight()) { // this must be a data index only adapter, just worry about updating statistics and // not other indices or duplicates ScanCallback scanCallback = callback; if (baseOptions.isPersistDataStatistics()) { final DataStoreCallbackManager callbackCache = new DataStoreCallbackManager( statisticsStore, queriedAdapters.add(adapter.getAdapterId())); deleteCallbacks.add(callbackCache); scanCallback = new ScanCallback() { @Override public void entryScanned(final Object entry, final GeoWaveRow row) { if (callback != null) { callback.entryScanned(entry, row); } callbackCache.getDeleteCallback(adapter, null, null).entryDeleted(entry, row); } }; } if (sanitizedConstraints instanceof DataIdQuery) { DataIndexUtils.delete( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getFieldIdsAdapterPair(), queryOptions.getAggregation(), queryOptions.getAuthorizations(), scanCallback, adapter.getAdapterId(), ((DataIdQuery) sanitizedConstraints).getDataIds()); } else if (sanitizedConstraints instanceof DataIdRangeQuery) { DataIndexUtils.delete( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getFieldIdsAdapterPair(), queryOptions.getAggregation(), queryOptions.getAuthorizations(), scanCallback, adapter.getAdapterId(),
((DataIdRangeQuery) sanitizedConstraints).getStartDataIdInclusive(), ((DataIdRangeQuery) sanitizedConstraints).getEndDataIdInclusive()); } else { DataIndexUtils.delete( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getFieldIdsAdapterPair(), queryOptions.getAggregation(), queryOptions.getAuthorizations(), scanCallback, adapter.getAdapterId()); } } } else { final String[] typeNames = indexAdapterPair.getRight().stream().map(a -> a.getAdapter().getTypeName()).toArray( k -> new String[k]); LOGGER.warn( "Data types '" + ArrayUtils.toString(typeNames) + "' do not have an index that satisfies the query"); } continue; } final List adapterIdsToQuery = new ArrayList<>(); // this only needs to be done once per index, not once per // adapter boolean queriedAllAdaptersByPrefix = false; // maintain a set of data IDs if deleting using secondary indexing for (final InternalDataAdapter adapter : indexAdapterPair.getRight()) { final Index index = indexAdapterPair.getLeft(); final AdapterToIndexMapping indexMapping = indexMappingStore.getMapping(adapter.getAdapterId(), index.getName()); memoryMappingStore.addAdapterIndexMapping(indexMapping); if (delete) { final DataStoreCallbackManager callbackCache = new DataStoreCallbackManager( statisticsStore, queriedAdapters.add(adapter.getAdapterId())); // the duplicate deletion callback utilizes insertion id // query to clean up the dupes, in this case we do not // want the stats to change if (!(constraints instanceof InsertionIdQuery)) { callbackCache.setPersistStats(baseOptions.isPersistDataStatistics()); } else { callbackCache.setPersistStats(false); } deleteCallbacks.add(callbackCache); if (deleteMode == DeletionMode.DELETE_WITH_DUPLICATES) { final DeleteCallbackList delList = (DeleteCallbackList) callbackCache.getDeleteCallback( adapter, indexMapping, index); final DuplicateDeletionCallback dupDeletionCallback = new DuplicateDeletionCallback<>(this, adapter, indexMapping, index);
// (review) DELETE_WITH_DUPLICATES chains a DuplicateDeletionCallback (and, when present, a
// DeleteOtherIndicesCallback for the extra indices collected above). The scan callback set
// below also records each row's data ID into the shared dataIdsToDelete map using a
// get / synchronized-on-map / re-get / put sequence before adding to a concurrent set —
// effectively a check-then-act guarded insert; the enclosing close() later flushes these via
// deleteFromDataIndex when secondary indexing is on.
delList.addCallback(dupDeletionCallback); if ((additionalIndicesToDelete != null) && (additionalIndicesToDelete.get(adapter.getAdapterId()) != null)) { delList.addCallback( new DeleteOtherIndicesCallback<>( baseOperations, adapter, additionalIndicesToDelete.get(adapter.getAdapterId()), adapterStore, indexMappingStore, internalAdapterStore, queryOptions.getAuthorizations())); } } final Map> internalDataIdsToDelete = dataIdsToDelete; queryOptions.setScanCallback(new ScanCallback() { @Override public void entryScanned(final Object entry, final GeoWaveRow row) { if (callback != null) { callback.entryScanned(entry, row); } if (internalDataIdsToDelete != null) { final ByteArray dataId = new ByteArray(row.getDataId()); Set currentDataIdsToDelete = internalDataIdsToDelete.get(row.getAdapterId()); if (currentDataIdsToDelete == null) { synchronized (internalDataIdsToDelete) { currentDataIdsToDelete = internalDataIdsToDelete.get(row.getAdapterId()); if (currentDataIdsToDelete == null) { currentDataIdsToDelete = Sets.newConcurrentHashSet(); internalDataIdsToDelete.put(row.getAdapterId(), currentDataIdsToDelete); } } } currentDataIdsToDelete.add(dataId); } callbackCache.getDeleteCallback(adapter, indexMapping, index).entryDeleted( entry, row); } }); } QueryConstraints adapterIndexConstraints; if (isConstraintsAdapterIndexSpecific) { adapterIndexConstraints = ((AdapterAndIndexBasedQueryConstraints) sanitizedConstraints).createQueryConstraints( adapter, indexAdapterPair.getLeft(), indexMapping); if (adapterIndexConstraints == null) { continue; } } else { adapterIndexConstraints = sanitizedConstraints; } if (isAggregationAdapterIndexSpecific) { queryOptions.setAggregation( ((AdapterAndIndexBasedAggregation) aggregation.getRight()).createAggregation( adapter, indexMapping, index), aggregation.getLeft()); } if (adapterIndexConstraints instanceof InsertionIdQuery) { queryOptions.setLimit(-1); results.add( queryInsertionId( adapter, index, (InsertionIdQuery) adapterIndexConstraints,
dedupeFilter, queryOptions, tempAdapterStore, delete)); continue; } else if (adapterIndexConstraints instanceof PrefixIdQuery) { if (!queriedAllAdaptersByPrefix) { final PrefixIdQuery prefixIdQuery = (PrefixIdQuery) adapterIndexConstraints; results.add( queryRowPrefix( index, prefixIdQuery.getPartitionKey(), prefixIdQuery.getSortKeyPrefix(), queryOptions, indexAdapterPair.getRight(), tempAdapterStore, delete)); queriedAllAdaptersByPrefix = true; } continue; } else if (isConstraintsAdapterIndexSpecific || isAggregationAdapterIndexSpecific) { // can't query multiple adapters in the same scan results.add( queryConstraints( Collections.singletonList(adapter.getAdapterId()), index, adapterIndexConstraints, dedupeFilter, queryOptions, tempAdapterStore, memoryMappingStore, delete)); continue; } // finally just add it to a list to query multiple adapters // in on scan adapterIdsToQuery.add(adapter.getAdapterId()); } // supports querying multiple adapters in a single index // in one query instance (one scanner) for efficiency if (adapterIdsToQuery.size() > 0) { results.add( queryConstraints( adapterIdsToQuery, indexAdapterPair.getLeft(), sanitizedConstraints, dedupeFilter, queryOptions, tempAdapterStore, memoryMappingStore, delete)); } if (DeletionMode.DELETE_WITH_DUPLICATES.equals(deleteMode)) { // Make sure each index query has a clean dedupe filter so that entries from other indices // get deleted dedupeFilter = new DedupeFilter(); } } } return new CloseableIteratorWrapper<>(new Closeable() { @Override public void close() throws IOException { for (final CloseableIterator result : results) { result.close(); } for (final DataStoreCallbackManager c : deleteCallbacks) { c.close(); } if ((dataIdsToDelete != null) && !dataIdsToDelete.isEmpty()) { if (baseOptions.isSecondaryIndexing()) { deleteFromDataIndex(dataIdsToDelete, queryOptions.getAuthorizations()); } } } }, Iterators.concat(new CastIterator(results.iterator()))); } protected void deleteFromDataIndex( final Map>
// (review) continuation of deleteFromDataIndex(adapterId -> data-ID set, auths...): issues
// one data-index delete per adapter via DataIndexReaderParamsBuilder with
// isAuthorizationsLimiting(false). Also in this span: isAllAdapters (array equality against
// the full type-name list — NOTE order-sensitive as written, Arrays.equals), and
// getAdaptersForIndex, which throws IllegalStateException if removing the index would leave
// an adapter with zero indices, otherwise collects the adapter IDs that reference it.
dataIdsToDelete, final String... authorizations) { for (final Entry> entry : dataIdsToDelete.entrySet()) { final Short adapterId = entry.getKey(); baseOperations.delete( new DataIndexReaderParamsBuilder<>( adapterStore, indexMappingStore, internalAdapterStore).additionalAuthorizations( authorizations).isAuthorizationsLimiting(false).adapterId(adapterId).dataIds( entry.getValue().stream().map(b -> b.getBytes()).toArray( i -> new byte[i][])).build()); } } private boolean isAllAdapters(final String[] typeNames) { return Arrays.equals(internalAdapterStore.getTypeNames(), typeNames); } private Short[] getAdaptersForIndex(final String indexName) { final ArrayList markedAdapters = new ArrayList<>(); // remove the given index for all types final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter dataAdapter : adapters) { final AdapterToIndexMapping[] adapterIndexMap = indexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId()); for (int i = 0; i < adapterIndexMap.length; i++) { if (adapterIndexMap[i].getIndexName().equals(indexName)) { // check if it is the only index for the current adapter if (adapterIndexMap.length == 1) { throw new IllegalStateException( "Index removal failed. Adapters require at least one index."); } else { // mark the index for removal markedAdapters.add(dataAdapter.getAdapterId()); } } } } final Short[] adapterIds = new Short[markedAdapters.size()]; return markedAdapters.toArray(adapterIds); } public boolean delete( Query query, final ScanCallback scanCallback, final boolean deleteDuplicates) { if (query == null) { query = (Query) QueryBuilder.newBuilder().build(); } if (((query.getQueryConstraints() == null) || (query.getQueryConstraints() instanceof EverythingQuery))) { if ((query.getDataTypeQueryOptions().getTypeNames() == null) || (query.getDataTypeQueryOptions().getTypeNames().length == 0) || isAllAdapters(query.getDataTypeQueryOptions().getTypeNames())) { // TODO what about authorizations here?
// (review) delete() routing: empty/Everything constraints covering ALL types => wholesale
// deleteEverything() (drops all stores and tables, returns false on failure); a subset of
// types => per-adapter deleteEntries on each adapter's indices (plus the data index under
// secondary indexing); otherwise the delete is performed by running the query in
// DELETE / DELETE_WITH_DUPLICATES mode and draining the iterator. The three delete overloads
// below simply forward with default scanCallback/deleteDuplicates arguments.
return deleteEverything(); } else { try { final BaseQueryOptions sanitizedQueryOptions = new BaseQueryOptions(query, adapterStore, internalAdapterStore); for (final Pair>> indexAdapterPair : sanitizedQueryOptions.getIndicesForAdapters( adapterStore, indexMappingStore, indexStore)) { if (indexAdapterPair.getLeft() != null) { for (final InternalDataAdapter adapter : indexAdapterPair.getRight()) { try { deleteEntries( adapter, indexAdapterPair.getLeft(), query.getCommonQueryOptions().getAuthorizations()); } catch (final IOException e) { LOGGER.warn("Unable to delete by adapter", e); return false; } } } } if (baseOptions.isSecondaryIndexing()) { for (final InternalDataAdapter adapter : sanitizedQueryOptions.getAdaptersArray( adapterStore)) { deleteEntries( adapter, DataIndexUtils.DATA_ID_INDEX, query.getCommonQueryOptions().getAuthorizations()); } } } catch (final IOException e) { LOGGER.warn("Unable to get adapters to delete", e); return false; } } } else { try (CloseableIterator dataIt = internalQuery( query, deleteDuplicates ? DeletionMode.DELETE_WITH_DUPLICATES : DeletionMode.DELETE, scanCallback)) { while (dataIt.hasNext()) { dataIt.next(); } } } return true; } @Override public boolean delete(final Query query) { return delete(query, null, true); } public boolean delete(final Query query, final ScanCallback scanCallback) { return delete(query, scanCallback, true); } public boolean delete(final Query query, final boolean deleteDuplicates) { return delete(query, null, deleteDuplicates); } protected boolean deleteEverything() { try { indexStore.removeAll(); adapterStore.removeAll(); statisticsStore.removeAll(); internalAdapterStore.removeAll(); indexMappingStore.removeAll(); baseOperations.deleteAll(); return true; } catch (final Exception e) { LOGGER.error("Unable to delete all tables", e); } return false; } private void deleteEntries( final InternalDataAdapter adapter, final Index index, final String...
// (review) continuation of deleteEntries(adapter, index, auths...): removes the adapter's
// data-type statistics (try-with-resources over the stats iterator), then deletes all rows
// for the type from the given index; the adapter-index mapping is deliberately NOT removed
// (see the retained comment — authorizations are not applied there). queryConstraints below
// assembles a BaseConstraintsQuery seeded with index metadata, duplicate counts, and the two
// visibility-count statistics, plus an optional data-index retrieval for secondary indexing.
additionalAuthorizations) throws IOException { try (CloseableIterator>> adapterStats = statisticsStore.getDataTypeStatistics(adapter.getAdapter(), null, null)) { statisticsStore.removeStatistics(adapterStats); } // cannot delete because authorizations are not used // this.indexMappingStore.remove(adapter.getAdapterId()); baseOperations.deleteAll( index.getName(), adapter.getTypeName(), adapter.getAdapterId(), additionalAuthorizations); } protected CloseableIterator queryConstraints( final List adapterIdsToQuery, final Index index, final QueryConstraints sanitizedQuery, final DedupeFilter filter, final BaseQueryOptions sanitizedQueryOptions, final PersistentAdapterStore tempAdapterStore, final AdapterIndexMappingStore mappingStore, final boolean delete) { final BaseConstraintsQuery constraintsQuery = new BaseConstraintsQuery( ArrayUtils.toPrimitive(adapterIdsToQuery.toArray(new Short[0])), index, sanitizedQuery, filter, sanitizedQueryOptions.getScanCallback(), sanitizedQueryOptions.getAggregation(), sanitizedQueryOptions.getFieldIdsAdapterPair(), InternalStatisticsHelper.getIndexMetadata( index, adapterIdsToQuery, tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()), InternalStatisticsHelper.getDuplicateCounts( index, adapterIdsToQuery, tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()), InternalStatisticsHelper.getDifferingVisibilityCounts( index, adapterIdsToQuery, tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()), InternalStatisticsHelper.getVisibilityCounts( index, adapterIdsToQuery, tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()), DataIndexUtils.getDataIndexRetrieval( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, index, sanitizedQueryOptions.getFieldIdsAdapterPair(), sanitizedQueryOptions.getAggregation(), sanitizedQueryOptions.getAuthorizations(), baseOptions.getDataIndexBatchSize()), sanitizedQueryOptions.getAuthorizations());
// (review) execute the constraints query against the base operations, threading through
// subsampling resolution, hierarchical-index target resolution, limit, and max range
// decomposition from the sanitized options; queryRowPrefix below is the analogous
// partition/sort-key-prefix scan across a set of adapters.
return constraintsQuery.query( baseOperations, baseOptions, tempAdapterStore, mappingStore, internalAdapterStore, sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(), sanitizedQueryOptions.getTargetResolutionPerDimensionForHierarchicalIndex(), sanitizedQueryOptions.getLimit(), sanitizedQueryOptions.getMaxRangeDecomposition(), delete); } protected CloseableIterator queryRowPrefix( final Index index, final byte[] partitionKey, final byte[] sortPrefix, final BaseQueryOptions sanitizedQueryOptions, final List> adapters, final PersistentAdapterStore tempAdapterStore, final boolean delete) { final Set adapterIds = adapters.stream().map(a -> a.getAdapterId()).collect(Collectors.toSet()); final BaseRowPrefixQuery prefixQuery = new BaseRowPrefixQuery<>( index, partitionKey, sortPrefix, (ScanCallback) sanitizedQueryOptions.getScanCallback(), InternalStatisticsHelper.getDifferingVisibilityCounts( index, adapterIds, tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()), InternalStatisticsHelper.getVisibilityCounts( index, adapterIds, tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()), DataIndexUtils.getDataIndexRetrieval( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, index, sanitizedQueryOptions.getFieldIdsAdapterPair(), sanitizedQueryOptions.getAggregation(), sanitizedQueryOptions.getAuthorizations(), baseOptions.getDataIndexBatchSize()), sanitizedQueryOptions.getAuthorizations()); return prefixQuery.query( baseOperations, baseOptions, sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(), sanitizedQueryOptions.getTargetResolutionPerDimensionForHierarchicalIndex(), tempAdapterStore, indexMappingStore, internalAdapterStore, sanitizedQueryOptions.getLimit(), sanitizedQueryOptions.getMaxRangeDecomposition(), delete); } protected CloseableIterator queryInsertionId( final InternalDataAdapter adapter, final Index index, final InsertionIdQuery query, final DedupeFilter filter, final
// (review) queryInsertionId: single-adapter exact insertion-ID lookup, seeded with the same
// visibility-count statistics and optional data-index retrieval as the other query paths.
// createDataIndexWriter constructs a BaseDataIndexWriter; createIndexWriter's definition
// continues past this extract.
BaseQueryOptions sanitizedQueryOptions, final PersistentAdapterStore tempAdapterStore, final boolean delete) { final DifferingVisibilityCountValue differingVisibilityCounts = InternalStatisticsHelper.getDifferingVisibilityCounts( index, Collections.singletonList(adapter.getAdapterId()), tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()); final FieldVisibilityCountValue visibilityCounts = InternalStatisticsHelper.getVisibilityCounts( index, Collections.singletonList(adapter.getAdapterId()), tempAdapterStore, statisticsStore, sanitizedQueryOptions.getAuthorizations()); final BaseInsertionIdQuery q = new BaseInsertionIdQuery<>( adapter, index, query, (ScanCallback) sanitizedQueryOptions.getScanCallback(), filter, differingVisibilityCounts, visibilityCounts, DataIndexUtils.getDataIndexRetrieval( baseOperations, adapterStore, indexMappingStore, internalAdapterStore, index, sanitizedQueryOptions.getFieldIdsAdapterPair(), sanitizedQueryOptions.getAggregation(), sanitizedQueryOptions.getAuthorizations(), baseOptions.getDataIndexBatchSize()), sanitizedQueryOptions.getAuthorizations()); return q.query( baseOperations, baseOptions, tempAdapterStore, indexMappingStore, internalAdapterStore, sanitizedQueryOptions.getMaxResolutionSubsamplingPerDimension(), sanitizedQueryOptions.getTargetResolutionPerDimensionForHierarchicalIndex(), sanitizedQueryOptions.getLimit(), sanitizedQueryOptions.getMaxRangeDecomposition(), delete); } protected Writer createDataIndexWriter( final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final VisibilityHandler visibilityHandler, final DataStoreOperations baseOperations, final DataStoreOptions baseOptions, final IngestCallback callback, final Closeable closable) { return new BaseDataIndexWriter<>( adapter, indexMapping, visibilityHandler, baseOperations, baseOptions, callback, closable); } protected Writer createIndexWriter( final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping,
final Index index, final VisibilityHandler visibilityHandler, final DataStoreOperations baseOperations, final DataStoreOptions baseOptions, final IngestCallback callback, final Closeable closable) { return new BaseIndexWriter<>( adapter, indexMapping, index, visibilityHandler, baseOperations, baseOptions, callback, closable); } protected void initOnIndexWriterCreate( final InternalDataAdapter adapter, final Index index) {} @Override public DataTypeAdapter getType(final String typeName) { final InternalDataAdapter internalDataAdapter = getInternalAdapter(typeName); if (internalDataAdapter == null) { return null; } return internalDataAdapter.getAdapter(); } private InternalDataAdapter getInternalAdapter(final String typeName) { final Short internalAdapterId = internalAdapterStore.getAdapterId(typeName); if (internalAdapterId == null) { return null; } return adapterStore.getAdapter(internalAdapterId); } /** * Get all the adapters that have been used within this data store * * @return An array of the adapters used within this datastore. 
*/ @Override public DataTypeAdapter[] getTypes() { final InternalDataAdapter[] adapters = adapterStore.getAdapters(); return Arrays.stream(adapters).map(InternalDataAdapter::getAdapter).toArray( DataTypeAdapter[]::new); } @Override public void addIndex(final Index index) { store(index); } @Override public Index[] getIndices() { return getIndices(null); } @Override public Index getIndex(final String indexName) { return indexStore.getIndex(indexName); } @Override public Index[] getIndices(final String typeName) { if (typeName == null) { final List indexList = new ArrayList<>(); try (CloseableIterator indexIt = indexStore.getIndices()) { while (indexIt.hasNext()) { indexList.add(indexIt.next()); } return indexList.toArray(new Index[0]); } } final Short internalAdapterId = internalAdapterStore.getAdapterId(typeName); if (internalAdapterId == null) { LOGGER.warn("Unable to find adapter '" + typeName + "' for indices"); return new Index[0]; } final AdapterToIndexMapping[] indices = indexMappingStore.getIndicesForAdapter(internalAdapterId); return Arrays.stream(indices).map(indexMapping -> indexMapping.getIndex(indexStore)).toArray( Index[]::new); } @Override public void addIndex(final String typeName, final Index... indices) { if (indices.length == 0) { LOGGER.warn("At least one index must be provided."); return; } final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { LOGGER.warn( "DataTypeAdapter does not exist for type '" + typeName + "'. Add it using addType() and then add the indices again."); return; } else { final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId); if (adapter == null) { LOGGER.warn( "DataTypeAdapter is undefined for type '" + typeName + "'. 
Add it using addType() and then add the indices again."); return; } final AdapterToIndexMapping[] existingMappings = indexMappingStore.getIndicesForAdapter(adapterId); if ((existingMappings != null) && (existingMappings.length > 0)) { // reduce the provided indices to only those that don't already // exist final Set indexNames = Arrays.stream(existingMappings).map(AdapterToIndexMapping::getIndexName).collect( Collectors.toSet()); final Index[] newIndices = Arrays.stream(indices).filter(i -> !indexNames.contains(i.getName())).toArray( size -> new Index[size]); if (newIndices.length > 0) { internalAddIndices(adapter, newIndices); try (Writer writer = createWriter(adapter, adapter.getVisibilityHandler(), false, newIndices)) { try ( // TODO what about authorizations final CloseableIterator it = query(QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).build())) { while (it.hasNext()) { writer.write(it.next()); } } } } else if (LOGGER.isInfoEnabled()) { LOGGER.info("Indices " + ArrayUtils.toString(indices) + " already added."); } } else { internalAddIndices(adapter, indices); } } } private void internalAddIndices(final InternalDataAdapter adapter, final Index[] indices) { for (final Index index : indices) { indexMappingStore.addAdapterIndexMapping( BaseDataStoreUtils.mapAdapterToIndex(adapter, index)); store(index); initOnIndexWriterCreate(adapter, index); } } @Override public void addType(final DataTypeAdapter dataTypeAdapter, final Index... initialIndices) { addTypeInternal(dataTypeAdapter, null, initialIndices); } @Override public void addType( final DataTypeAdapter dataTypeAdapter, final List> statistics, final Index... initialIndices) { addType(dataTypeAdapter, null, statistics, initialIndices); } @Override public void addType( final DataTypeAdapter dataTypeAdapter, final VisibilityHandler visibilityHandler, final List> statistics, final Index... 
initialIndices) { if (addTypeInternal(dataTypeAdapter, visibilityHandler, initialIndices)) { statistics.stream().forEach(stat -> statisticsStore.addStatistic(stat)); } } protected boolean addTypeInternal( final DataTypeAdapter dataTypeAdapter, final VisibilityHandler visibilityHandler, final Index... initialIndices) { // add internal adapter final InternalDataAdapter adapter = dataTypeAdapter.asInternalAdapter( internalAdapterStore.addTypeName(dataTypeAdapter.getTypeName()), visibilityHandler); final boolean newAdapter = !adapterStore.adapterExists(adapter.getAdapterId()); final Index[] initialIndicesUnique = Arrays.stream(initialIndices).distinct().toArray(size -> new Index[size]); internalAddIndices(adapter, initialIndicesUnique); store(adapter); return newAdapter; } /** Returns an index writer to perform batched write operations for the given typename */ @Override public Writer createWriter(final String typeName) { return createWriter(typeName, null); } /** Returns an index writer to perform batched write operations for the given typename */ @Override public Writer createWriter( final String typeName, final VisibilityHandler visibilityHandler) { final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { LOGGER.warn( "DataTypeAdapter does not exist for type '" + typeName + "'. Add it using addType()."); return null; } final InternalDataAdapter adapter = (InternalDataAdapter) adapterStore.getAdapter(adapterId); if (adapter == null) { LOGGER.warn( "DataTypeAdapter is undefined for type '" + typeName + "'. Add it using addType()."); return null; } final AdapterToIndexMapping[] mappings = indexMappingStore.getIndicesForAdapter(adapterId); if ((mappings.length == 0) && !baseOptions.isSecondaryIndexing()) { LOGGER.warn( "No indices for type '" + typeName + "'. 
Add indices using addIndex(, )."); return null; } final Index[] indices = Arrays.stream(mappings).map(mapping -> mapping.getIndex(indexStore)).toArray(Index[]::new); return createWriter(adapter, visibilityHandler, true, indices); } @Override public void ingest(final String inputPath, final Index... index) { ingest(inputPath, null, index); } @Override public void ingest( final String inputPath, final IngestOptions options, final Index... index) { // Driver final BaseDataStoreIngestDriver driver = new BaseDataStoreIngestDriver( this, options == null ? IngestOptions.newBuilder().build() : options, index); // Execute if (!driver.runOperation(inputPath, null)) { throw new RuntimeException("Ingest failed to execute"); } } @Override public

R aggregate(final AggregationQuery query) { if (query == null) { LOGGER.warn("Aggregation must be defined"); return null; } R results = null; final Aggregation aggregation = query.getDataTypeQueryOptions().getAggregation(); try (CloseableIterator resultsIt = internalQuery( query.getQueryConstraints(), new BaseQueryOptions(query, adapterStore, internalAdapterStore), DeletionMode.DONT_DELETE)) { while (resultsIt.hasNext()) { final R next = resultsIt.next(); if (results == null) { results = next; } else { results = aggregation.merge(results, next); } } } if (results == null) { aggregation.clearResult(); return aggregation.getResult(); } else { return results; } } @SuppressWarnings("unchecked") protected , R> CloseableIterator internalQueryStatistics( final StatisticQuery query) { final List> statistics = Lists.newLinkedList(); if (query instanceof IndexStatisticQuery) { final IndexStatisticQuery statQuery = (IndexStatisticQuery) query; if (statQuery.indexName() == null) { final Index[] indices = getIndices(); for (final Index index : indices) { getIndexStatistics(index, statQuery, statistics); } } else { final Index index = indexStore.getIndex(statQuery.indexName()); if (index != null) { getIndexStatistics(index, statQuery, statistics); } } } else if (query instanceof DataTypeStatisticQuery) { final DataTypeStatisticQuery statQuery = (DataTypeStatisticQuery) query; if (statQuery.typeName() == null) { final DataTypeAdapter[] adapters = getTypes(); for (final DataTypeAdapter adapter : adapters) { getAdapterStatistics(adapter, statQuery, statistics); } } else { final DataTypeAdapter adapter = getType(statQuery.typeName()); if (adapter != null) { getAdapterStatistics(adapter, statQuery, statistics); } } } else if (query instanceof FieldStatisticQuery) { final FieldStatisticQuery statQuery = (FieldStatisticQuery) query; if (statQuery.typeName() == null) { final DataTypeAdapter[] adapters = getTypes(); for (final DataTypeAdapter adapter : adapters) { 
getFieldStatistics(adapter, statQuery, statistics); } } else { final DataTypeAdapter adapter = getType(statQuery.typeName()); if (adapter != null) { getFieldStatistics(adapter, statQuery, statistics); } } } if (query.binConstraints() != null) { final List>> iterators = new ArrayList<>(); for (final Statistic stat : statistics) { if (stat.getBinningStrategy() != null) { final ByteArrayConstraints bins = query.binConstraints().constraints(stat); // we really don't need to check if the binning strategy supports the class considering // the binning strategy won't return bin constraints if it doesn't support the object if ((bins != null) && ((bins.getBins().length > 0) || bins.isAllBins())) { iterators.add( statisticsStore.getStatisticValues( statistics.iterator(), bins, query.authorizations())); } } } return (CloseableIterator) new CloseableIteratorWrapper<>( () -> iterators.forEach(CloseableIterator::close), Iterators.concat(iterators.iterator())); } else { return (CloseableIterator) statisticsStore.getStatisticValues( statistics.iterator(), null, query.authorizations()); } } @Override public , R> CloseableIterator queryStatistics( final StatisticQuery query) { return internalQueryStatistics(query); } @Override public , R> V aggregateStatistics(final StatisticQuery query) { if (query.statisticType() == null) { LOGGER.error("Statistic Type must be provided for a statistical aggregation"); return null; } try (CloseableIterator values = internalQueryStatistics(query)) { V value = null; while (values.hasNext()) { if (value == null) { value = values.next(); } else { value.merge(values.next()); } } return value; } } @SuppressWarnings("unchecked") private , R> void getIndexStatistics( final Index index, final IndexStatisticQuery query, final List> statistics) { try (CloseableIterator> statsIter = statisticsStore.getIndexStatistics(index, query.statisticType(), query.tag())) { while (statsIter.hasNext()) { statistics.add((Statistic) statsIter.next()); } } } 
@SuppressWarnings("unchecked") private , R> void getAdapterStatistics( final DataTypeAdapter adapter, final DataTypeStatisticQuery query, final List> statistics) { try (CloseableIterator> statsIter = statisticsStore.getDataTypeStatistics(adapter, query.statisticType(), query.tag())) { while (statsIter.hasNext()) { statistics.add((Statistic) statsIter.next()); } } } @SuppressWarnings("unchecked") private , R> void getFieldStatistics( final DataTypeAdapter adapter, final FieldStatisticQuery query, final List> statistics) { try (CloseableIterator> statsIter = statisticsStore.getFieldStatistics( adapter, query.statisticType(), query.fieldName(), query.tag())) { while (statsIter.hasNext()) { statistics.add((Statistic) statsIter.next()); } } } @Override public void copyTo(final DataStore other) { if (other instanceof BaseDataStore) { // if we have access to datastoreoperations for "other" we can more // efficiently copy underlying GeoWaveRow and GeoWaveMetadata for (final MetadataType metadataType : MetadataType.values()) { try (MetadataWriter writer = ((BaseDataStore) other).baseOperations.createMetadataWriter(metadataType)) { final MetadataReader reader = baseOperations.createMetadataReader(metadataType); try (CloseableIterator it = reader.query(new MetadataQuery())) { while (it.hasNext()) { writer.write(it.next()); } } } catch (final Exception e) { LOGGER.error("Unable to write metadata on copy", e); } } final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter adapter : adapters) { final AdapterToIndexMapping[] mappings = indexMappingStore.getIndicesForAdapter(adapter.getAdapterId()); for (final AdapterToIndexMapping mapping : mappings) { final Index index = mapping.getIndex(indexStore); final boolean rowMerging = BaseDataStoreUtils.isRowMerging(adapter); final ReaderParamsBuilder bldr = new ReaderParamsBuilder<>( index, adapterStore, indexMappingStore, internalAdapterStore, rowMerging ? 
new GeoWaveRowMergingTransform( BaseDataStoreUtils.getRowMergingAdapter(adapter), adapter.getAdapterId()) : GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER); bldr.adapterIds(new short[] {adapter.getAdapterId()}); bldr.isClientsideRowMerging(rowMerging); try (RowReader reader = baseOperations.createReader(bldr.build())) { try (RowWriter writer = ((BaseDataStore) other).baseOperations.createWriter(index, adapter)) { while (reader.hasNext()) { writer.write(reader.next()); } } } catch (final Exception e) { LOGGER.error("Unable to write metadata on copy", e); } } } } else { final DataTypeAdapter[] sourceTypes = getTypes(); // add all the types that the destination store doesn't have yet final DataTypeAdapter[] destTypes = other.getTypes(); for (int i = 0; i < sourceTypes.length; i++) { boolean found = false; for (int k = 0; k < destTypes.length; k++) { if (destTypes[k].getTypeName().compareTo(sourceTypes[i].getTypeName()) == 0) { found = true; break; } } if (!found) { other.addType(sourceTypes[i]); } } // add the indices for each type for (int i = 0; i < sourceTypes.length; i++) { final String typeName = sourceTypes[i].getTypeName(); final short adapterId = internalAdapterStore.getAdapterId(typeName); final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(adapterId); final Index[] indices = Arrays.stream(indexMappings).map(mapping -> mapping.getIndex(indexStore)).toArray( Index[]::new); other.addIndex(typeName, indices); final QueryBuilder qb = QueryBuilder.newBuilder().addTypeName(typeName); try (CloseableIterator it = query(qb.build())) { try (final Writer writer = other.createWriter(typeName)) { while (it.hasNext()) { writer.write(it.next()); } } } } } } @Override public void copyTo(final DataStore other, final Query query) { // check for 'everything' query if (query == null) { copyTo(other); return; } final String[] typeNames = query.getDataTypeQueryOptions().getTypeNames(); final String indexName = 
query.getIndexQueryOptions().getIndexName(); final boolean isAllIndices = query.getIndexQueryOptions().isAllIndices(); final List> typesToCopy; // if typeNames are not specified, then it means 'everything' as well if (((typeNames == null) || (typeNames.length == 0))) { if ((query.getQueryConstraints() == null) || (query.getQueryConstraints() instanceof EverythingQuery)) { copyTo(other); return; } else { typesToCopy = Arrays.asList(getTypes()); } } else { // make sure the types requested exist in the source store (this) // before trying to copy! final DataTypeAdapter[] sourceTypes = getTypes(); typesToCopy = new ArrayList<>(); for (int i = 0; i < typeNames.length; i++) { boolean found = false; for (int k = 0; k < sourceTypes.length; k++) { if (sourceTypes[k].getTypeName().compareTo(typeNames[i]) == 0) { found = true; typesToCopy.add(sourceTypes[k]); break; } } if (!found) { throw new IllegalArgumentException( "Some type names specified in the query do not exist in the source database and thus cannot be copied."); } } } // if there is an index requested in the query, make sure it exists in // the source store before trying to copy as well! 
final Index[] sourceIndices = getIndices(); Index indexToCopy = null; if (!isAllIndices) { // just add the one index specified by the query // first make sure source index exists though boolean found = false; for (int i = 0; i < sourceIndices.length; i++) { if (sourceIndices[i].getName().compareTo(indexName) == 0) { found = true; indexToCopy = sourceIndices[i]; break; } } if (!found) { throw new IllegalArgumentException( "The index specified in the query does not exist in the source database and thus cannot be copied."); } // also make sure the types/index mapping for the query are legit for (int i = 0; i < typeNames.length; i++) { final short adapterId = internalAdapterStore.getAdapterId(typeNames[i]); final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(adapterId); found = false; for (int k = 0; k < indexMappings.length; k++) { if (indexMappings[k].getIndexName().compareTo(indexName) == 0) { found = true; break; } } if (!found) { throw new IllegalArgumentException( "The index " + indexName + " and the type " + typeNames[i] + " specified by the query are not associated in the source database"); } } } // add all the types that the destination store doesn't have yet final DataTypeAdapter[] destTypes = other.getTypes(); for (int i = 0; i < typesToCopy.size(); i++) { boolean found = false; for (int k = 0; k < destTypes.length; k++) { if (destTypes[k].getTypeName().compareTo(typesToCopy.get(i).getTypeName()) == 0) { found = true; break; } } if (!found) { other.addType(typesToCopy.get(i)); } } // add all the indices that the destination store doesn't have yet if (isAllIndices) { // in this case, all indices from the types requested by the query for (int i = 0; i < typesToCopy.size(); i++) { final String typeName = typesToCopy.get(i).getTypeName(); final short adapterId = internalAdapterStore.getAdapterId(typeName); final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(adapterId); final Index[] indices = 
Arrays.stream(indexMappings).map(mapping -> mapping.getIndex(indexStore)).toArray( Index[]::new); other.addIndex(typeName, indices); final QueryBuilder qb = QueryBuilder.newBuilder().addTypeName(typeName).constraints( query.getQueryConstraints()); try (CloseableIterator it = query(qb.build())) { try (Writer writer = other.createWriter(typeName)) { while (it.hasNext()) { writer.write(it.next()); } } } } } else { // otherwise, add just the one index to the types specified by the // query for (int i = 0; i < typesToCopy.size(); i++) { other.addIndex(typesToCopy.get(i).getTypeName(), indexToCopy); } // Write out / copy the data. We must do this on a per-type basis so // we can write appropriately for (int k = 0; k < typesToCopy.size(); k++) { final InternalDataAdapter adapter = adapterStore.getAdapter( internalAdapterStore.getAdapterId(typesToCopy.get(k).getTypeName())); final QueryBuilder qb = QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( indexToCopy.getName()).constraints(query.getQueryConstraints()); try (CloseableIterator it = query(qb.build())) { try (Writer writer = other.createWriter(adapter.getTypeName())) { while (it.hasNext()) { writer.write(it.next()); } } } } } } @Override public void removeIndex(final String indexName) { // remove the given index for all types // this is a little convoluted and requires iterating over all the // adapters, getting each adapter's index map, checking if the index is // there, and // then mark it for removal from both the map and from the index store. // If this index is the only index remaining for a given type, then we // need // to throw an exception first (no deletion will occur). 
final Index index = indexStore.getIndex(indexName); if (index == null) { LOGGER.warn("Unable to remove index '" + indexName + "' because it was not found."); return; } final ArrayList markedAdapters = new ArrayList<>(); final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter dataAdapter : adapters) { final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId()); for (int i = 0; i < indexMappings.length; i++) { if (indexMappings[i].getIndexName().equals(indexName) && !baseOptions.isSecondaryIndexing()) { // check if it is the only index for the current adapter if (indexMappings.length == 1) { throw new IllegalStateException( "Index removal failed. Adapters require at least one index."); } else { // mark the index for removal and continue looking // for // others markedAdapters.add(dataAdapter.getAdapterId()); continue; } } } } // take out the index from the data statistics, and mapping for (int i = 0; i < markedAdapters.size(); i++) { final short adapterId = markedAdapters.get(i); baseOperations.deleteAll(indexName, internalAdapterStore.getTypeName(adapterId), adapterId); indexMappingStore.remove(adapterId, indexName); } statisticsStore.removeStatistics(index); // remove the actual index indexStore.removeIndex(indexName); } @Override public void removeIndex(final String typeName, final String indexName) throws IllegalStateException { // First make sure the adapter exists and that this is not the only // index left for the given adapter. If it is, we should throw an // exception. 
final short adapterId = internalAdapterStore.getAdapterId(typeName); final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(adapterId); if (indexMappings.length == 0) { throw new IllegalArgumentException( "No adapter with typeName " + typeName + "could be found."); } if ((indexMappings.length == 1) && !baseOptions.isSecondaryIndexing()) { throw new IllegalStateException("Index removal failed. Adapters require at least one index."); } // Remove all the data for the adapter and index baseOperations.deleteAll(indexName, typeName, adapterId); // If this is the last adapter/type associated with the index, then we // can remove the actual index too. final Short[] adapters = getAdaptersForIndex(indexName); if (adapters.length == 1) { indexStore.removeIndex(indexName); } else { try (CloseableIterator> iter = statisticsStore.getIndexStatistics(getIndex(indexName), null, null)) { while (iter.hasNext()) { statisticsStore.removeTypeSpecificStatisticValues( (IndexStatistic) iter.next(), typeName); } } } // Finally, remove the mapping indexMappingStore.remove(adapterId, indexName); } @Override public void removeType(final String typeName) { // Removing a type requires removing the data associated with the type, // the index mapping for the type, and we also need to remove stats for // the type. 
final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId != null) { final AdapterToIndexMapping[] indexMappings = indexMappingStore.getIndicesForAdapter(adapterId); // remove all the data for each index paired to this adapter for (int i = 0; i < indexMappings.length; i++) { baseOperations.deleteAll(indexMappings[i].getIndexName(), typeName, adapterId); } if (baseOptions.isSecondaryIndexing()) { baseOperations.deleteAll(DataIndexUtils.DATA_ID_INDEX.getName(), typeName, adapterId); } statisticsStore.removeStatistics(adapterStore.getAdapter(adapterId)); indexMappingStore.remove(adapterId); internalAdapterStore.remove(adapterId); adapterStore.removeAdapter(adapterId); } } @Override public void deleteAll() { deleteEverything(); } public IndexStore getIndexStore() { return indexStore; } public PersistentAdapterStore getAdapterStore() { return adapterStore; } public AdapterIndexMappingStore getIndexMappingStore() { return indexMappingStore; } public DataStoreOperations getBaseOperations() { return baseOperations; } public InternalAdapterStore getInternalAdapterStore() { return internalAdapterStore; } public boolean isReverseIterationSupported() { return false; } private void addStatistics( final Statistic>[] statistics, final boolean calculateStats) { if ((statistics == null) || (statistics.length == 0)) { return; } // grouping stats is separated from calculating stats primarily because regardless of whether // stats are calculated they should be validated before adding them to the statistics store final Pair>>, Map, List>>>> groupedStats = groupAndValidateStats(statistics, false); final Map>> indexStatsToAdd = groupedStats.getLeft(); final Map, List>>> otherStatsToAdd = groupedStats.getRight(); for (final List> indexStats : indexStatsToAdd.values()) { indexStats.forEach(indexStat -> statisticsStore.addStatistic(indexStat)); } for (final List>> otherStats : otherStatsToAdd.values()) { otherStats.forEach(statistic -> 
statisticsStore.addStatistic(statistic)); } if (calculateStats) { calcStats(indexStatsToAdd, otherStatsToAdd); } } private Pair>>, Map, List>>>> groupAndValidateStats( final Statistic>[] statistics, final boolean allowExisting) { final Map>> indexStatsToAdd = Maps.newHashMap(); final Map, List>>> otherStatsToAdd = Maps.newHashMap(); for (final Statistic> statistic : statistics) { if (!allowExisting && statisticsStore.exists(statistic)) { throw new IllegalArgumentException( "The statistic already exists. If adding it is still desirable, use a 'tag' to make the statistic unique."); } if (statistic instanceof IndexStatistic) { final IndexStatistic indexStat = (IndexStatistic) statistic; if (indexStat.getIndexName() == null) { throw new IllegalArgumentException("No index specified."); } final Index index = indexStore.getIndex(indexStat.getIndexName()); if (index == null) { throw new IllegalArgumentException("No index named " + indexStat.getIndexName() + "."); } if (!indexStatsToAdd.containsKey(index)) { indexStatsToAdd.put(index, Lists.newArrayList()); } indexStatsToAdd.get(index).add(indexStat); } else if (statistic instanceof DataTypeStatistic) { final DataTypeStatistic adapterStat = (DataTypeStatistic) statistic; if (adapterStat.getTypeName() == null) { throw new IllegalArgumentException("No type specified."); } final InternalDataAdapter adapter = getInternalAdapter(adapterStat.getTypeName()); if (adapter == null) { throw new IllegalArgumentException("No type named " + adapterStat.getTypeName() + "."); } if (!otherStatsToAdd.containsKey(adapter)) { otherStatsToAdd.put(adapter, Lists.newArrayList()); } otherStatsToAdd.get(adapter).add(adapterStat); } else if (statistic instanceof FieldStatistic) { final FieldStatistic fieldStat = (FieldStatistic) statistic; if (fieldStat.getTypeName() == null) { throw new IllegalArgumentException("No type specified."); } final InternalDataAdapter adapter = getInternalAdapter(fieldStat.getTypeName()); if (adapter == null) { throw new 
IllegalArgumentException("No type named " + fieldStat.getTypeName() + "."); } if (fieldStat.getFieldName() == null) { throw new IllegalArgumentException("No field specified."); } boolean foundMatch = false; final FieldDescriptor[] fields = adapter.getFieldDescriptors(); for (int i = 0; i < fields.length; i++) { if (fieldStat.getFieldName().equals(fields[i].fieldName())) { foundMatch = true; break; } } if (!foundMatch) { throw new IllegalArgumentException( "No field named " + fieldStat.getFieldName() + " was found on the type " + fieldStat.getTypeName() + "."); } if (!otherStatsToAdd.containsKey(adapter)) { otherStatsToAdd.put(adapter, Lists.newArrayList()); } otherStatsToAdd.get(adapter).add(fieldStat); } else { throw new IllegalArgumentException("Unrecognized statistic type."); } } return Pair.of(indexStatsToAdd, otherStatsToAdd); } @SuppressWarnings("unchecked") private void calcStats( final Map>> indexStatsToAdd, final Map, List>>> otherStatsToAdd) { for (final Entry>> indexStats : indexStatsToAdd.entrySet()) { final Index index = indexStats.getKey(); final ArrayList indexAdapters = new ArrayList<>(); final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter dataAdapter : adapters) { final AdapterToIndexMapping[] adapterIndexMap = indexMappingStore.getIndicesForAdapter(dataAdapter.getAdapterId()); for (int i = 0; i < adapterIndexMap.length; i++) { if (adapterIndexMap[i].getIndexName().equals(index.getName())) { indexAdapters.add(adapterIndexMap[i].getAdapterId()); break; } } } // Scan all adapters used on the index for (int i = 0; i < indexAdapters.size(); i++) { final short adapterId = indexAdapters.get(i); final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId); final Query query = QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).build(); final List>> statsToUpdate = Lists.newArrayList(indexStats.getValue()); if (otherStatsToAdd.containsKey(adapter)) { 
statsToUpdate.addAll(otherStatsToAdd.get(adapter)); // Adapter-specific stats only need to be computed once, so remove them once they've // been processed otherStatsToAdd.remove(adapter); } final AdapterToIndexMapping indexMapping = indexMappingStore.getMapping(adapterId, index.getName()); try (StatisticUpdateCallback updateCallback = new StatisticUpdateCallback<>( statsToUpdate, statisticsStore, index, indexMapping, adapter)) { try (CloseableIterator entryIt = this.query(query, (ScanCallback) updateCallback)) { while (entryIt.hasNext()) { entryIt.next(); } } } } } for (final Entry, List>>> otherStats : otherStatsToAdd.entrySet()) { final InternalDataAdapter adapter = otherStats.getKey(); final String typeName = adapter.getTypeName(); final Index[] indices = getIndices(typeName); if (indices.length == 0) { // If there are no indices, then there is nothing to calculate. return; } final Query query = QueryBuilder.newBuilder().addTypeName(typeName).indexName(indices[0].getName()).build(); final AdapterToIndexMapping indexMapping = indexMappingStore.getMapping(adapter.getAdapterId(), indices[0].getName()); try (StatisticUpdateCallback updateCallback = new StatisticUpdateCallback<>( otherStats.getValue(), statisticsStore, indices[0], indexMapping, adapter)) { try (CloseableIterator entryIt = this.query(query, (ScanCallback) updateCallback)) { while (entryIt.hasNext()) { entryIt.next(); } } } } } @Override public void removeStatistic(final Statistic... statistic) { final boolean removed = statisticsStore.removeStatistics(Arrays.asList(statistic).iterator()); if (!removed) { throw new IllegalArgumentException( "Statistic could not be removed because it was not found."); } } @Override public void addEmptyStatistic(final Statistic... statistic) { addStatistics(statistic, false); } @Override public void addStatistic(final Statistic... statistic) { addStatistics(statistic, true); } @Override public void recalcStatistic(final Statistic... 
statistic) { for (final Statistic stat : statistic) { if (!statisticsStore.exists(stat)) { throw new IllegalArgumentException("The statistic " + stat.toString() + " doesn't exist."); } } final Pair>>, Map, List>>>> groupedStats = groupAndValidateStats(statistic, true); // Remove old statistic values for (final Statistic stat : statistic) { statisticsStore.removeStatisticValues(stat); } calcStats(groupedStats.getLeft(), groupedStats.getRight()); } @Override public DataTypeStatistic[] getDataTypeStatistics(final String typeName) { final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { throw new IllegalArgumentException(typeName + " doesn't exist"); } final List> retVal = new ArrayList<>(); try (CloseableIterator> it = statisticsStore.getDataTypeStatistics(adapterStore.getAdapter(adapterId), null, null)) { while (it.hasNext()) { retVal.add(it.next()); } } return retVal.toArray(new DataTypeStatistic[retVal.size()]); } @SuppressWarnings("unchecked") @Override public , R> DataTypeStatistic getDataTypeStatistic( final StatisticType statisticType, final String typeName, final String tag) { if (!(statisticType instanceof DataTypeStatisticType)) { throw new IllegalArgumentException("Statistic type must be a data type statistic."); } final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { throw new IllegalArgumentException(typeName + " doesn't exist"); } final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId); if (adapter == null) { throw new IllegalArgumentException(typeName + " is null"); } DataTypeStatistic retVal = null; if (tag == null) { retVal = internalGetDataTypeStatistic( (DataTypeStatisticType) statisticType, adapter, Statistic.DEFAULT_TAG); if (retVal == null) { retVal = internalGetDataTypeStatistic( (DataTypeStatisticType) statisticType, adapter, Statistic.INTERNAL_TAG); } if (retVal == null) { try (CloseableIterator> iter = (CloseableIterator>) 
statisticsStore.getDataTypeStatistics( adapter, statisticType, null)) { if (iter.hasNext()) { retVal = iter.next(); if (iter.hasNext()) { throw new IllegalArgumentException( "Multiple statistics with different tags were found. A tag must be specified."); } } } } } else { retVal = internalGetDataTypeStatistic((DataTypeStatisticType) statisticType, adapter, tag); } return retVal; } private , R> DataTypeStatistic internalGetDataTypeStatistic( final DataTypeStatisticType statisticType, final DataTypeAdapter adapter, final String tag) { final StatisticId statId = DataTypeStatistic.generateStatisticId(adapter.getTypeName(), statisticType, tag); return (DataTypeStatistic) statisticsStore.getStatisticById(statId); } @Override public IndexStatistic[] getIndexStatistics(final String indexName) { final Index index = getIndex(indexName); if (index == null) { throw new IllegalArgumentException(indexName + " doesn't exist"); } final List> retVal = new ArrayList<>(); try (CloseableIterator> it = statisticsStore.getIndexStatistics(index, null, null)) { while (it.hasNext()) { retVal.add(it.next()); } } return retVal.toArray(new IndexStatistic[retVal.size()]); } @SuppressWarnings("unchecked") @Override public , R> IndexStatistic getIndexStatistic( final StatisticType statisticType, final String indexName, final String tag) { if (!(statisticType instanceof IndexStatisticType)) { throw new IllegalArgumentException("Statistic type must be an index statistic."); } final Index index = getIndex(indexName); if (index == null) { throw new IllegalArgumentException(indexName + " doesn't exist"); } IndexStatistic retVal = null; if (tag == null) { retVal = internalGetIndexStatistic( (IndexStatisticType) statisticType, index, Statistic.DEFAULT_TAG); if (retVal == null) { retVal = internalGetIndexStatistic( (IndexStatisticType) statisticType, index, Statistic.INTERNAL_TAG); } if (retVal == null) { try (CloseableIterator> iter = (CloseableIterator>) statisticsStore.getIndexStatistics( index, 
statisticType, null)) { if (iter.hasNext()) { retVal = iter.next(); if (iter.hasNext()) { throw new IllegalArgumentException( "Multiple statistics with different tags were found. A tag must be specified."); } } } } } else { retVal = internalGetIndexStatistic((IndexStatisticType) statisticType, index, tag); } return retVal; } private , R> IndexStatistic internalGetIndexStatistic( final IndexStatisticType statisticType, final Index index, final String tag) { final StatisticId statId = IndexStatistic.generateStatisticId(index.getName(), statisticType, tag); return (IndexStatistic) statisticsStore.getStatisticById(statId); } @Override public FieldStatistic[] getFieldStatistics(final String typeName, final String fieldName) { final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { throw new IllegalArgumentException(typeName + " doesn't exist"); } final List> retVal = new ArrayList<>(); try (CloseableIterator> it = statisticsStore.getFieldStatistics( adapterStore.getAdapter(adapterId), null, fieldName, null)) { while (it.hasNext()) { retVal.add(it.next()); } } return retVal.toArray(new FieldStatistic[retVal.size()]); } @SuppressWarnings("unchecked") @Override public , R> FieldStatistic getFieldStatistic( final StatisticType statisticType, final String typeName, final String fieldName, final String tag) { if (!(statisticType instanceof FieldStatisticType)) { throw new IllegalArgumentException("Statistic type must be a field statistic."); } final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { throw new IllegalArgumentException(typeName + " doesn't exist"); } final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId); if (adapter == null) { throw new IllegalArgumentException(typeName + " is null"); } FieldStatistic retVal = null; if (tag == null) { retVal = internalGetFieldStatistic( (FieldStatisticType) statisticType, adapter, fieldName, Statistic.DEFAULT_TAG); if (retVal == null) { 
retVal = internalGetFieldStatistic( (FieldStatisticType) statisticType, adapter, fieldName, Statistic.INTERNAL_TAG); } if (retVal == null) { try (CloseableIterator> iter = (CloseableIterator>) statisticsStore.getFieldStatistics( adapter, statisticType, fieldName, null)) { if (iter.hasNext()) { retVal = iter.next(); if (iter.hasNext()) { throw new IllegalArgumentException( "Multiple statistics with different tags were found. A tag must be specified."); } } } } } else { retVal = internalGetFieldStatistic((FieldStatisticType) statisticType, adapter, fieldName, tag); } return retVal; } private , R> FieldStatistic internalGetFieldStatistic( final FieldStatisticType statisticType, final DataTypeAdapter adapter, final String fieldName, final String tag) { final StatisticId statId = FieldStatistic.generateStatisticId(adapter.getTypeName(), statisticType, fieldName, tag); return (FieldStatistic) statisticsStore.getStatisticById(statId); } @SuppressWarnings("unchecked") @Override public , R> R getStatisticValue( final Statistic stat, BinConstraints binConstraints) { if (stat == null) { throw new IllegalArgumentException("Statistic must be non-null"); } if (binConstraints == null) { LOGGER.warn("Constraints are null, assuming all bins should match."); binConstraints = BinConstraints.allBins(); } try (CloseableIterator values = (CloseableIterator) statisticsStore.getStatisticValues( Iterators.forArray(stat), binConstraints.constraints(stat))) { final V value = stat.createEmpty(); while (values.hasNext()) { value.merge(values.next()); } return value.getValue(); } } @SuppressWarnings("unchecked") @Override public , R> CloseableIterator> getBinnedStatisticValues( final Statistic stat, BinConstraints binConstraints) { if (stat == null) { throw new IllegalArgumentException("Statistic must be non-null"); } if (binConstraints == null) { LOGGER.warn("Constraints are null, assuming all bins should match."); binConstraints = BinConstraints.allBins(); } final CloseableIterator values = 
(CloseableIterator) statisticsStore.getStatisticValues( Iterators.forArray(stat), binConstraints.constraints(stat)); return new CloseableIteratorWrapper<>( values, Iterators.transform(values, (v) -> Pair.of(v.getBin(), v.getValue()))); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseDataStoreUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nullable; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.IndexUtils; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIterator.Wrapper; import org.locationtech.geowave.core.store.DataStoreProperty; import org.locationtech.geowave.core.store.PropertyStore; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import 
org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.AsyncPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FullAsyncPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.LazyReadPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.PartialAsyncPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.adapter.exceptions.AdapterException; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.AttributeIndex; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.api.IndexFieldMapper.IndexFieldOptions; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.base.IntermediaryWriteEntryInfo.FieldInfo; import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval; import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.data.DataWriter; import 
org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.visibility.VisibilityComposer; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.entities.GeoWaveValueImpl; import org.locationtech.geowave.core.store.flatten.BitmaskUtils; import org.locationtech.geowave.core.store.flatten.BitmaskedPairComparator; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.IndexFieldMapperRegistry; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataQuery; import org.locationtech.geowave.core.store.operations.MetadataReader; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.core.store.query.aggregate.CommonIndexAggregation; import org.locationtech.geowave.core.store.query.constraints.AdapterAndIndexBasedQueryConstraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.ParameterException; import com.beust.jcommander.internal.Maps; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Sets; public class BaseDataStoreUtils { private static final Logger LOGGER = 
LoggerFactory.getLogger(BaseDataStoreUtils.class); public static final String DATA_VERSION_PROPERTY = "DATA_VERSION"; public static final String GLOBAL_VISIBILITY_PROPERTY = "GLOBAL_VISIBILITY"; public static final Integer DATA_VERSION = 1; public static void verifyCLIVersion( final String storeName, final DataStorePluginOptions options) { final DataStoreOperations operations = options.createDataStoreOperations(); final PropertyStore propertyStore = options.createPropertyStore(); final DataStoreProperty storeVersionProperty = propertyStore.getProperty(DATA_VERSION_PROPERTY); if ((storeVersionProperty == null) && !hasMetadata(operations, MetadataType.ADAPTER) && !hasMetadata(operations, MetadataType.INDEX)) { // Nothing has been loaded into the store yet return; } final int storeVersion = storeVersionProperty == null ? 0 : (int) storeVersionProperty.getValue(); if (storeVersion < DATA_VERSION) { throw new ParameterException( "The data store '" + storeName + "' is using an older serialization format. Either use an older " + "version of the CLI that is compatible with the data store, or migrate the data " + "store to a later version using the `geowave util migrate` command."); } else if (storeVersion > DATA_VERSION) { throw new ParameterException( "The data store '" + storeName + "' is using a newer serialization format. 
Please update to a " + "newer version of the CLI that is compatible with the data store."); } } public static boolean hasMetadata( final DataStoreOperations operations, final MetadataType metadataType) { try { if (!operations.metadataExists(metadataType)) { return false; } } catch (final IOException e) { LOGGER.warn("Error while checking existence of metadata table", e); } final MetadataReader reader = operations.createMetadataReader(metadataType); try (CloseableIterator it = reader.query(new MetadataQuery(null))) { return it.hasNext(); } } public static GeoWaveRow[] getGeoWaveRows( final T entry, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final VisibilityHandler visibilityHandler) { return getWriteInfo( entry, adapter, indexMapping, index, visibilityHandler, false, false, true).getRows(); } public static CloseableIterator aggregate( final CloseableIterator it, final Aggregation aggregationFunction, final DataTypeAdapter adapter) { if ((it != null) && it.hasNext()) { try { synchronized (aggregationFunction) { aggregationFunction.clearResult(); while (it.hasNext()) { final Object input = it.next(); if (input != null) { aggregationFunction.aggregate(adapter, input); } } } } finally { it.close(); } return new Wrapper<>(Iterators.singletonIterator(aggregationFunction.getResult())); } return new CloseableIterator.Empty<>(); } /** * Basic method that decodes a native row Currently overridden by Accumulo and HBase; Unification * in progress * *

Override this method if you can't pass in a GeoWaveRow! * * @throws AdapterException */ public static Object decodeRow( final GeoWaveRow geowaveRow, final QueryFilter[] clientFilters, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final Index index, final ScanCallback scanCallback, final byte[] fieldSubsetBitmask, final boolean decodeRow, final DataIndexRetrieval dataIndexRetrieval) throws AdapterException { final short internalAdapterId = geowaveRow.getAdapterId(); if ((adapter == null) && (adapterStore == null)) { final String msg = "Could not decode row from iterator. Either adapter or adapter store must be non-null."; LOGGER.error(msg); throw new AdapterException(msg); } final IntermediaryReadEntryInfo decodePackage = new IntermediaryReadEntryInfo(index, decodeRow); if (!decodePackage.setOrRetrieveAdapter(adapter, internalAdapterId, adapterStore)) { final String msg = "Could not retrieve adapter " + internalAdapterId + " from adapter store."; LOGGER.error(msg); throw new AdapterException(msg); } if (!decodePackage.setOrRetrieveIndexMapping(indexMapping, internalAdapterId, mappingStore)) { final String msg = "Could not retrieve adapter index mapping for adapter " + internalAdapterId; LOGGER.error(msg); throw new AdapterException(msg); } // Verify the adapter matches the data if (!decodePackage.isAdapterVerified()) { if (!decodePackage.verifyAdapter(internalAdapterId)) { final String msg = "Adapter verify failed: adapter does not match data."; LOGGER.error(msg); throw new AdapterException(msg); } } return getDecodedRow( geowaveRow, decodePackage, fieldSubsetBitmask, clientFilters, scanCallback, decodePackage.adapterSupportsDataIndex() ? 
dataIndexRetrieval : null); } /** * build a persistence encoding object first, pass it through the client filters and if its * accepted, use the data adapter to decode the persistence model into the native data type */ private static Object getDecodedRow( final GeoWaveRow row, final IntermediaryReadEntryInfo decodePackage, final byte[] fieldSubsetBitmask, final QueryFilter[] clientFilters, final ScanCallback scanCallback, final DataIndexRetrieval dataIndexRetrieval) { final boolean isSecondaryIndex = dataIndexRetrieval != null; final IndexedAdapterPersistenceEncoding encodedRow; if (isSecondaryIndex) { // this implies its a Secondary Index and the actual values must be looked up if (dataIndexRetrieval instanceof BatchDataIndexRetrieval) { if (decodePackage.getIndex().getIndexModel().useInSecondaryIndex()) { encodedRow = new PartialAsyncPersistenceEncoding( decodePackage.getDataAdapter().getAdapterId(), row.getDataId(), row.getPartitionKey(), row.getSortKey(), row.getNumberOfDuplicates(), (BatchDataIndexRetrieval) dataIndexRetrieval, decodePackage.getDataAdapter(), decodePackage.getIndex().getIndexModel(), decodePackage.getIndexMapping(), fieldSubsetBitmask, Suppliers.memoize( () -> dataIndexRetrieval.getData( decodePackage.getDataAdapter().getAdapterId(), row.getDataId()))); } else { encodedRow = new FullAsyncPersistenceEncoding( decodePackage.getDataAdapter().getAdapterId(), row.getDataId(), row.getPartitionKey(), row.getSortKey(), row.getNumberOfDuplicates(), (BatchDataIndexRetrieval) dataIndexRetrieval); } } else { encodedRow = new LazyReadPersistenceEncoding( decodePackage.getDataAdapter().getAdapterId(), row.getDataId(), row.getPartitionKey(), row.getSortKey(), row.getNumberOfDuplicates(), decodePackage.getDataAdapter(), decodePackage.getIndex().getIndexModel(), decodePackage.getIndexMapping(), fieldSubsetBitmask, Suppliers.memoize( () -> dataIndexRetrieval.getData( decodePackage.getDataAdapter().getAdapterId(), row.getDataId()))); } } else { encodedRow = new 
LazyReadPersistenceEncoding( decodePackage.getDataAdapter().getAdapterId(), row.getDataId(), row.getPartitionKey(), row.getSortKey(), row.getNumberOfDuplicates(), decodePackage.getDataAdapter(), decodePackage.getIndex().getIndexModel(), decodePackage.getIndexMapping(), fieldSubsetBitmask, row.getFieldValues(), false); } final BiFunction function = ((r, initialFilter) -> { final int i = clientFilterProgress( clientFilters, decodePackage.getIndex().getIndexModel(), r, initialFilter); if (i < 0) { if (!decodePackage.isDecodeRow()) { return r; } final T decodedRow = decodePackage.getDataAdapter().decode( r, decodePackage.getIndexMapping(), isSecondaryIndex ? DataIndexUtils.DATA_ID_INDEX : decodePackage.getIndex()); if (r.isAsync()) { return i; } if ((scanCallback != null)) { scanCallback.entryScanned(decodedRow, row); } return decodedRow; } if (r.isAsync()) { return i; } return null; }); final Object obj = function.apply(encodedRow, 0); if ((obj instanceof Integer) && encodedRow.isAsync()) { // by re-applying the function, client filters should not be called multiple times for the // same instance (beware of stateful filters such as dedupe filter). 
this method attempts to // maintain progress of the filter chain so that any successful filters prior to retrieving // the data will not need to be repeated return (((AsyncPersistenceEncoding) encodedRow).getFieldValuesFuture().thenApply( fv -> new LazyReadPersistenceEncoding( decodePackage.getDataAdapter().getAdapterId(), row.getDataId(), row.getPartitionKey(), row.getSortKey(), row.getNumberOfDuplicates(), decodePackage.getDataAdapter(), decodePackage.getIndex().getIndexModel(), decodePackage.getIndexMapping(), fieldSubsetBitmask, fv, true))).thenApply((r) -> function.apply(r, (Integer) obj)); } return obj; } /** * * @return returns -1 if all client filters have accepted the row, otherwise returns how many * client filters have accepted */ private static int clientFilterProgress( final QueryFilter[] clientFilters, final CommonIndexModel indexModel, final IndexedAdapterPersistenceEncoding encodedRow, final int initialFilter) { if ((clientFilters == null) || (initialFilter < 0)) { return -1; } for (int i = initialFilter; i < clientFilters.length; i++) { if (!clientFilters[i].accept(indexModel, encodedRow)) { return i; } } return -1; } public static byte[] getFieldBitmask( final Pair> fieldIdsAdapterPair, final Index index) { if ((fieldIdsAdapterPair != null) && (fieldIdsAdapterPair.getLeft() != null)) { return BitmaskUtils.generateFieldSubsetBitmask( index.getIndexModel(), fieldIdsAdapterPair.getLeft(), fieldIdsAdapterPair.getRight()); } return null; } private static void addIndexFieldVisibility( final T entry, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final VisibilityHandler visibilityHandler, final String indexField, final VisibilityComposer baseVisibility) { final String[] adapterFields = indexMapping.getMapperForIndexField(indexField).getAdapterFields(); for (final String adapterField : adapterFields) { final String adapterFieldVisibility = visibilityHandler.getVisibility(adapter, entry, adapterField); 
baseVisibility.addVisibility(adapterFieldVisibility); } } protected static IntermediaryWriteEntryInfo getWriteInfo( final T entry, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final VisibilityHandler visibilityHandler, final boolean secondaryIndex, final boolean dataIdIndex, final boolean visibilityEnabled) { final CommonIndexModel indexModel = index.getIndexModel(); final short internalAdapterId = adapter.getAdapterId(); final byte[] dataId = adapter.getDataId(entry); final AdapterPersistenceEncoding encodedData = adapter.encode(entry, indexMapping, index); if (encodedData == null) { // The entry could not be encoded to the index, but this could be due to a null value in one // of the index fields, which is possible in attribute indices LOGGER.info( "Indexing failed to produce insertion ids; entry [" + StringUtils.stringFromBinary(adapter.getDataId(entry)) + "] not saved for index '" + index.getName() + "'."); return new IntermediaryWriteEntryInfo( dataId, internalAdapterId, new InsertionIds(), new GeoWaveValueImpl[0]); } final InsertionIds insertionIds; if (index instanceof CustomIndexStrategy) { insertionIds = ((CustomIndexStrategy) index).getInsertionIds(entry); } else { insertionIds = dataIdIndex ? 
null : encodedData.getInsertionIds(index); } if (dataIdIndex) { return getWriteInfoDataIDIndex( entry, dataId, encodedData, adapter, indexMapping, index, visibilityHandler, visibilityEnabled); } if (insertionIds.isEmpty()) { // we can allow some entries to not be indexed within every index for flexibility, and // therefore this should just be info level LOGGER.info( "Indexing failed to produce insertion ids; entry [" + StringUtils.stringFromBinary(adapter.getDataId(entry)) + "] not saved for index '" + index.getName() + "'."); return new IntermediaryWriteEntryInfo( dataId, internalAdapterId, insertionIds, new GeoWaveValueImpl[0]); } final VisibilityComposer commonIndexVisibility = new VisibilityComposer(); if (visibilityEnabled && (visibilityHandler != null)) { for (final Entry fieldValue : encodedData.getCommonData().getValues().entrySet()) { addIndexFieldVisibility( entry, adapter, indexMapping, visibilityHandler, fieldValue.getKey(), commonIndexVisibility); } } if (secondaryIndex && DataIndexUtils.adapterSupportsDataIndex(adapter)) { return new IntermediaryWriteEntryInfo( dataId, internalAdapterId, insertionIds, new GeoWaveValue[] { new GeoWaveValueImpl( new byte[0], StringUtils.stringToBinary(commonIndexVisibility.composeVisibility()), new byte[0])}); } final List> fieldInfoList = new ArrayList<>(); addCommonFields( adapter, indexMapping, entry, index, indexModel, visibilityHandler, encodedData, visibilityEnabled, fieldInfoList); for (final Entry fieldValue : encodedData.getAdapterExtendedData().getValues().entrySet()) { if (fieldValue.getValue() != null) { final FieldInfo fieldInfo = getFieldInfo( adapter, adapter, indexMapping, fieldValue.getKey(), fieldValue.getValue(), entry, visibilityHandler, visibilityEnabled, false); if (fieldInfo != null) { fieldInfoList.add(fieldInfo); } } } return new IntermediaryWriteEntryInfo( dataId, internalAdapterId, insertionIds, BaseDataStoreUtils.composeFlattenedFields( fieldInfoList, indexModel, adapter, 
commonIndexVisibility, dataIdIndex)); } protected static IntermediaryWriteEntryInfo getWriteInfoDataIDIndex( final T entry, final byte[] dataId, final AdapterPersistenceEncoding encodedData, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final VisibilityHandler visibilityHandler, final boolean visibilityEnabled) { final List> fieldInfoList = new ArrayList<>(); addCommonFields( adapter, indexMapping, entry, index, index.getIndexModel(), visibilityHandler, encodedData, visibilityEnabled, fieldInfoList); for (final Entry fieldValue : encodedData.getAdapterExtendedData().getValues().entrySet()) { if (fieldValue.getValue() != null) { final FieldInfo fieldInfo = getFieldInfo( adapter, adapter, indexMapping, fieldValue.getKey(), fieldValue.getValue(), entry, visibilityHandler, visibilityEnabled, false); if (fieldInfo != null) { fieldInfoList.add(fieldInfo); } } } return new IntermediaryWriteEntryInfo( dataId, adapter.getAdapterId(), null, BaseDataStoreUtils.composeFlattenedFields( fieldInfoList, index.getIndexModel(), adapter, null, true)); } private static void addCommonFields( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final T entry, final Index index, final CommonIndexModel indexModel, final VisibilityHandler visibilityHandler, final AdapterPersistenceEncoding encodedData, final boolean visibilityEnabled, final List> fieldInfoList) { for (final Entry fieldValue : encodedData.getCommonData().getValues().entrySet()) { final FieldInfo fieldInfo = getFieldInfo( indexModel, adapter, indexMapping, fieldValue.getKey(), fieldValue.getValue(), entry, visibilityHandler, visibilityEnabled, true); if (fieldInfo != null) { fieldInfoList.add(fieldInfo); } } } /** * This method combines all FieldInfos that share a common visibility into a single FieldInfo * * @param originalList * @return a new list of composite FieldInfos */ private static GeoWaveValue[] composeFlattenedFields( final List> originalList, 
final CommonIndexModel model, final InternalDataAdapter writableAdapter, final VisibilityComposer commonIndexVisibility, final boolean dataIdIndex) { if (originalList.isEmpty()) { return new GeoWaveValue[0]; } final Map>>> vizToFieldMap = new LinkedHashMap<>(); // organize FieldInfos by unique visibility if (dataIdIndex) { final List>> fieldsWithPositions = (List) originalList.stream().map(fieldInfo -> { final int fieldPosition = writableAdapter.getPositionOfOrderedField(model, fieldInfo.getFieldId()); return (Pair) Pair.of(fieldPosition, fieldInfo); }).collect(Collectors.toList()); final VisibilityComposer combinedVisibility = new VisibilityComposer(); for (final FieldInfo fieldValue : originalList) { combinedVisibility.addVisibility(fieldValue.getVisibility()); } vizToFieldMap.put(combinedVisibility.composeVisibility(), fieldsWithPositions); } else { boolean sharedVisibility = false; for (final FieldInfo fieldInfo : originalList) { int fieldPosition = writableAdapter.getPositionOfOrderedField(model, fieldInfo.getFieldId()); if (fieldPosition == -1) { // this is just a fallback for unexpected failures fieldPosition = writableAdapter.getPositionOfOrderedField(model, fieldInfo.getFieldId()); } final VisibilityComposer currentComposer = new VisibilityComposer(commonIndexVisibility); currentComposer.addVisibility(fieldInfo.getVisibility()); final String currViz = currentComposer.composeVisibility(); if (vizToFieldMap.containsKey(currViz)) { sharedVisibility = true; final List>> listForViz = vizToFieldMap.get(currViz); listForViz.add(new ImmutablePair>(fieldPosition, fieldInfo)); } else { final List>> listForViz = new LinkedList<>(); listForViz.add(new ImmutablePair>(fieldPosition, fieldInfo)); vizToFieldMap.put(currViz, listForViz); } } if (!sharedVisibility) { // at a minimum, must return transformed (bitmasked) fieldInfos final GeoWaveValue[] bitmaskedValues = new GeoWaveValue[vizToFieldMap.size()]; int i = 0; for (final List>> list : vizToFieldMap.values()) { // 
every list must have exactly one element final Pair> fieldInfo = list.get(0); bitmaskedValues[i++] = new GeoWaveValueImpl( BitmaskUtils.generateCompositeBitmask(fieldInfo.getLeft()), StringUtils.stringToBinary(fieldInfo.getRight().getVisibility()), fieldInfo.getRight().getWrittenValue()); } return bitmaskedValues; } } if (vizToFieldMap.size() == 1) { return new GeoWaveValue[] {entryToValue(vizToFieldMap.entrySet().iterator().next())}; } else { final List retVal = new ArrayList<>(vizToFieldMap.size()); for (final Entry>>> entry : vizToFieldMap.entrySet()) { retVal.add(entryToValue(entry)); } return retVal.toArray(new GeoWaveValue[0]); } } private static GeoWaveValue entryToValue( final Entry>>> entry) { final SortedSet fieldPositions = new TreeSet<>(); final List>> fieldInfoList = entry.getValue(); final byte[] combinedValue = fieldInfoList.size() > 1 ? combineValues(fieldInfoList) : fieldInfoList.size() > 0 ? fieldInfoList.get(0).getRight().getWrittenValue() : new byte[0]; fieldInfoList.stream().forEach(p -> fieldPositions.add(p.getLeft())); final byte[] compositeBitmask = BitmaskUtils.generateCompositeBitmask(fieldPositions); return new GeoWaveValueImpl( compositeBitmask, StringUtils.stringToBinary(entry.getKey()), combinedValue); } private static byte[] combineValues(final List>> fieldInfoList) { int totalLength = 0; Collections.sort(fieldInfoList, new BitmaskedPairComparator()); final List fieldInfoBytesList = new ArrayList<>(fieldInfoList.size()); for (final Pair> fieldInfoPair : fieldInfoList) { final FieldInfo fieldInfo = fieldInfoPair.getRight(); final ByteBuffer fieldInfoBytes = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(fieldInfo.getWrittenValue().length) + fieldInfo.getWrittenValue().length); VarintUtils.writeUnsignedInt(fieldInfo.getWrittenValue().length, fieldInfoBytes); fieldInfoBytes.put(fieldInfo.getWrittenValue()); fieldInfoBytesList.add(fieldInfoBytes.array()); totalLength += fieldInfoBytes.array().length; } final ByteBuffer allFields = 
ByteBuffer.allocate(totalLength); for (final byte[] bytes : fieldInfoBytesList) { allFields.put(bytes); } return allFields.array(); } private static FieldInfo getFieldInfo( final DataWriter dataWriter, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final String fieldName, final Object fieldValue, final T entry, final VisibilityHandler visibilityHandler, final boolean visibilityEnabled, final boolean indexField) { final FieldWriter fieldWriter = dataWriter.getWriter(fieldName); if (fieldWriter != null) { final VisibilityComposer visibilityComposer = new VisibilityComposer(); if (visibilityEnabled && (visibilityHandler != null)) { if (indexField) { addIndexFieldVisibility( entry, adapter, indexMapping, visibilityHandler, fieldName, visibilityComposer); } else { visibilityComposer.addVisibility( visibilityHandler.getVisibility(adapter, entry, fieldName)); } } return new FieldInfo( fieldName, fieldWriter.writeField(fieldValue), visibilityComposer.composeVisibility()); } else if (fieldValue != null) { LOGGER.warn( "Data writer of class " + dataWriter.getClass() + " does not support field for " + fieldValue); } return null; } private static void sortInPlace(final List> input) { Collections.sort(input, new Comparator>() { @Override public int compare(final Pair o1, final Pair o2) { if (o1.getKey() == null) { if (o2.getKey() == null) { return 0; } return 1; } if (o2.getKey() == null) { return -1; } return o1.getKey().getName().compareTo(o2.getKey().getName()); } }); } @SuppressWarnings({"unchecked", "rawtypes"}) public static AdapterToIndexMapping mapAdapterToIndex( final InternalDataAdapter adapter, final Index index) { // Build up a list of index field mappers final Map> mappers = Maps.newHashMap(); // Get index model dimensions final NumericDimensionField[] dimensions = index.getIndexModel().getDimensions(); // Map dimensions to index fields final Map> indexFields = Arrays.stream(dimensions).collect( Collectors.groupingBy( dim -> 
dim.getFieldName(), Collectors.mapping(dim -> dim, Collectors.toList()))); // Get adapter fields final FieldDescriptor[] adapterFields = adapter.getFieldDescriptors(); for (final Entry> indexField : indexFields.entrySet()) { // Get the hints used by all dimensions of the field final Set dimensionHints = Sets.newHashSet(); indexField.getValue().forEach(dim -> dimensionHints.addAll(dim.getDimensionHints())); final Class indexFieldClass = indexField.getValue().get(0).getFieldClass(); final String indexFieldName = indexField.getKey(); final IndexFieldOptions indexFieldOptions = indexField.getValue().get(0).getIndexFieldOptions(); // Get available mappers for the field class final List> availableMappers = IndexFieldMapperRegistry.instance().getAvailableMappers(indexFieldClass); if (availableMappers.size() == 0) { throw new IllegalArgumentException( "No index field mappers were found for the type: " + indexFieldClass.getName()); } final List> hintedFields; if (index instanceof AttributeIndex) { // Only check the field that is set for the attribute index hintedFields = Lists.newArrayList( adapter.getFieldDescriptor(((AttributeIndex) index).getAttributeName())); } else { // Get any adapter fields that have been hinted for this field hintedFields = Arrays.stream(adapterFields).filter( field -> dimensionHints.stream().anyMatch(field.indexHints()::contains)).collect( Collectors.toList()); } if (hintedFields.size() > 0) { final Class hintedFieldClass = hintedFields.get(0).bindingClass(); for (int i = 1; i < hintedFields.size(); i++) { if (!hintedFieldClass.equals(hintedFields.get(i).bindingClass())) { throw new IllegalArgumentException("All hinted fields must be of the same type."); } } boolean mapperFound = false; // Find a mapper that matches for (final IndexFieldMapper mapper : availableMappers) { if (mapper.isCompatibleWith(hintedFieldClass) && (mapper.adapterFieldCount() == hintedFields.size())) { mapper.init(indexField.getKey(), (List) hintedFields, indexFieldOptions); 
mappers.put(indexField.getKey(), mapper); mapperFound = true; break; } } if (!mapperFound) { throw new IllegalArgumentException( "No registered index field mappers were found for the type: " + hintedFieldClass.getName() + "[" + hintedFields.size() + "] -> " + indexFieldClass.getName()); } } else { // Attempt to infer the field to use // See if there are any suggested fields boolean mapperFound = false; for (final IndexFieldMapper mapper : availableMappers) { final Set suggestedFieldNames = mapper.getLowerCaseSuggestedFieldNames(); final List> matchingFields = Arrays.stream(adapterFields).filter( field -> mapper.isCompatibleWith(field.bindingClass()) && suggestedFieldNames.contains(field.fieldName().toLowerCase())).collect( Collectors.toList()); if (matchingFields.size() >= mapper.adapterFieldCount()) { mapperFound = true; mapper.init( indexFieldName, (List) matchingFields.stream().limit(mapper.adapterFieldCount()).collect( Collectors.toList()), indexFieldOptions); mappers.put(indexFieldName, mapper); break; } } // See if a direct mapper is available if (!mapperFound) { for (final FieldDescriptor fieldDescriptor : adapterFields) { if (fieldDescriptor.bindingClass().equals(indexFieldClass)) { final Optional> matchingMapper = availableMappers.stream().filter( mapper -> mapper.isCompatibleWith(fieldDescriptor.bindingClass()) && (mapper.adapterFieldCount() == 1)).findFirst(); if (matchingMapper.isPresent()) { final IndexFieldMapper mapper = matchingMapper.get(); mapperFound = true; mapper.init( indexFieldName, (List) Lists.newArrayList(fieldDescriptor), indexFieldOptions); mappers.put(indexFieldName, mapper); break; } } } } // Check other mappers if (!mapperFound) { for (final IndexFieldMapper mapper : availableMappers) { final List> matchingFields = Arrays.stream(adapterFields).filter( field -> mapper.isCompatibleWith(field.bindingClass())).collect( Collectors.toList()); if (matchingFields.size() >= mapper.adapterFieldCount()) { mapperFound = true; mapper.init( 
indexFieldName, (List) matchingFields.stream().limit(mapper.adapterFieldCount()).collect( Collectors.toList()), indexFieldOptions); mappers.put(indexFieldName, mapper); break; } } } if (!mapperFound) { throw new IllegalArgumentException( "No suitable index field mapper could be found for the index field " + indexFieldName); } } } return new AdapterToIndexMapping( adapter.getAdapterId(), index.getName(), mappers.values().stream().collect(Collectors.toList())); } public static List>> combineByIndex(final List> input) { final List>> result = new ArrayList<>(); sortInPlace(input); List valueSet = new ArrayList<>(); Pair last = null; for (final Pair item : input) { if ((last != null) && (item.getKey() != null) && ((last.getKey() == null) || !last.getKey().getName().equals(item.getKey().getName()))) { result.add(Pair.of(last.getLeft(), valueSet)); valueSet = new ArrayList<>(); } valueSet.add(item.getValue()); last = item; } if (last != null) { result.add(Pair.of(last.getLeft(), valueSet)); } return result; } public static List>>> chooseBestIndex( final List>>> indexAdapterPairList, final QueryConstraints query, final AdapterIndexMappingStore mappingStore) { return chooseBestIndex(indexAdapterPairList, mappingStore, query, Function.identity()); } public static List>> chooseBestIndex( final List>> indexAdapterPairList, final AdapterIndexMappingStore mappingStore, final QueryConstraints query, final Function> adapterLookup) throws IllegalArgumentException { if (indexAdapterPairList.size() <= 1) { return indexAdapterPairList; } if ((query != null) && query.indexMustBeSpecified()) { throw new IllegalArgumentException("Query constraint requires specifying exactly one index"); } final Map> indicesPerAdapter = new HashMap<>(); for (final Pair> pair : indexAdapterPairList) { for (final T adapter : pair.getRight()) { List indices = indicesPerAdapter.get(adapter); if (indices == null) { indices = new ArrayList<>(); indicesPerAdapter.put(adapter, indices); } 
indices.add(pair.getLeft()); } } final Map> retVal = new HashMap<>(); for (final Entry> e : indicesPerAdapter.entrySet()) { final Index index = query == null ? e.getValue().get(0) : chooseBestIndex( e.getValue().toArray(new Index[0]), query, adapterLookup.apply(e.getKey()), mappingStore); List adapters = retVal.get(index); if (adapters == null) { adapters = new ArrayList<>(); retVal.put(index, adapters); } adapters.add(e.getKey()); } return retVal.entrySet().stream().map(e -> Pair.of(e.getKey(), e.getValue())).collect( Collectors.toList()); } public static Index chooseBestIndex( final Index[] indices, final QueryConstraints query, final InternalDataAdapter adapter, final AdapterIndexMappingStore mappingStore) { final boolean isConstraintsAdapterIndexSpecific = query instanceof AdapterAndIndexBasedQueryConstraints; Index nextIdx = null; int i = 0; double bestIndexBitsUsed = -1; int bestIndexDimensionCount = -1; Index bestIdx = null; while (i < indices.length) { nextIdx = indices[i++]; if ((nextIdx == null) || (nextIdx.getIndexStrategy() == null) || (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions() == null) || (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length == 0)) { continue; } QueryConstraints adapterIndexConstraints; if (isConstraintsAdapterIndexSpecific) { adapterIndexConstraints = ((AdapterAndIndexBasedQueryConstraints) query).createQueryConstraints( adapter, nextIdx, mappingStore.getMapping(adapter.getAdapterId(), nextIdx.getName())); if (adapterIndexConstraints == null) { continue; } } else { adapterIndexConstraints = query; } final List queryRanges = adapterIndexConstraints.getIndexConstraints(nextIdx); final int currentDimensionCount = nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length; if (IndexUtils.isFullTableScan(queryRanges) || !queryRangeDimensionsMatch(currentDimensionCount, queryRanges)) { // keep this is as a default in case all indices // result in a full table scan if (bestIdx == null) { bestIdx = 
nextIdx; } } else { double currentBitsUsed = 0; if (currentDimensionCount >= bestIndexDimensionCount) { for (final MultiDimensionalNumericData qr : queryRanges) { final double[] dataRangePerDimension = new double[qr.getDimensionCount()]; for (int d = 0; d < dataRangePerDimension.length; d++) { dataRangePerDimension[d] = qr.getMaxValuesPerDimension()[d] - qr.getMinValuesPerDimension()[d]; } currentBitsUsed += IndexUtils.getDimensionalBitsUsed( nextIdx.getIndexStrategy(), dataRangePerDimension); } if ((currentDimensionCount > bestIndexDimensionCount) || (currentBitsUsed > bestIndexBitsUsed)) { bestIndexBitsUsed = currentBitsUsed; bestIndexDimensionCount = currentDimensionCount; bestIdx = nextIdx; } } } } if ((bestIdx == null) && (indices.length > 0)) { bestIdx = indices[0]; } return bestIdx; } private static boolean queryRangeDimensionsMatch( final int indexDimensions, final List queryRanges) { for (final MultiDimensionalNumericData qr : queryRanges) { if (qr.getDimensionCount() != indexDimensions) { return false; } } return true; } public static List>> getAdaptersWithMinimalSetOfIndices( final @Nullable String[] typeNames, final @Nullable String indexName, final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore adapterIndexMappingStore, final IndexStore indexStore, final QueryConstraints constraints) throws IOException { return chooseBestIndex( reduceIndicesAndGroupByIndex( compileIndicesForAdapters( typeNames, indexName, adapterStore, internalAdapterStore, adapterIndexMappingStore, indexStore)), adapterIndexMappingStore, constraints, adapterId -> { final String typeName = internalAdapterStore.getTypeName(adapterId); if (typeName != null) { final DataTypeAdapter adapter = adapterStore.getAdapter(typeName); if (adapter != null) { return adapter.asInternalAdapter(adapterId); } } return null; }); } private static List> compileIndicesForAdapters( final @Nullable String[] typeNames, final @Nullable String 
indexName, final TransientAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore adapterIndexMappingStore, final IndexStore indexStore) throws IOException { Collection adapterIds; if ((typeNames == null) || (typeNames.length == 0)) { adapterIds = Arrays.asList(ArrayUtils.toObject(internalAdapterStore.getAdapterIds())); } else { adapterIds = Collections2.filter( Lists.transform( Arrays.asList(typeNames), typeName -> internalAdapterStore.getAdapterId(typeName)), adapterId -> adapterId != null); } final List> result = new ArrayList<>(); for (final Short adapterId : adapterIds) { final AdapterToIndexMapping[] indices = adapterIndexMappingStore.getIndicesForAdapter(adapterId); if ((indexName != null) && Arrays.stream(indices).anyMatch(mapping -> mapping.getIndexName().equals(indexName))) { result.add(Pair.of(indexStore.getIndex(indexName), adapterId)); } else if (indices.length > 0) { for (final AdapterToIndexMapping mapping : indices) { final Index pIndex = mapping.getIndex(indexStore); // this could happen if persistent was turned off if (pIndex != null) { result.add(Pair.of(pIndex, adapterId)); } } } } return result; } protected static List>> reduceIndicesAndGroupByIndex( final List> input) { final Map> result = Maps.newHashMap(); input.forEach(pair -> { if (!result.containsKey(pair.getLeft())) { result.put(pair.getLeft(), Lists.newArrayList()); } result.get(pair.getLeft()).add(pair.getRight()); }); return result.entrySet().stream().map( entry -> Pair.of(entry.getKey(), entry.getValue())).collect(Collectors.toList()); } public static DefaultStatisticsProvider getDefaultStatisticsProvider( final DataTypeAdapter adapter) { if (adapter instanceof InternalDataAdapter) { return getDefaultStatisticsProvider(((InternalDataAdapter) adapter).getAdapter()); } return adapter instanceof DefaultStatisticsProvider ? 
(DefaultStatisticsProvider) adapter : null; } public static RowMergingDataAdapter getRowMergingAdapter(final DataTypeAdapter adapter) { if (adapter instanceof InternalDataAdapter) { return getRowMergingAdapter(((InternalDataAdapter) adapter).getAdapter()); } return adapter instanceof RowMergingDataAdapter ? (RowMergingDataAdapter) adapter : null; } public static boolean isRowMerging(final DataTypeAdapter adapter) { if (adapter instanceof InternalDataAdapter) { return isRowMerging(((InternalDataAdapter) adapter).getAdapter()); } return (adapter instanceof RowMergingDataAdapter) && (((RowMergingDataAdapter) adapter).getTransform() != null); } public static boolean isRowMerging( final PersistentAdapterStore adapterStore, final short[] adapterIds) { if (adapterIds != null) { for (final short adapterId : adapterIds) { if (isRowMerging(adapterStore.getAdapter(adapterId).getAdapter())) { return true; } } } else { final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter adapter : adapters) { if (isRowMerging(adapter.getAdapter())) { return true; } } } return false; } public static boolean isAggregation( final Pair, Aggregation> aggregation) { return (aggregation != null) && (aggregation.getRight() != null); } public static boolean isCommonIndexAggregation( final Pair, Aggregation> aggregation) { return isAggregation(aggregation) && (aggregation.getRight() instanceof CommonIndexAggregation); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseFilteredIndexQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.RowReader; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue; import 
org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue; import org.locationtech.geowave.core.store.util.GeoWaveRowIteratorFactory; import org.locationtech.geowave.core.store.util.MergingEntryIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Iterators; abstract class BaseFilteredIndexQuery extends BaseQuery { protected List clientFilters; private static final Logger LOGGER = LoggerFactory.getLogger(BaseFilteredIndexQuery.class); public BaseFilteredIndexQuery( final short[] adapterIds, final Index index, final ScanCallback scanCallback, final Pair> fieldIdsAdapterPair, final DifferingVisibilityCountValue differingVisibilityCounts, final FieldVisibilityCountValue visibilityCounts, final DataIndexRetrieval dataIndexRetrieval, final String... authorizations) { super( adapterIds, index, fieldIdsAdapterPair, scanCallback, differingVisibilityCounts, visibilityCounts, dataIndexRetrieval, authorizations); } protected List getClientFilters() { return clientFilters; } @SuppressWarnings({"unchecked", "rawtypes"}) public CloseableIterator query( final DataStoreOperations datastoreOperations, final DataStoreOptions options, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final double[] maxResolutionSubsamplingPerDimension, final double[] targetResolutionPerDimensionForHierarchicalIndex, final Integer limit, final Integer queryMaxRangeDecomposition, final boolean delete) { final RowReader reader = getReader( datastoreOperations, options, adapterStore, mappingStore, internalAdapterStore, maxResolutionSubsamplingPerDimension, targetResolutionPerDimensionForHierarchicalIndex, limit, queryMaxRangeDecomposition, getRowTransformer( options, adapterStore, mappingStore, maxResolutionSubsamplingPerDimension, !isCommonIndexAggregation()), delete); if (reader == null) { return new 
CloseableIterator.Empty(); } Iterator it = reader; if ((limit != null) && (limit > 0)) { it = Iterators.limit(it, limit); } return new CloseableIteratorWrapper(reader, it); } @Override protected RowReader getReader( final DataStoreOperations datastoreOperations, final DataStoreOptions options, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final double[] maxResolutionSubsamplingPerDimension, final double[] targetResolutionPerDimensionForHierarchicalIndex, final Integer limit, final Integer queryMaxRangeDecomposition, final GeoWaveRowIteratorTransformer rowTransformer, final boolean delete) { boolean exists = false; try { exists = datastoreOperations.indexExists(index.getName()); } catch (final IOException e) { LOGGER.error("Table does not exist", e); } if (!exists) { LOGGER.warn("Table does not exist " + index.getName()); return null; } return super.getReader( datastoreOperations, options, adapterStore, mappingStore, internalAdapterStore, maxResolutionSubsamplingPerDimension, targetResolutionPerDimensionForHierarchicalIndex, limit, queryMaxRangeDecomposition, rowTransformer, delete); } protected Map getMergingAdapters( final PersistentAdapterStore adapterStore) { final Map mergingAdapters = new HashMap<>(); for (final Short adapterId : adapterIds) { final DataTypeAdapter adapter = adapterStore.getAdapter(adapterId).getAdapter(); if ((adapter instanceof RowMergingDataAdapter) && (((RowMergingDataAdapter) adapter).getTransform() != null)) { mergingAdapters.put(adapterId, (RowMergingDataAdapter) adapter); } } return mergingAdapters; } private GeoWaveRowIteratorTransformer getRowTransformer( final DataStoreOptions options, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final double[] maxResolutionSubsamplingPerDimension, final boolean decodePersistenceEncoding) { final @Nullable QueryFilter[] clientFilters = getClientFilters(options); final 
DataIndexRetrieval dataIndexRetrieval = getDataIndexRetrieval(); if ((options == null) || options.requiresClientSideMerging()) { final Map mergingAdapters = getMergingAdapters(adapterStore); if (!mergingAdapters.isEmpty()) { return new GeoWaveRowIteratorTransformer() { @SuppressWarnings({"rawtypes", "unchecked"}) @Override public Iterator apply(final Iterator input) { return new MergingEntryIterator( adapterStore, mappingStore, index, input, clientFilters, scanCallback, mergingAdapters, maxResolutionSubsamplingPerDimension, dataIndexRetrieval); } }; } } return new GeoWaveRowIteratorTransformer() { @SuppressWarnings({"rawtypes", "unchecked"}) @Override public Iterator apply(final Iterator input) { return (Iterator) GeoWaveRowIteratorFactory.iterator( adapterStore, mappingStore, index, input, clientFilters, scanCallback, getFieldBitmask(), // Don't do client side subsampling if server side is // enabled. ((options != null) && options.isServerSideLibraryEnabled()) ? null : maxResolutionSubsamplingPerDimension, decodePersistenceEncoding, dataIndexRetrieval); } }; } @Override protected QueryFilter[] getClientFilters(final DataStoreOptions options) { final List internalClientFilters = getClientFiltersList(options); return internalClientFilters.isEmpty() ? null : internalClientFilters.toArray(new QueryFilter[0]); } protected List getClientFiltersList(final DataStoreOptions options) { return clientFilters; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseIndexWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.WriteResults; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.callback.IngestCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.RowWriter; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; class BaseIndexWriter implements Writer { private static final Logger LOGGER = LoggerFactory.getLogger(BaseIndexWriter.class); protected final Index index; protected final DataStoreOperations operations; protected final DataStoreOptions options; protected final IngestCallback callback; protected RowWriter writer; protected final InternalDataAdapter adapter; protected final AdapterToIndexMapping indexMapping; protected final VisibilityHandler visibilityHandler; final Closeable closable; public 
BaseIndexWriter( final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final VisibilityHandler visibilityHandler, final DataStoreOperations operations, final DataStoreOptions options, final IngestCallback callback, final Closeable closable) { this.operations = operations; this.options = options; this.index = index; this.callback = callback; this.adapter = adapter; this.closable = closable; this.indexMapping = indexMapping; this.visibilityHandler = visibilityHandler; } @Override public Index[] getIndices() { return new Index[] {index}; } @Override public WriteResults write(final T entry) { return write(entry, visibilityHandler); } @Override public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) { IntermediaryWriteEntryInfo entryInfo; ensureOpen(); if (writer == null) { LOGGER.error("Null writer - empty list returned"); return new WriteResults(); } entryInfo = BaseDataStoreUtils.getWriteInfo( entry, adapter, indexMapping, index, visibilityHandler, options.isSecondaryIndexing(), false, options.isVisibilityEnabled()); verifyVisibility(visibilityHandler, entryInfo); final GeoWaveRow[] rows = entryInfo.getRows(); writer.write(rows); callback.entryIngested(entry, rows); return new WriteResults(index.getName(), entryInfo.getInsertionIds()); } @Override public void close() { try { closable.close(); } catch (final IOException e) { LOGGER.error("Cannot close callbacks", e); } // thread safe close closeInternal(); } @Override public synchronized void flush() { // thread safe flush of the writers if (writer != null) { writer.flush(); } if (this.callback instanceof Flushable) { try { ((Flushable) callback).flush(); } catch (final IOException e) { LOGGER.error("Cannot flush callbacks", e); } } } private void verifyVisibility( final VisibilityHandler visibilityHandler, final IntermediaryWriteEntryInfo ingestInfo) { if (visibilityHandler != DataStoreUtils.UNCONSTRAINED_VISIBILITY) { for (final GeoWaveValue value 
: ingestInfo.getValues()) { if ((value.getVisibility() != null) && (value.getVisibility().length > 0)) { if (!operations.ensureAuthorizations( null, StringUtils.stringFromBinary(value.getVisibility()))) { LOGGER.error( "Unable to set authorizations for ingested visibility '" + StringUtils.stringFromBinary(value.getVisibility()) + "'"); } } } } } protected synchronized void closeInternal() { if (writer != null) { try { writer.close(); writer = null; } catch (final Exception e) { LOGGER.warn("Unable to close writer", e); } } } @SuppressFBWarnings(justification = "This is intentional to avoid unnecessary sync") protected void ensureOpen() { if (writer == null) { synchronized (this) { if (writer == null) { try { writer = operations.createWriter(index, adapter); } catch (final Exception e) { LOGGER.error("Unable to open writer", e); } } } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseInsertionIdQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery;
import org.locationtech.geowave.core.store.query.filter.DedupeFilter;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;
import com.google.common.collect.Lists;

/** Represents a query operation for a specific set of row IDs. */
class BaseInsertionIdQuery<T> extends BaseConstraintsQuery {
  // precomputed ranges derived from the query's partition and sort keys
  private final QueryRanges ranges;

  public BaseInsertionIdQuery(
      final InternalDataAdapter<T> adapter,
      final Index index,
      final InsertionIdQuery query,
      final ScanCallback<T, ?> scanCallback,
      final DedupeFilter dedupeFilter,
      final DifferingVisibilityCountValue differingVisibilityCounts,
      final FieldVisibilityCountValue visibilityCounts,
      final DataIndexRetrieval dataIndexRetrieval,
      final String[] authorizations) {
    super(
        new short[] {adapter.getAdapterId()},
        index,
        query,
        dedupeFilter,
        scanCallback,
        null,
        null,
        null,
        null,
        differingVisibilityCounts,
        visibilityCounts,
        dataIndexRetrieval,
        authorizations);
    this.ranges =
        new InsertionIds(
            query.getPartitionKey(),
            Lists.newArrayList(query.getSortKey())).asQueryRanges();
  }

  /** The ranges are fixed by the insertion IDs, so decomposition parameters are ignored. */
  @Override
  protected QueryRanges getRanges(
      final int maxRangeDecomposition,
      final double[] targetResolutionPerDimensionForHierarchicalIndex) {
    return ranges;
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.callback.ScanCallbackList;
import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.Deleter;
import org.locationtech.geowave.core.store.operations.ReaderParams;
import org.locationtech.geowave.core.store.operations.ReaderParamsBuilder;
import org.locationtech.geowave.core.store.operations.RowReader;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;

/**
 * This class is used internally to perform query operations against a base data store. The query is
 * defined by the set of parameters passed into the constructor.
 */
// NOTE(review): generic type parameters appear to have been stripped from this extract
// (e.g. "Pair>"); declarations below are kept verbatim — restore against upstream.
abstract class BaseQuery {
  protected short[] adapterIds;
  protected final Index index;
  // Field-name subset paired with the adapter it applies to (null = no subsetting).
  protected final Pair> fieldIdsAdapterPair;
  protected final DifferingVisibilityCountValue differingVisibilityCounts;
  protected final FieldVisibilityCountValue visibilityCounts;
  protected final String[] authorizations;
  protected final ScanCallbackList scanCallback;
  private final DataIndexRetrieval dataIndexRetrieval;

  // Convenience constructor: no adapter-id restriction, no field subsetting.
  public BaseQuery(
      final Index index,
      final ScanCallback scanCallback,
      final DifferingVisibilityCountValue differingVisibilityCounts,
      final FieldVisibilityCountValue visibilityCounts,
      final DataIndexRetrieval dataIndexRetrieval,
      final String... authorizations) {
    this(
        null,
        index,
        null,
        scanCallback,
        differingVisibilityCounts,
        visibilityCounts,
        dataIndexRetrieval,
        authorizations);
  }

  public BaseQuery(
      final short[] adapterIds,
      final Index index,
      final Pair> fieldIdsAdapterPair,
      final ScanCallback scanCallback,
      final DifferingVisibilityCountValue differingVisibilityCounts,
      final FieldVisibilityCountValue visibilityCounts,
      final DataIndexRetrieval dataIndexRetrieval,
      final String... authorizations) {
    this.adapterIds = adapterIds;
    this.index = index;
    this.fieldIdsAdapterPair = fieldIdsAdapterPair;
    this.differingVisibilityCounts = differingVisibilityCounts;
    this.visibilityCounts = visibilityCounts;
    this.authorizations = authorizations;
    // Wrap the (optional) caller-supplied callback in a list so additional callbacks
    // (e.g. a Deleter during delete scans) can be attached later in getReader().
    final List> callbacks = new ArrayList<>();
    if (scanCallback != null) {
      callbacks.add(scanCallback);
    }
    this.scanCallback = new ScanCallbackList(callbacks);
    this.dataIndexRetrieval = dataIndexRetrieval;
  }

  /**
   * Builds reader parameters from this query's state and opens a row reader against the
   * operations layer. When {@code delete} is true, a Deleter is created instead and also
   * registered as a scan callback so scanned rows get deleted.
   */
  protected RowReader getReader(
      final DataStoreOperations operations,
      final DataStoreOptions options,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final double[] maxResolutionSubsamplingPerDimension,
      final double[] targetResolutionPerDimensionForHierarchicalIndex,
      final Integer limit,
      final Integer queryMaxRangeDecomposition,
      final GeoWaveRowIteratorTransformer rowTransformer,
      final boolean delete) {
    final int maxRangeDecomposition;
    if (queryMaxRangeDecomposition != null) {
      // An explicit per-query value always wins.
      maxRangeDecomposition = queryMaxRangeDecomposition;
    } else {
      // Aggregations use a separately configured decomposition level.
      maxRangeDecomposition =
          isAggregation() ? options.getAggregationMaxRangeDecomposition()
              : options.getMaxRangeDecomposition();
    }
    final ReaderParams readerParams =
        new ReaderParamsBuilder<>(
            index,
            adapterStore,
            mappingStore,
            internalAdapterStore,
            rowTransformer) //
                .adapterIds(adapterIds) //
                .maxResolutionSubsamplingPerDimension(maxResolutionSubsamplingPerDimension) //
                .aggregation(getAggregation()) //
                .fieldSubsets(getFieldSubsets()) //
                .isMixedVisibility(isMixedVisibilityRows()) //
                .isAuthorizationsLimiting(isAuthorizationsLimiting()) //
                .isServersideAggregation(isServerSideAggregation(options)) //
                .isClientsideRowMerging(isRowMerging(adapterStore)) //
                .queryRanges(
                    getRanges(
                        maxRangeDecomposition,
                        targetResolutionPerDimensionForHierarchicalIndex)) //
                .filter(getServerFilter(options)) //
                .limit(limit) //
                .maxRangeDecomposition(maxRangeDecomposition) //
                .coordinateRanges(getCoordinateRanges()) //
                .constraints(getConstraints()) //
                .additionalAuthorizations(getAdditionalAuthorizations()) //
                .build(); //
    if (delete) {
      scanCallback.waitUntilCallbackAdded();
      final Deleter deleter = operations.createDeleter(readerParams);
      // Register the deleter so every scanned entry is deleted as it is read.
      scanCallback.addScanCallback((ScanCallback) deleter);
      return deleter;
    }
    return operations.createReader(readerParams);
  }

  public boolean isRowMerging(final PersistentAdapterStore adapterStore) {
    return BaseDataStoreUtils.isRowMerging(adapterStore, adapterIds);
  }

  public boolean isServerSideAggregation(final DataStoreOptions options) {
    return ((options != null) && options.isServerSideLibraryEnabled() && isAggregation());
  }

  public boolean isAggregation() {
    return BaseDataStoreUtils.isAggregation(getAggregation());
  }

  // Subclass hooks: the null/false defaults mean "not applicable" and are overridden by
  // constraint-based query implementations.
  public List getCoordinateRanges() {
    return null;
  }

  public List getConstraints() {
    return null;
  }

  /** Computes the row ranges to scan; implemented per concrete query type. */
  protected abstract QueryRanges getRanges(
      int maxRangeDecomposition,
      double[] targetResolutionPerDimensionForHierarchicalIndex);

  protected Pair, Aggregation> getAggregation() {
    return null;
  }

  protected Pair> getFieldSubsets() {
    return fieldIdsAdapterPair;
  }

  protected byte[] getFieldBitmask() {
    // When data-index retrieval is in play, the bitmask is computed against the data-ID index.
    return BaseDataStoreUtils.getFieldBitmask(
        fieldIdsAdapterPair,
        dataIndexRetrieval != null ? DataIndexUtils.DATA_ID_INDEX : index);
  }

  protected boolean isAuthorizationsLimiting() {
    // Without visibility statistics, conservatively assume authorizations are limiting.
    return (visibilityCounts == null)
        || visibilityCounts.isAuthorizationsLimiting(authorizations);
  }

  protected boolean isMixedVisibilityRows() {
    // Without differing-visibility statistics, conservatively assume mixed visibilities.
    return (differingVisibilityCounts == null)
        || differingVisibilityCounts.isAnyEntryDifferingFieldVisiblity();
  }

  public String[] getAdditionalAuthorizations() {
    return authorizations;
  }

  public DataIndexRetrieval getDataIndexRetrieval() {
    return dataIndexRetrieval;
  }

  public QueryFilter getServerFilter(final DataStoreOptions options) {
    return null;
  }

  protected QueryFilter[] getClientFilters(final DataStoreOptions options) {
    return null;
  }

  protected boolean isCommonIndexAggregation() {
    return false;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseQueryOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;
import org.locationtech.geowave.core.store.api.AggregationQuery;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.Query;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions;
import org.locationtech.geowave.core.store.query.options.CommonQueryOptions;
import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions;
import org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions;
import org.locationtech.geowave.core.store.query.options.IndexQueryOptions;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;

/**
 * Resolved, normalized view of the user-facing query options: maps type names to adapter ids,
 * extracts hints (range decomposition, subsampling resolutions), and captures aggregation or
 * field-subsetting configuration for downstream query planning.
 */
// NOTE(review): generic type parameters appear to have been stripped from this extract
// (e.g. "Collection>", "Pair, Aggregation>"); declarations kept verbatim.
public class BaseQueryOptions {
  private static Logger LOGGER = LoggerFactory.getLogger(BaseQueryOptions.class);

  // No-op callback used when the caller does not supply one.
  private static ScanCallback DEFAULT_CALLBACK = new ScanCallback() {
    @Override
    public void entryScanned(final Object entry, final GeoWaveRow row) {}
  };

  @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = {"SE_TRANSIENT_FIELD_NOT_RESTORED"})
  private Collection> adapters = null;
  private short[] adapterIds = null;
  private String indexName = null;
  private transient Index index = null;
  // Adapter paired with the aggregation to run against it (single-type only for now).
  private Pair, Aggregation> aggregationAdapterPair;
  private Integer limit = -1;
  private Integer maxRangeDecomposition = null;
  private double[] maxResolutionSubsamplingPerDimension = null;
  private double[] targetResolutionPerDimensionForHierarchicalIndex = null;
  private transient ScanCallback scanCallback = DEFAULT_CALLBACK;
  private String[] authorizations = new String[0];
  // Field names paired with the adapter they subset (field subsetting is single-type).
  private Pair> fieldIdsAdapterPair;
  // Set when a requested type name could not be resolved to an adapter id.
  private boolean nullId = false;

  public BaseQueryOptions(
      final Query query,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore) {
    this(query, adapterStore, internalAdapterStore, null);
  }

  public BaseQueryOptions(
      final AggregationQuery query,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore) {
    this(
        query.getCommonQueryOptions(),
        query.getDataTypeQueryOptions(),
        query.getIndexQueryOptions(),
        adapterStore,
        internalAdapterStore,
        null);
  }

  public BaseQueryOptions(
      final Query query,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore,
      final ScanCallback scanCallback) {
    this(
        query.getCommonQueryOptions(),
        query.getDataTypeQueryOptions(),
        query.getIndexQueryOptions(),
        adapterStore,
        internalAdapterStore,
        scanCallback);
  }

  public BaseQueryOptions(
      final CommonQueryOptions commonOptions,
      final DataTypeQueryOptions typeOptions,
      final IndexQueryOptions indexOptions,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore) {
    this(commonOptions, typeOptions, indexOptions, adapterStore, internalAdapterStore, null);
  }

  public BaseQueryOptions(
      final CommonQueryOptions commonOptions,
      final DataTypeQueryOptions typeOptions,
      final IndexQueryOptions indexOptions,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore,
      final ScanCallback scanCallback) {
    if (scanCallback != null) {
      this.scanCallback = scanCallback;
    }
    indexName = indexOptions.getIndexName();
    limit = commonOptions.getLimit();
    // Pull planner hints out of the common options' hint map.
    maxRangeDecomposition =
        (Integer) commonOptions.getHints().get(DataStoreUtils.MAX_RANGE_DECOMPOSITION);
    maxResolutionSubsamplingPerDimension =
        (double[]) commonOptions.getHints().get(
            DataStoreUtils.MAX_RESOLUTION_SUBSAMPLING_PER_DIMENSION);
    targetResolutionPerDimensionForHierarchicalIndex =
        (double[]) commonOptions.getHints().get(
            DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX);
    authorizations = commonOptions.getAuthorizations();
    if ((typeOptions instanceof AggregateTypeQueryOptions)
        && (((AggregateTypeQueryOptions) typeOptions).getAggregation() != null)) {
      // TODO issue #1439 addresses being able to handle multiple types within a single
      // aggregation. It seems that the best approach would check if it's a
      // commonindexaggregation, in which case it can be done with a single query using simply
      // adapter IDs rather than even needing adapters; but if it's not a commonindexaggregation
      // it would require multiple adapters, either in the context of a single query or multiple
      // queries (one per adapter), and then aggregating the final result. For now let's just
      // assume a single type name and get the adapter, rather than just the type name (which
      // would be sufficient for commonindexaggregation).
      if (typeOptions.getTypeNames().length == 1) {
        final String typeName = typeOptions.getTypeNames()[0];
        final Short adapterId = internalAdapterStore.getAdapterId(typeName);
        if (adapterId != null) {
          final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId);
          final Aggregation agg = ((AggregateTypeQueryOptions) typeOptions).getAggregation();
          aggregationAdapterPair = new ImmutablePair<>(adapter, agg);
        } else {
          throw new IllegalArgumentException("Type name " + typeName + " does not exist");
        }
      } else {
        // TODO GEOWAVE issue #1439 should tackle this case
        throw new IllegalArgumentException("Single type name supported currently");
      }
    } else if ((typeOptions instanceof FilterByTypeQueryOptions)
        && (((FilterByTypeQueryOptions) typeOptions).getFieldNames() != null)
        && (((FilterByTypeQueryOptions) typeOptions).getFieldNames().length > 0)
        && (typeOptions.getTypeNames().length > 0)) {
      // filter by type for field subsetting only allows a single type
      // name
      final String typeName = typeOptions.getTypeNames()[0];
      if (typeName != null) {
        final Short adapterId = internalAdapterStore.getAdapterId(typeName);
        if (adapterId != null) {
          final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId);
          fieldIdsAdapterPair =
              new ImmutablePair<>(
                  ((FilterByTypeQueryOptions) typeOptions).getFieldNames(),
                  adapter);
        } else {
          throw new IllegalArgumentException("Type name " + typeName + " does not exist");
        }
      } else {
        throw new IllegalArgumentException("Type name cannot be null for field subsetting");
      }
    }
    if ((typeOptions != null)
        && (typeOptions.getTypeNames() != null)
        && (typeOptions.getTypeNames().length > 0)) {
      // Resolve requested type names to adapter ids, dropping (and flagging) unresolved ones.
      adapterIds =
          ArrayUtils.toPrimitive(
              Collections2.filter(
                  Lists.transform(
                      Arrays.asList(typeOptions.getTypeNames()),
                      internalAdapterStore::getAdapterId),
                  input -> {
                    if (input == null) {
                      nullId = true;
                      return false;
                    }
                    return true;
                  }).toArray(new Short[0]));
    }
  }

  public boolean isAllAdapters() {
    // TODO what about field ID subsetting and aggregation which implicitly
    // filters by adapter
    return ((adapterIds == null) || (adapterIds.length == 0));
  }

  /**
   * Return the set of adapter/index associations. If the adapters are not provided, then look up
   * all of them. If the index is not provided, then look up all of them.
   *
   * <p>DataStores are responsible for selecting a single adapter/index per query. For deletions,
   * the Data Stores are interested in all the associations.
   *
   * @return the set of adapter/index associations
   */
  public List>>> getIndicesForAdapters(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore adapterIndexMappingStore,
      final IndexStore indexStore) {
    return BaseDataStoreUtils.combineByIndex(
        compileIndicesForAdapters(adapterStore, adapterIndexMappingStore, indexStore, false));
  }

  public InternalDataAdapter[] getAdaptersArray(final PersistentAdapterStore adapterStore) {
    if ((adapterIds != null) && (adapterIds.length != 0)) {
      // Lazily materialize and cache the adapter objects for the requested ids.
      if ((adapters == null) || adapters.isEmpty()) {
        adapters = new ArrayList<>();
        for (final Short id : adapterIds) {
          if (id == null) {
            nullId = true;
            continue;
          }
          final InternalDataAdapter adapter = adapterStore.getAdapter(id);
          if (adapter != null) {
            adapters.add(adapter);
          } else {
            nullId = true;
          }
        }
      }
      return adapters.toArray(new InternalDataAdapter[0]);
    }
    if (nullId) {
      // A requested type failed to resolve: return nothing rather than everything.
      return new InternalDataAdapter[] {};
    }
    if (adapterStore != null) {
      return adapterStore.getAdapters();
    }
    return new InternalDataAdapter[0];
  }

  public void setAdapterId(final Short adapterId) {
    if (adapterId != null) {
      adapterIds = new short[] {adapterId};
    }
  }

  public short[] getAdapterIds() {
    return adapterIds;
  }

  public String getIndexName() {
    return indexName;
  }

  // Builds the raw (index, adapter) pair list; when constrainToIndex is set, pairs are limited
  // to the explicitly requested index (by object or by name).
  private List>> compileIndicesForAdapters(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore adapterIndexMappingStore,
      final IndexStore indexStore,
      final boolean constrainToIndex) {
    if ((adapterIds != null) && (adapterIds.length != 0)) {
      if ((adapters == null) || adapters.isEmpty()) {
        adapters = new ArrayList<>();
        for (final short id : adapterIds) {
          final InternalDataAdapter adapter = adapterStore.getAdapter(id);
          if (adapter != null) {
            adapters.add(adapter);
          }
        }
      }
    } else if (!nullId && ((adapters == null) || adapters.isEmpty())) {
      // No explicit adapter restriction: use every adapter in the store.
      adapters = Lists.newArrayList(adapterStore.getAdapters());
    } else if (adapters == null) {
      adapters = Collections.emptyList();
    }
    final List>> result = new ArrayList<>();
    for (final InternalDataAdapter adapter : adapters) {
      final AdapterToIndexMapping[] indices =
          adapterIndexMappingStore.getIndicesForAdapter(adapter.getAdapterId());
      if ((index != null) && constrainToIndex) {
        result.add(Pair.of(index, adapter));
      } else if ((indexName != null)
          && Arrays.stream(indices).anyMatch(mapping -> mapping.getIndexName().equals(indexName))
          && constrainToIndex) {
        if (index == null) {
          index = indexStore.getIndex(indexName);
          result.add(Pair.of(index, adapter));
        }
      } else if (indices.length > 0) {
        boolean noIndices = true;
        for (final AdapterToIndexMapping mapping : indices) {
          final Index pIndex = mapping.getIndex(indexStore);
          // this could happen if persistent was turned off
          if (pIndex != null) {
            noIndices = false;
            result.add(Pair.of(pIndex, adapter));
          }
        }
        if (noIndices) {
          // always at least add a null index to hint upstream callers that no index satisfies
          // the given adapter
          result.add(Pair.of(null, adapter));
        }
      } else {
        // always at least add a null index to hint upstream callers that no index satisfies
        // the given adapter
        result.add(Pair.of(null, adapter));
      }
    }
    return result;
  }

  public ScanCallback getScanCallback() {
    return scanCallback == null ? DEFAULT_CALLBACK : scanCallback;
  }

  /** @param scanCallback a function called for each item discovered per the query constraints */
  public void setScanCallback(final ScanCallback scanCallback) {
    this.scanCallback = scanCallback;
  }

  /** @return the max range decomposition to use when computing query ranges */
  public Integer getMaxRangeDecomposition() {
    return maxRangeDecomposition;
  }

  /**
   * a value of null indicates to use the data store configured default
   *
   * @param maxRangeDecomposition
   */
  public void setMaxRangeDecomposition(final Integer maxRangeDecomposition) {
    this.maxRangeDecomposition = maxRangeDecomposition;
  }

  /** @return Limit the number of data items to return */
  public Integer getLimit() {
    return limit;
  }

  /**
   * a value <= 0 or null indicates no limits
   *
   * @param limit
   */
  public void setLimit(Integer limit) {
    // Normalize "no limit" to the internal sentinel -1.
    if ((limit == null) || (limit == 0)) {
      limit = -1;
    }
    this.limit = limit;
  }

  /**
   * @return authorizations to apply to the query in addition to the authorizations assigned to the
   *         data store as a whole.
   */
  public String[] getAuthorizations() {
    return authorizations == null ? new String[0] : authorizations;
  }

  public void setAuthorizations(final String[] authorizations) {
    this.authorizations = authorizations;
  }

  public double[] getTargetResolutionPerDimensionForHierarchicalIndex() {
    return targetResolutionPerDimensionForHierarchicalIndex;
  }

  public void setTargetResolutionPerDimensionForHierarchicalIndex(
      final double[] targetResolutionPerDimensionForHierarchicalIndex) {
    this.targetResolutionPerDimensionForHierarchicalIndex =
        targetResolutionPerDimensionForHierarchicalIndex;
  }

  public void setMaxResolutionSubsamplingPerDimension(
      final double[] maxResolutionSubsamplingPerDimension) {
    this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension;
  }

  public double[] getMaxResolutionSubsamplingPerDimension() {
    return maxResolutionSubsamplingPerDimension;
  }

  public Pair, Aggregation> getAggregation() {
    return aggregationAdapterPair;
  }

  public void setAggregation(
      final Aggregation aggregation,
      final InternalDataAdapter adapter) {
    aggregationAdapterPair = new ImmutablePair<>(adapter, aggregation);
  }

  /**
   * This will get all relevant adapter index pairs and then select the best index for each adapter
   * given the constraint. Currently, it determines what is best by the index which can satisfy the
   * most dimensions of the given constraint.
   */
  public List>>> getBestQueryIndices(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore adapterIndexMappingStore,
      final IndexStore indexStore,
      final DataStatisticsStore statisticsStore,
      final QueryConstraints query) {
    if (query instanceof OptimalExpressionQuery) {
      // Expression queries determine their own best indices from statistics.
      return ((OptimalExpressionQuery) query).determineBestIndices(
          this,
          getAdaptersArray(adapterStore),
          adapterIndexMappingStore,
          indexStore,
          statisticsStore);
    }
    return BaseDataStoreUtils.chooseBestIndex(
        BaseDataStoreUtils.combineByIndex(
            compileIndicesForAdapters(adapterStore, adapterIndexMappingStore, indexStore, true)),
        query,
        adapterIndexMappingStore);
  }

  public boolean isAllIndices() {
    return indexName == null;
  }

  /** @return a pairing of fieldIds and their associated data adapter */
  public Pair> getFieldIdsAdapterPair() {
    return fieldIdsAdapterPair;
  }

  public short[] getValidAdapterIds(
      final InternalAdapterStore adapterStore,
      final AdapterIndexMappingStore adapterIndexMappingStore) throws IOException {
    // Grab the list of adapter ids, either from the query (if included),
    // or the whole list from the adapter store...
    final short[] adapterIds = getAdapterIds(adapterStore);
    // Then for each adapter, verify that it exists in the index-adapter
    // mapping
    final List validIds = new ArrayList<>();
    for (final short adapterId : adapterIds) {
      final AdapterToIndexMapping mapping =
          adapterIndexMappingStore.getMapping(adapterId, indexName);
      if (mapping != null) {
        validIds.add(adapterId);
      }
    }
    return ArrayUtils.toPrimitive(validIds.toArray(new Short[0]));
  }

  public short[] getAdapterIds(final InternalAdapterStore adapterStore) {
    if ((adapterIds == null) || (adapterIds.length == 0)) {
      return adapterStore.getAdapterIds();
    } else {
      return adapterIds;
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/BaseRowPrefixQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.SinglePartitionQueryRanges;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;

/** Represents a query operation using a row prefix. */
class BaseRowPrefixQuery extends AbstractBaseRowQuery {
  // Single-partition range built once from the sort-key prefix.
  final QueryRanges queryRanges;

  public BaseRowPrefixQuery(
      final Index index,
      final byte[] partitionKey,
      final byte[] sortKeyPrefix,
      final ScanCallback scanCallback,
      final DifferingVisibilityCountValue differingVisibilityCounts,
      final FieldVisibilityCountValue visibilityCounts,
      final DataIndexRetrieval dataIndexRetrieval,
      final String[] authorizations) {
    super(
        index,
        authorizations,
        scanCallback,
        differingVisibilityCounts,
        visibilityCounts,
        dataIndexRetrieval);
    // Same byte array as both range endpoints with a false flag — presumably marking this as a
    // prefix (non-exact) range; confirm against the ByteArrayRange constructor's semantics.
    final ByteArrayRange sortKeyPrefixRange =
        new ByteArrayRange(sortKeyPrefix, sortKeyPrefix, false);
    final List ranges = new ArrayList<>();
    final Collection sortKeys = Collections.singleton(sortKeyPrefixRange);
    ranges.add(new SinglePartitionQueryRanges(partitionKey, sortKeys));
    queryRanges = new QueryRanges(ranges);
  }

  // Prefix ranges are fixed up front; decomposition/resolution hints are ignored.
  @Override
  protected QueryRanges getRanges(
      final int maxRangeDecomposition,
      final double[] targetResolutionPerDimensionForHierarchicalIndex) {
    return queryRanges;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/CastIterator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import java.util.Iterator; import org.locationtech.geowave.core.store.CloseableIterator; class CastIterator implements Iterator> { final Iterator> it; public CastIterator(final Iterator> it) { this.it = it; } @Override public boolean hasNext() { return it.hasNext(); } @Override public CloseableIterator next() { return (CloseableIterator) it.next(); } @Override public void remove() { it.remove(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/DataStoreCallbackManager.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.callback.DeleteCallback;
import org.locationtech.geowave.core.store.callback.DeleteCallbackList;
import org.locationtech.geowave.core.store.callback.IngestCallback;
import org.locationtech.geowave.core.store.callback.IngestCallbackList;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * Caches per-adapter ingest and delete callback lists. When statistics persistence is enabled, a
 * statistics-update callback from the statistics store is included in each list.
 */
public class DataStoreCallbackManager {
  private final DataStatisticsStore statsStore;
  // When false, no statistics-update callbacks are created for new cache entries.
  private boolean persistStats = true;
  private final boolean captureAdapterStats;

  // Callback caches keyed by adapter id: one ingest and one delete callback list per adapter.
  final Map> icache = new HashMap<>();
  final Map> dcache = new HashMap<>();

  public DataStoreCallbackManager(
      final DataStatisticsStore statsStore,
      final boolean captureAdapterStats) {
    this.statsStore = statsStore;
    this.captureAdapterStats = captureAdapterStats;
  }

  // Lazily creates (then caches) the ingest callback list for the adapter.
  public IngestCallback getIngestCallback(
      final InternalDataAdapter writableAdapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    if (!icache.containsKey(writableAdapter.getAdapterId())) {
      final List> callbackList = new ArrayList<>();
      if (persistStats) {
        callbackList.add(
            statsStore.createUpdateCallback(
                index,
                indexMapping,
                writableAdapter,
                captureAdapterStats));
      }
      icache.put(writableAdapter.getAdapterId(), new IngestCallbackList<>(callbackList));
    }
    return (IngestCallback) icache.get(writableAdapter.getAdapterId());
  }

  public void setPersistStats(final boolean persistStats) {
    this.persistStats = persistStats;
  }

  // Lazily creates (then caches) the delete callback list for the adapter.
  public DeleteCallback getDeleteCallback(
      final InternalDataAdapter writableAdapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    if (!dcache.containsKey(writableAdapter.getAdapterId())) {
      final List> callbackList = new ArrayList<>();
      if (persistStats) {
        callbackList.add(
            statsStore.createUpdateCallback(
                index,
                indexMapping,
                writableAdapter,
                captureAdapterStats));
      }
      dcache.put(writableAdapter.getAdapterId(), new DeleteCallbackList<>(callbackList));
    }
    return (DeleteCallback) dcache.get(writableAdapter.getAdapterId());
  }

  // Closes every cached callback that is Closeable (presumably flushing pending
  // statistics updates — confirm against the statistics store's callback implementation).
  public void close() throws IOException {
    for (final IngestCallback callback : icache.values()) {
      if (callback instanceof Closeable) {
        ((Closeable) callback).close();
      }
    }
    for (final DeleteCallback callback : dcache.values()) {
      if (callback instanceof Closeable) {
        ((Closeable) callback).close();
      }
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/GeoWaveValueStore.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved.
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import org.locationtech.geowave.core.store.entities.GeoWaveValue;

/** Lookup of the stored {@link GeoWaveValue}s for an entry, keyed by its data ID. */
public interface GeoWaveValueStore {
  // Returns the values persisted under the given data ID (semantics per implementation).
  public GeoWaveValue[] getValue(byte[] dataId);
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/IntermediaryReadEntryInfo.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; class IntermediaryReadEntryInfo { private final boolean decodeRow; private final Index index; private InternalDataAdapter dataAdapter; private AdapterToIndexMapping indexMapping; private boolean adapterVerified; public IntermediaryReadEntryInfo(final Index index, final boolean decodeRow) { this.index = index; this.decodeRow = decodeRow; } public Index getIndex() { return index; } public boolean isDecodeRow() { return decodeRow; } // Adapter is set either by the user or from the data // If null, expect it from data, so no verify needed public boolean setDataAdapter(final InternalDataAdapter dataAdapter, final boolean fromData) { this.dataAdapter = dataAdapter; this.adapterVerified = fromData ? true : (dataAdapter == null); return hasDataAdapter(); } public boolean setIndexMapping(final AdapterToIndexMapping indexMapping) { this.indexMapping = indexMapping; return hasIndexMapping(); } public boolean verifyAdapter(final short internalAdapterId) { if ((this.dataAdapter == null) || (internalAdapterId == 0)) { return false; } this.adapterVerified = (internalAdapterId == dataAdapter.getAdapterId()) ? 
true : false; return this.adapterVerified; } public boolean setOrRetrieveAdapter( final InternalDataAdapter adapter, final short internalAdapterId, final PersistentAdapterStore adapterStore) { // Verify the current data adapter if (setDataAdapter(adapter, false)) { return true; } // Can't retrieve an adapter without the store if (adapterStore == null) { return false; } // Try to retrieve the adapter from the store if (setDataAdapter((InternalDataAdapter) adapterStore.getAdapter(internalAdapterId), true)) { return true; } // No adapter set or retrieved return false; } public boolean setOrRetrieveIndexMapping( final AdapterToIndexMapping indexMapping, final short adapterId, final AdapterIndexMappingStore mappingStore) { if (setIndexMapping(indexMapping)) { return true; } if (mappingStore == null) { return false; } if (setIndexMapping(mappingStore.getMapping(adapterId, index.getName()))) { return true; } return false; } public boolean isAdapterVerified() { return this.adapterVerified; } public boolean adapterSupportsDataIndex() { return DataIndexUtils.adapterSupportsDataIndex(getDataAdapter()); } public boolean hasDataAdapter() { return this.dataAdapter != null; } public boolean hasIndexMapping() { return this.indexMapping != null; } public InternalDataAdapter getDataAdapter() { return dataAdapter; } public AdapterToIndexMapping getIndexMapping() { return indexMapping; } public String getTypeName() { if (dataAdapter != null) { return dataAdapter.getTypeName(); } return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/IntermediaryWriteEntryInfo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base;

import java.util.Arrays;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.store.entities.GeoWaveKey;
import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;

/**
 * There is a single intermediate row per original entry passed into a write operation. This offers
 * a higher level abstraction from the raw key-value pairs in geowave (can be multiple per original
 * entry). A datastore is responsible for translating from this intermediary representation of rows
 * to key-value rows.
 */
class IntermediaryWriteEntryInfo {
  /** A single named field value with its visibility expression. */
  public static class FieldInfo {
    private final String fieldName;
    private final String visibility;
    private final byte[] writtenValue;

    public FieldInfo(final String fieldName, final byte[] writtenValue, final String visibility) {
      this.fieldName = fieldName;
      this.writtenValue = writtenValue;
      // normalize a null visibility to the empty expression
      this.visibility = visibility == null ? "" : visibility;
    }

    public String getFieldId() {
      return fieldName;
    }

    public byte[] getWrittenValue() {
      return writtenValue;
    }

    public String getVisibility() {
      return visibility;
    }
  }

  private final byte[] dataId;
  private final short internalAdapterId;
  // null when the target is the data-ID index (no sort-key-based insertion)
  private final InsertionIds insertionIds;
  private final GeoWaveValue[] entryValues;

  public IntermediaryWriteEntryInfo(
      final byte[] dataId,
      final short internalAdapterId,
      final InsertionIds insertionIds,
      final GeoWaveValue[] entryValues) {
    this.dataId = dataId;
    this.internalAdapterId = internalAdapterId;
    this.insertionIds = insertionIds;
    this.entryValues = entryValues;
  }

  @Override
  public String toString() {
    return new ByteArray(dataId).getString();
  }

  public short getInternalAdapterId() {
    return internalAdapterId;
  }

  public InsertionIds getInsertionIds() {
    return insertionIds;
  }

  public boolean isDataIdIndex() {
    return insertionIds == null;
  }

  public byte[] getDataId() {
    return dataId;
  }

  public GeoWaveValue[] getValues() {
    return entryValues;
  }

  /**
   * Expand this intermediary entry into the raw key-value rows to persist: a single row keyed only
   * by data ID for the data-ID index, otherwise one row per insertion ID.
   */
  public GeoWaveRow[] getRows() {
    if (isDataIdIndex()) {
      return new GeoWaveRow[] {
          // intentionally make the data ID as the sort Key and the data ID empty
          new GeoWaveRowImpl(
              new GeoWaveKeyImpl(dataId, internalAdapterId, new byte[0], new byte[0], 0),
              entryValues)};
    }
    final GeoWaveKey[] keys = GeoWaveKeyImpl.createKeys(insertionIds, dataId, internalAdapterId);
    // fixed: replaced a raw-typed IntFunction implementor class with an array-constructor ref
    return Arrays.stream(keys).map(k -> new GeoWaveRowImpl(k, entryValues)).toArray(
        GeoWaveRow[]::new);
  }
}
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base.dataidx;

import java.util.concurrent.CompletableFuture;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;

/**
 * A {@link DataIndexRetrieval} that batches data-ID lookups: async requests accumulate and are
 * issued together, with iterator lifecycle notifications used to decide when to flush a partial
 * batch.
 *
 * <p>NOTE(review): the generic parameter of the async return was lost in extraction; reconstructed
 * as {@code CompletableFuture<GeoWaveValue[]>} — verify against upstream.
 */
public interface BatchDataIndexRetrieval extends DataIndexRetrieval {
  /**
   * Asynchronously retrieve the field values for a data ID; the future completes when the batch
   * containing this request is flushed and read.
   */
  CompletableFuture<GeoWaveValue[]> getDataAsync(short adapterId, byte[] dataId);

  /** Force any partially-filled batches to be issued immediately. */
  void flush();

  /** Signal that a consuming iterator has started (tracked to defer flushing). */
  void notifyIteratorInitiated();

  /** Signal that a consuming iterator is exhausted (may trigger a flush). */
  void notifyIteratorExhausted();
}
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base.dataidx;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Coordinates an iterator whose decoded rows may be asynchronous ({@link CompletableFuture}s from
 * batched data-index retrieval): rows whose futures are not yet done are parked, completed results
 * are funneled through a blocking queue, and a poison marker signals that both the scan and all
 * outstanding futures are drained.
 *
 * <p>NOTE(review): the class's generic parameters were lost in extraction; reconstructed as
 * {@code <V, O>} where V is the decoded-row type and O the output type placed on the queue —
 * verify against upstream.
 */
public class BatchDataIndexRetrievalIteratorHelper<V, O> {
  private static final Logger LOGGER =
      LoggerFactory.getLogger(BatchDataIndexRetrievalIteratorHelper.class);
  private static final Function<Object, Object> NO_OP = Function.identity();
  private static final int MAX_COMPLETED_OBJECT_CAPACITY = 1000000;
  // holds completed O values plus the POISON marker, hence Object-typed
  private final BlockingQueue<Object> completedObjects =
      new LinkedBlockingDeque<>(MAX_COMPLETED_OBJECT_CAPACITY);
  private final AtomicInteger outstandingFutures = new AtomicInteger(0);
  private static final Object POISON = new Object();
  private final AtomicBoolean scannedResultsExhausted = new AtomicBoolean(false);
  private final AtomicBoolean scannedResultsStarted = new AtomicBoolean(false);
  private final BatchDataIndexRetrieval dataIndexRetrieval;

  public BatchDataIndexRetrievalIteratorHelper(final BatchDataIndexRetrieval dataIndexRetrieval) {
    this.dataIndexRetrieval = dataIndexRetrieval;
  }

  /** Notify the retrieval exactly once that iteration has begun. */
  public void preHasNext() {
    if (!scannedResultsStarted.getAndSet(true)) {
      dataIndexRetrieval.notifyIteratorInitiated();
    }
  }

  @SuppressWarnings("unchecked")
  public V postDecodeRow(final V decodedRow) {
    return postDecodeRow(decodedRow, (Function<V, O>) NO_OP);
  }

  /**
   * Handle a freshly decoded row. A non-future row is returned as-is. A completed future yields
   * its value immediately; an incomplete future is parked (returning null) and its eventual value
   * — mapped through {@code f} — is delivered via the completed-object queue.
   */
  @SuppressWarnings("unchecked")
  public V postDecodeRow(final V decodedRow, final Function<V, O> f) {
    if (decodedRow instanceof CompletableFuture) {
      final CompletableFuture<?> future = (CompletableFuture<?>) decodedRow;
      if (future.isDone()) {
        try {
          return (V) future.get();
        } catch (final InterruptedException e) {
          // fixed: restore the interrupt flag instead of swallowing it
          Thread.currentThread().interrupt();
          LOGGER.warn("unable to get results", e);
        } catch (final ExecutionException e) {
          LOGGER.warn("unable to get results", e);
        }
      } else {
        outstandingFutures.incrementAndGet();
        ((CompletableFuture<Object>) future).whenComplete((decodedValue, exception) -> {
          if (decodedValue != null) {
            try {
              completedObjects.put(f.apply((V) decodedValue));
            } catch (final InterruptedException e) {
              Thread.currentThread().interrupt();
              LOGGER.error("Unable to put value in blocking queue", e);
            }
          } else if (exception != null) {
            LOGGER.error("Error decoding row", exception);
            scannedResultsExhausted.set(true);
            dataIndexRetrieval.notifyIteratorExhausted();
          }
          // last outstanding future after the scan ended: poison the queue so takers wake up
          if ((outstandingFutures.decrementAndGet() == 0) && scannedResultsExhausted.get()) {
            try {
              completedObjects.put(POISON);
            } catch (final InterruptedException e) {
              Thread.currentThread().interrupt();
              LOGGER.error("Unable to put poison in blocking queue", e);
            }
          }
        });
      }
      return null;
    }
    return decodedRow;
  }

  /**
   * Called after the wrapped iterator's find-next: when the scan itself produced nothing but
   * futures are still outstanding (or results already queued), block for the next completed value.
   *
   * @return a completed value, or null when there is a direct value or the queue was poisoned
   */
  @SuppressWarnings("unchecked")
  public O postFindNext(final boolean hasNextValue, final boolean hasNextScannedResult) {
    if (!hasNextScannedResult && !scannedResultsExhausted.getAndSet(true)) {
      dataIndexRetrieval.notifyIteratorExhausted();
    }
    O retVal = null;
    if (!hasNextValue && ((outstandingFutures.get() > 0) || !completedObjects.isEmpty())) {
      try {
        final Object completedObj = completedObjects.take();
        if (completedObj != POISON) {
          retVal = (O) completedObj;
        }
      } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
        LOGGER.error("Unable to take value from blocking queue", e);
      }
    }
    return retVal;
  }
}
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base.dataidx;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.CloseableIteratorWrapper;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.RowReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterators;

/**
 * Batching implementation of {@link BatchDataIndexRetrieval}: async data-ID requests accumulate in
 * per-adapter maps and are issued as a single reader scan once a batch reaches {@code batchSize}
 * or all consuming iterators are exhausted.
 *
 * <p>NOTE(review): generic type parameters were lost in extraction; reconstructed throughout —
 * verify against upstream.
 */
public class BatchIndexRetrievalImpl implements BatchDataIndexRetrieval {
  private static final Logger LOGGER = LoggerFactory.getLogger(BatchIndexRetrievalImpl.class);
  private final int batchSize;
  // pending futures keyed by adapter ID, then by data ID; guarded by synchronized methods
  private final Map<Short, Map<ByteArray, CompletableFuture<GeoWaveValue[]>>> currentBatchesPerAdapter =
      new HashMap<>();
  private final DataStoreOperations operations;
  private final PersistentAdapterStore adapterStore;
  private final AdapterIndexMappingStore mappingStore;
  private final InternalAdapterStore internalAdapterStore;
  private final Pair<String[], InternalDataAdapter<?>> fieldSubsets;
  private final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation;
  private final String[] additionalAuthorizations;
  private final AtomicInteger outstandingIterators = new AtomicInteger(0);

  public BatchIndexRetrievalImpl(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final int batchSize) {
    this.operations = operations;
    this.adapterStore = adapterStore;
    this.mappingStore = mappingStore;
    this.internalAdapterStore = internalAdapterStore;
    this.fieldSubsets = fieldSubsets;
    this.aggregation = aggregation;
    this.additionalAuthorizations = additionalAuthorizations;
    this.batchSize = batchSize;
  }

  /** Synchronous single-ID lookup: reads exactly one entry (or null when absent). */
  @Override
  public GeoWaveValue[] getData(final short adapterId, final byte[] dataId) {
    try (CloseableIterator<GeoWaveValue[]> it = getData(adapterId, new byte[][] {dataId})) {
      if (it.hasNext()) {
        return it.next();
      }
    }
    return null;
  }

  private CloseableIterator<GeoWaveValue[]> getData(
      final short adapterId,
      final byte[][] dataIds) {
    final RowReader<GeoWaveRow> rowReader =
        DataIndexUtils.getRowReader(
            operations,
            adapterStore,
            mappingStore,
            internalAdapterStore,
            fieldSubsets,
            aggregation,
            additionalAuthorizations,
            adapterId,
            dataIds);
    return new CloseableIteratorWrapper<>(
        rowReader,
        Iterators.transform(rowReader, r -> r.getFieldValues()));
  }

  @Override
  public synchronized CompletableFuture<GeoWaveValue[]> getDataAsync(
      final short adapterId,
      final byte[] dataId) {
    // fixed: get/null-check/put idiom replaced with computeIfAbsent
    final Map<ByteArray, CompletableFuture<GeoWaveValue[]>> batch =
        currentBatchesPerAdapter.computeIfAbsent(adapterId, k -> new HashMap<>());
    final ByteArray dataIdKey = new ByteArray(dataId);
    CompletableFuture<GeoWaveValue[]> retVal = batch.get(dataIdKey);
    if (retVal == null) {
      retVal = new CompletableFuture<>();
      // log-and-null on failure so one bad lookup doesn't poison the consumer
      retVal = retVal.exceptionally(e -> {
        LOGGER.error("Unable to retrieve from data index", e);
        return null;
      });
      batch.put(dataIdKey, retVal);
      if (batch.size() >= batchSize) {
        flush(adapterId, batch);
      }
    }
    return retVal;
  }

  /** Issue one reader scan for every pending future in {@code batch}, completing them in order. */
  private void flush(
      final Short adapterId,
      final Map<ByteArray, CompletableFuture<GeoWaveValue[]>> batch) {
    final byte[][] internalDataIds = new byte[batch.size()][];
    @SuppressWarnings("unchecked")
    final CompletableFuture<GeoWaveValue[]>[] internalSuppliers =
        new CompletableFuture[batch.size()];
    final Iterator<Entry<ByteArray, CompletableFuture<GeoWaveValue[]>>> it =
        batch.entrySet().iterator();
    for (int i = 0; i < internalDataIds.length; i++) {
      final Entry<ByteArray, CompletableFuture<GeoWaveValue[]>> entry = it.next();
      internalDataIds[i] = entry.getKey().getBytes();
      internalSuppliers[i] = entry.getValue();
    }
    batch.clear();
    if (internalSuppliers.length > 0) {
      CompletableFuture.supplyAsync(() -> getData(adapterId, internalDataIds)).whenComplete(
          (values, ex) -> {
            if (values != null) {
              try {
                int i = 0;
                while (values.hasNext() && (i < internalSuppliers.length)) {
                  // the iterator has to be in order
                  internalSuppliers[i++].complete(values.next());
                }
                if (values.hasNext()) {
                  LOGGER.warn("There are more data index results than expected");
                } else if (i < internalSuppliers.length) {
                  LOGGER.warn("There are less data index results than expected");
                  while (i < internalSuppliers.length) {
                    // there should be exactly as many results as suppliers so this shouldn't happen
                    internalSuppliers[i++].complete(null);
                  }
                }
              } finally {
                values.close();
              }
            } else if (ex != null) {
              LOGGER.warn("Unable to retrieve from data index", ex);
              Arrays.stream(internalSuppliers).forEach(s -> s.completeExceptionally(ex));
            }
          });
    }
  }

  @Override
  public synchronized void flush() {
    // fixed: dropped the redundant isEmpty guard — forEach on an empty map is a no-op
    currentBatchesPerAdapter.forEach(this::flush);
  }

  @Override
  public void notifyIteratorInitiated() {
    outstandingIterators.incrementAndGet();
  }

  @Override
  public void notifyIteratorExhausted() {
    // once the last consuming iterator finishes, push out any partial batches
    if (outstandingIterators.decrementAndGet() <= 0) {
      flush();
    }
  }
}
* * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base.dataidx; import org.locationtech.geowave.core.store.entities.GeoWaveValue; public interface DataIndexRetrieval { GeoWaveValue[] getData(short adapterId, byte[] dataId); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DataIndexRetrievalImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.base.dataidx; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.operations.DataStoreOperations; public class DataIndexRetrievalImpl implements DataIndexRetrieval { private final DataStoreOperations operations; private final PersistentAdapterStore adapterStore; private final 
AdapterIndexMappingStore mappingStore; private final InternalAdapterStore internalAdapterStore; private final Pair> fieldSubsets; private final Pair, Aggregation> aggregation; private final String[] additionalAuthorizations; public DataIndexRetrievalImpl( final DataStoreOperations operations, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final Pair> fieldSubsets, final Pair, Aggregation> aggregation, final String[] additionalAuthorizations) { this.operations = operations; this.adapterStore = adapterStore; this.mappingStore = mappingStore; this.internalAdapterStore = internalAdapterStore; this.fieldSubsets = fieldSubsets; this.aggregation = aggregation; this.additionalAuthorizations = additionalAuthorizations; } @Override public GeoWaveValue[] getData(final short adapterId, final byte[] dataId) { return DataIndexUtils.getFieldValuesFromDataIdIndex( operations, adapterStore, mappingStore, internalAdapterStore, fieldSubsets, aggregation, additionalAuthorizations, adapterId, dataId); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DataIndexUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base.dataidx;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.BaseDataStoreUtils;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;
import org.locationtech.geowave.core.store.entities.GeoWaveValueImpl;
import org.locationtech.geowave.core.store.index.NullIndex;
import org.locationtech.geowave.core.store.operations.DataIndexReaderParams;
import org.locationtech.geowave.core.store.operations.DataIndexReaderParamsBuilder;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.RowReader;
import org.locationtech.geowave.core.store.util.NativeEntryIteratorWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.primitives.Bytes;

/**
 * Helpers for serializing, reading, and deleting entries in the data-ID ("DATA") index, where
 * entries are keyed solely by data ID rather than by a sort key.
 *
 * <p>NOTE(review): generic type parameters were lost in extraction; reconstructed throughout —
 * verify against upstream.
 */
public class DataIndexUtils {
  private static final Logger LOGGER = LoggerFactory.getLogger(DataIndexUtils.class);

  /** The singleton null index representing the data-ID index. */
  public static final Index DATA_ID_INDEX = new NullIndex("DATA");

  public static boolean isDataIndex(final String indexName) {
    return DATA_ID_INDEX.getName().equals(indexName);
  }

  public static GeoWaveValue deserializeDataIndexValue(
      final byte[] serializedValue,
      final byte[] visibility) {
    return deserializeDataIndexValue(serializedValue, visibility, false);
  }

  public static GeoWaveValue deserializeDataIndexValue(
      final byte[] serializedValue,
      final boolean visibilityEnabled) {
    return deserializeDataIndexValue(serializedValue, null, visibilityEnabled);
  }

  /**
   * Decode a serialized data-index value. The serialized layout (see
   * {@link #serializeDataIndexValue}) is field mask, optional inline visibility, value, followed by
   * trailing length bytes: the last byte is the field-mask length and, when visibility is inline,
   * the second-to-last byte is the visibility length.
   *
   * @param serializedValue the serialized bytes
   * @param visibilityInput an externally-supplied visibility (takes precedence when non-null; no
   *        visibility is then read from the serialized bytes)
   * @param visibilityEnabled whether visibility was serialized inline (only consulted when
   *        visibilityInput is null)
   */
  public static GeoWaveValue deserializeDataIndexValue(
      final byte[] serializedValue,
      final byte[] visibilityInput,
      final boolean visibilityEnabled) {
    final ByteBuffer buf = ByteBuffer.wrap(serializedValue);
    // count of trailing length bytes to exclude from the value payload
    int lengthBytes = 1;
    final byte[] fieldMask = new byte[serializedValue[serializedValue.length - 1]];
    buf.get(fieldMask);
    final byte[] visibility;
    if (visibilityInput != null) {
      visibility = visibilityInput;
    } else if (visibilityEnabled) {
      lengthBytes++;
      visibility = new byte[serializedValue[serializedValue.length - 2]];
      buf.get(visibility);
    } else {
      visibility = new byte[0];
    }
    final byte[] value = new byte[buf.remaining() - lengthBytes];
    buf.get(value);
    return new GeoWaveValueImpl(fieldMask, visibility, value);
  }

  public static boolean adapterSupportsDataIndex(final DataTypeAdapter<?> adapter) {
    // currently row merging is not supported by the data index
    return !BaseDataStoreUtils.isRowMerging(adapter);
  }

  public static GeoWaveRow deserializeDataIndexRow(
      final byte[] dataId,
      final short adapterId,
      final byte[] serializedValue,
      final byte[] serializedVisibility) {
    return new GeoWaveRowImpl(
        new GeoWaveKeyImpl(dataId, adapterId, new byte[0], new byte[0], 0),
        new GeoWaveValue[] {deserializeDataIndexValue(serializedValue, serializedVisibility)});
  }

  public static GeoWaveRow deserializeDataIndexRow(
      final byte[] dataId,
      final short adapterId,
      final byte[] serializedValue,
      final boolean visibilityEnabled) {
    return new GeoWaveRowImpl(
        new GeoWaveKeyImpl(dataId, adapterId, new byte[0], new byte[0], 0),
        new GeoWaveValue[] {deserializeDataIndexValue(serializedValue, visibilityEnabled)});
  }

  /**
   * Encode a value for the data index: field mask, optional visibility, value, then trailing
   * length byte(s) so {@link #deserializeDataIndexValue} can recover each section.
   */
  public static byte[] serializeDataIndexValue(
      final GeoWaveValue value,
      final boolean visibilityEnabled) {
    if (visibilityEnabled) {
      return Bytes.concat(
          value.getFieldMask(),
          value.getVisibility(),
          value.getValue(),
          new byte[] {(byte) value.getVisibility().length, (byte) value.getFieldMask().length});
    } else {
      return Bytes.concat(
          value.getFieldMask(),
          value.getValue(),
          new byte[] {(byte) value.getFieldMask().length});
    }
  }

  /**
   * Choose the retrieval strategy for an index whose rows only reference entries by data ID: a
   * batching implementation when dataIndexBatchSize &gt; 1, a per-ID implementation when it is 1,
   * and null when no secondary lookup is needed (batching disabled or the index IS the data
   * index).
   */
  public static DataIndexRetrieval getDataIndexRetrieval(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Index index,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final int dataIndexBatchSize) {
    if ((dataIndexBatchSize > 0) && !isDataIndex(index.getName())) {
      // this implies that this index merely contains a reference by data ID and a second lookup
      // must be done
      if (dataIndexBatchSize > 1) {
        return new BatchIndexRetrievalImpl(
            operations,
            adapterStore,
            mappingStore,
            internalAdapterStore,
            fieldSubsets,
            aggregation,
            additionalAuthorizations,
            dataIndexBatchSize);
      }
      return new DataIndexRetrievalImpl(
          operations,
          adapterStore,
          mappingStore,
          internalAdapterStore,
          fieldSubsets,
          aggregation,
          additionalAuthorizations);
    }
    return null;
  }

  /** Read a single entry's field values from the data index, or null (with a warning) if absent. */
  protected static GeoWaveValue[] getFieldValuesFromDataIdIndex(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final Short adapterId,
      final byte[] dataId) {
    try (final RowReader<GeoWaveRow> reader =
        getRowReader(
            operations,
            adapterStore,
            mappingStore,
            internalAdapterStore,
            fieldSubsets,
            aggregation,
            additionalAuthorizations,
            adapterId,
            dataId)) {
      if (reader.hasNext()) {
        return reader.next().getFieldValues();
      } else {
        LOGGER.warn(
            "Unable to find data ID '"
                + StringUtils.stringFromBinary(dataId)
                + " (hex:"
                + ByteArrayUtils.getHexString(dataId)
                + ")' with adapter ID "
                + adapterId
                + " in data table");
      }
    } catch (final Exception e) {
      // fixed: the previous message ("Unable to close reader") was misleading — this catch covers
      // the entire read, not just closing the reader
      LOGGER.warn("Unable to retrieve value from data index", e);
    }
    return null;
  }

  /**
   * Delete the given data IDs from the data index, optionally reading first so a scan callback
   * sees each deleted entry.
   */
  public static void delete(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final ScanCallback scanCallback,
      final short adapterId,
      final byte[]... dataIds) {
    final DataIndexReaderParams readerParams =
        new DataIndexReaderParamsBuilder<>(
            adapterStore,
            mappingStore,
            internalAdapterStore).additionalAuthorizations(
                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(
                    adapterId).dataIds(dataIds).fieldSubsets(fieldSubsets).aggregation(
                        aggregation).build();
    if (scanCallback != null) {
      // we need to read first to support scan callbacks and then delete (we might consider
      // changing the interface on base operations delete with DataIndexReaderParams to allow for a
      // scan callback but for now we can explicitly read before deleting)
      try (RowReader<GeoWaveRow> rowReader = operations.createReader(readerParams)) {
        final NativeEntryIteratorWrapper scanCallBackIterator =
            new NativeEntryIteratorWrapper(
                adapterStore,
                mappingStore,
                DataIndexUtils.DATA_ID_INDEX,
                rowReader,
                null,
                scanCallback,
                BaseDataStoreUtils.getFieldBitmask(fieldSubsets, DataIndexUtils.DATA_ID_INDEX),
                null,
                !BaseDataStoreUtils.isCommonIndexAggregation(aggregation),
                null);
        // just drain the iterator so the scan callback is properly exercised
        scanCallBackIterator.forEachRemaining(it -> {
        });
      }
    }
    operations.delete(readerParams);
  }

  /**
   * Delete a range of data IDs. Because datastore deletion requires explicit data IDs, the range
   * is first resolved to its concrete IDs by a prefix/range read and then deleted ID-by-ID.
   */
  public static void delete(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final ScanCallback scanCallback,
      final short adapterId,
      final byte[] startDataId,
      final byte[] endDataId) {
    // TODO within the datastores delete by range is not supported (the deletion logic expect Data
    // IDs to be non-null within reader params and deletions don't have logic for handling ranges
    // for now, although less efficient, let's query by prefix and then delete by the returned IDs
    final DataIndexReaderParams readerParams =
        new DataIndexReaderParamsBuilder<>(
            adapterStore,
            mappingStore,
            internalAdapterStore).additionalAuthorizations(
                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(
                    adapterId).dataIdsByRange(startDataId, endDataId).fieldSubsets(
                        fieldSubsets).aggregation(aggregation).build();
    final List<byte[]> dataIds = new ArrayList<>();
    try (RowReader<GeoWaveRow> reader = operations.createReader(readerParams)) {
      while (reader.hasNext()) {
        dataIds.add(reader.next().getDataId());
      }
    }
    delete(
        operations,
        adapterStore,
        mappingStore,
        internalAdapterStore,
        fieldSubsets,
        aggregation,
        additionalAuthorizations,
        scanCallback,
        adapterId,
        dataIds.toArray(new byte[dataIds.size()][]));
  }

  /** Reader over explicit data IDs. */
  public static RowReader<GeoWaveRow> getRowReader(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final short adapterId,
      final byte[]... dataIds) {
    final DataIndexReaderParams readerParams =
        new DataIndexReaderParamsBuilder<>(
            adapterStore,
            mappingStore,
            internalAdapterStore).additionalAuthorizations(
                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(
                    adapterId).dataIds(dataIds).fieldSubsets(fieldSubsets).aggregation(
                        aggregation).build();
    return operations.createReader(readerParams);
  }

  /** Reader over a data-ID range, optionally in reverse order. */
  public static RowReader<GeoWaveRow> getRowReader(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final short adapterId,
      final byte[] startDataId,
      final byte[] endDataId,
      final boolean reverse) {
    final DataIndexReaderParams readerParams =
        new DataIndexReaderParamsBuilder<>(
            adapterStore,
            mappingStore,
            internalAdapterStore).additionalAuthorizations(
                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(
                    adapterId).dataIdsByRange(startDataId, endDataId, reverse).fieldSubsets(
                        fieldSubsets).aggregation(aggregation).build();
    return operations.createReader(readerParams);
  }

  /** Reader over every entry of an adapter in the data index. */
  public static RowReader<GeoWaveRow> getRowReader(
      final DataStoreOperations operations,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final String[] additionalAuthorizations,
      final short adapterId) {
    final DataIndexReaderParams readerParams =
        new DataIndexReaderParamsBuilder<>(
            adapterStore,
            mappingStore,
            internalAdapterStore).additionalAuthorizations(
                additionalAuthorizations).isAuthorizationsLimiting(false).adapterId(
                    adapterId).fieldSubsets(fieldSubsets).aggregation(aggregation).build();
    return operations.createReader(readerParams);
  }
}
core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DefaultDataIndexRowDeleterWrapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base.dataidx;

import org.locationtech.geowave.core.store.base.dataidx.DefaultDataIndexRowWriterWrapper.GeoWaveRowWrapper;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.operations.RowDeleter;

/**
 * Decorates a {@link RowDeleter} so each deleted row is wrapped in a {@link GeoWaveRowWrapper},
 * keeping the deleted key layout consistent with rows written through
 * {@link DefaultDataIndexRowWriterWrapper} (which length-prefixes the sort key).
 */
public class DefaultDataIndexRowDeleterWrapper implements RowDeleter {
  private final RowDeleter delegateDeleter;

  public DefaultDataIndexRowDeleterWrapper(final RowDeleter delegateDeleter) {
    this.delegateDeleter = delegateDeleter;
  }

  @Override
  public void delete(final GeoWaveRow row) {
    // wrap so the delete targets the same (length-prefixed) sort key that was written
    delegateDeleter.delete(new GeoWaveRowWrapper(row));
  }

  @Override
  public void flush() {
    delegateDeleter.flush();
  }

  @Override
  public void close() {
    delegateDeleter.close();
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/base/dataidx/DefaultDataIndexRowWriterWrapper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.base.dataidx;

import java.util.Arrays;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;
import org.locationtech.geowave.core.store.operations.RowWriter;
import com.google.common.primitives.Bytes;

/**
 * Decorates a {@link RowWriter} so every row written to the data index exposes a sort key that is
 * the data ID prefixed with its length (see {@link GeoWaveRowWrapper#getSortKey()}).
 */
public class DefaultDataIndexRowWriterWrapper implements RowWriter {
  private final RowWriter delegateWriter;

  public DefaultDataIndexRowWriterWrapper(final RowWriter delegateWriter) {
    this.delegateWriter = delegateWriter;
  }

  @Override
  public void close() throws Exception {
    delegateWriter.close();
  }

  @Override
  public void write(final GeoWaveRow[] rows) {
    Arrays.stream(rows).forEach(r -> delegateWriter.write(new GeoWaveRowWrapper(r)));
  }

  @Override
  public void write(final GeoWaveRow row) {
    // FIX: the single-row overload previously passed the row through unwrapped while the array
    // overload wrapped each row; both must wrap so every written sort key is length-prefixed
    // consistently.
    delegateWriter.write(new GeoWaveRowWrapper(row));
  }

  @Override
  public void flush() {
    delegateWriter.flush();
  }

  /** View of a row whose sort key is the data ID prefixed by its (single-byte) length. */
  protected static class GeoWaveRowWrapper implements GeoWaveRow {
    private final GeoWaveRow row;

    protected GeoWaveRowWrapper(final GeoWaveRow row) {
      this.row = row;
    }

    @Override
    public GeoWaveValue[] getFieldValues() {
      return row.getFieldValues();
    }

    @Override
    public byte[] getDataId() {
      return row.getDataId();
    }

    @Override
    public short getAdapterId() {
      return row.getAdapterId();
    }

    @Override
    public byte[] getSortKey() {
      // the sort key is derived from the data ID; prefix with its length so ranges over data IDs
      // remain unambiguous (note: assumes data IDs fit in an unsigned byte length prefix —
      // TODO confirm upstream constraint)
      final byte[] dataId = row.getDataId();
      return Bytes.concat(new byte[] {(byte) dataId.length}, dataId);
    }

    @Override
    public byte[] getPartitionKey() {
      return row.getPartitionKey();
    }

    @Override
    public int getNumberOfDuplicates() {
      return row.getNumberOfDuplicates();
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/DeleteCallback.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * This interface provides a callback mechanism when deleting a collection of entries. * * @param A generic type for entries * @param A generic type for rows */ public interface DeleteCallback { /** * This will be called after an entry is successfully deleted with the row IDs that were used * * @param entry the entry that was deleted * @param rows the raw rows that were deleted */ public void entryDeleted(final T entry, final R... rows); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/DeleteCallbackList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import java.io.Closeable; import java.io.IOException; import java.util.List; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public class DeleteCallbackList implements DeleteCallback, Closeable { private final List> callbacks; public DeleteCallbackList(final List> callbacks) { this.callbacks = callbacks; } public void addCallback(final DeleteCallback c) { this.callbacks.add(c); } @Override public void entryDeleted(final T entry, final R... rows) { for (final DeleteCallback callback : callbacks) { callback.entryDeleted(entry, rows); } } @Override public void close() throws IOException { for (final DeleteCallback callback : callbacks) { if (callback instanceof Closeable) { ((Closeable) callback).close(); } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/DeleteOtherIndicesCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.RowDeleter; import org.locationtech.geowave.core.store.util.DataStoreUtils; import com.github.benmanes.caffeine.cache.CacheLoader; import com.github.benmanes.caffeine.cache.Caffeine; import com.github.benmanes.caffeine.cache.LoadingCache; public class DeleteOtherIndicesCallback implements DeleteCallback, Closeable { private final DataStoreOperations dataStoreOperations; private final InternalDataAdapter adapter; private final List indices; private final Map indexMappings; private final PersistentAdapterStore adapterStore; private final InternalAdapterStore internalAdapterStore; private final String[] authorizations; private final LoadingCache rowDeleters = Caffeine.newBuilder().build(new CacheLoader() 
{ @Override public RowDeleter load(final String indexName) throws Exception { return dataStoreOperations.createRowDeleter( indexName, adapterStore, internalAdapterStore, authorizations); } }); public DeleteOtherIndicesCallback( final DataStoreOperations dataStoreOperations, final InternalDataAdapter adapter, final List indices, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final String... authorizations) { this.adapter = adapter; this.indices = indices; this.indexMappings = indices.stream().map( index -> mappingStore.getMapping(adapter.getAdapterId(), index.getName())).collect( Collectors.toMap(AdapterToIndexMapping::getIndexName, mapping -> mapping)); this.dataStoreOperations = dataStoreOperations; this.adapterStore = adapterStore; this.internalAdapterStore = internalAdapterStore; this.authorizations = authorizations; } @Override public void close() throws IOException { rowDeleters.asMap().values().forEach(d -> d.close()); rowDeleters.invalidateAll(); } @Override public void entryDeleted(final T entry, final GeoWaveRow... rows) { if (rows.length > 0) { for (final Index index : indices) { final InsertionIds ids = DataStoreUtils.getInsertionIdsForEntry( entry, adapter, indexMappings.get(index.getName()), index); for (final SinglePartitionInsertionIds partitionId : ids.getPartitionKeys()) { for (final byte[] sortKey : partitionId.getSortKeys()) { rowDeleters.get(index.getName()).delete( new GeoWaveRowImpl( new GeoWaveKeyImpl( rows[0].getDataId(), adapter.getAdapterId(), partitionId.getPartitionKey(), sortKey, rows[0].getNumberOfDuplicates()), rows[0].getFieldValues())); } } } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/DuplicateDeletionCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import java.io.Closeable; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.base.BaseDataStore; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.query.constraints.InsertionIdQuery; import org.locationtech.geowave.core.store.util.DataStoreUtils; /** This callback finds the duplicates for each scanned entry, and deletes them by insertion ID */ public class DuplicateDeletionCallback implements DeleteCallback, Closeable { private final BaseDataStore dataStore; private final InternalDataAdapter adapter; private final Index index; private final AdapterToIndexMapping indexMapping; private final Map> insertionIdsNotYetDeletedByDataId; private boolean closed = false; public DuplicateDeletionCallback( final BaseDataStore store, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { this.adapter = adapter; this.index = index; this.indexMapping = indexMapping; 
dataStore = store; insertionIdsNotYetDeletedByDataId = new HashMap<>(); } @Override public synchronized void close() throws IOException { if (closed) { return; } else { closed = true; } for (final Map.Entry> entry : insertionIdsNotYetDeletedByDataId.entrySet()) { for (final InsertionIdData insertionId : entry.getValue()) { final InsertionIdQuery constraint = new InsertionIdQuery( insertionId.partitionKey, insertionId.sortKey, entry.getKey().getBytes()); final Query query = (Query) QueryBuilder.newBuilder().indexName(index.getName()).addTypeName( adapter.getTypeName()).constraints(constraint).build(); // we don't want the duplicates to try to delete one another // recursively over and over so we pass false for this deletion dataStore.delete(query, false); } } } @Override public synchronized void entryDeleted(final T entry, final GeoWaveRow... rows) { closed = false; if (rows.length > 0) { if ((rows[0].getNumberOfDuplicates() > 0) && (rows.length <= rows[0].getNumberOfDuplicates())) { final ByteArray dataId = new ByteArray(rows[0].getDataId()); Set insertionIds = insertionIdsNotYetDeletedByDataId.get(dataId); if (insertionIds == null) { insertionIds = new HashSet<>(); insertionIdsNotYetDeletedByDataId.put(dataId, insertionIds); // we haven't visited this data ID yet so we need to start tracking it final InsertionIds ids = DataStoreUtils.getInsertionIdsForEntry(entry, adapter, indexMapping, index); for (final SinglePartitionInsertionIds insertId : ids.getPartitionKeys()) { for (final byte[] sortKey : insertId.getSortKeys()) { byte[] partitionKey = insertId.getPartitionKey(); insertionIds.add( new InsertionIdData( partitionKey == null ? new byte[0] : partitionKey, sortKey == null ? new byte[0] : sortKey)); } } } final Set i = insertionIds; // we need to do is remove the rows in this callback. 
marking them as deleted Arrays.stream(rows).forEach(row -> { byte[] partitionKey = row.getPartitionKey(); byte[] sortKey = row.getSortKey(); i.remove( new InsertionIdData( partitionKey == null ? new byte[0] : partitionKey, sortKey == null ? new byte[0] : sortKey)); }); } } } private static class InsertionIdData { public final byte[] partitionKey; public final byte[] sortKey; public InsertionIdData(final byte[] partitionKey, final byte[] sortKey) { this.partitionKey = partitionKey; this.sortKey = sortKey; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(partitionKey); result = (prime * result) + Arrays.hashCode(sortKey); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final InsertionIdData other = (InsertionIdData) obj; if (!Arrays.equals(partitionKey, other.partitionKey)) { return false; } if (!Arrays.equals(sortKey, other.sortKey)) { return false; } return true; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/IngestCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * This interface provides a callback mechanism when ingesting a collection of entries to receive * the row IDs where each entry is ingested * * @param A generic type for ingested entries */ public interface IngestCallback { /** * This will be called after an entry is successfully ingested with the row IDs that were used * * @param entry the entry that was ingested * @param rows the rows inserted into the table for this entry */ public void entryIngested(T entry, GeoWaveRow... rows); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/IngestCallbackList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import java.util.List; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public class IngestCallbackList implements IngestCallback, Flushable, Closeable { private final List> callbacks; public IngestCallbackList(final List> callbacks) { this.callbacks = callbacks; } @Override public void entryIngested(final T entry, final GeoWaveRow... kvs) { for (final IngestCallback callback : callbacks) { callback.entryIngested(entry, kvs); } } @Override public void close() throws IOException { for (final IngestCallback callback : callbacks) { if (callback instanceof Closeable) { ((Closeable) callback).close(); } } } @Override public void flush() throws IOException { for (final IngestCallback callback : callbacks) { if (callback instanceof Flushable) { ((Flushable) callback).flush(); } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/ScanCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * This interface provides a callback mechanism when scanning entries * * @param A generic type for ingested entries */ public interface ScanCallback { /** * This will be called after an entry is successfully scanned with the row IDs that were used. * Deduplication, if performed, occurs prior to calling this method. * *

Without or without de-duplication, row ids are not consolidate, thus each entry only * contains one row id. If the entry is not de-dupped, then the entry this method is called for * each duplicate, each with a different row id. * * @param entry the entry that was ingested * @param row the raw row scanned from the table for this entry */ public void entryScanned(final T entry, final R row); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/callback/ScanCallbackList.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.callback; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.concurrent.locks.ReentrantLock; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public class ScanCallbackList implements ScanCallback, Closeable { private final List> callbacks; private ReentrantLock lock; private static Object MUTEX = new Object(); public ScanCallbackList(final List> callbacks) { this.callbacks = callbacks; } public void addScanCallback(final ScanCallback callback) { callbacks.add(callback); if (lock != null) { lock.unlock(); } } public void waitUntilCallbackAdded() { // this waits until a callback is added before allowing entryScanned() // calls to proceed this.lock = new ReentrantLock(); this.lock.lock(); } @Override public void entryScanned(final T entry, final R rows) { if (lock != null) { synchronized (MUTEX) { if (lock != null) { lock.lock(); lock = null; } } } for (final ScanCallback callback : callbacks) { callback.entryScanned(entry, rows); } } @Override public void close() throws IOException { for (final ScanCallback callback : callbacks) { if (callback instanceof Closeable) { ((Closeable) callback).close(); } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/CLIUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli; import java.io.File; import java.util.Properties; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.cli.store.StoreLoader; import com.beust.jcommander.ParameterException; import com.beust.jcommander.internal.Console; public class CLIUtils { public static DataStorePluginOptions loadStore( final String storeName, final File configFile, final Console console) { final StoreLoader inputStoreLoader = new StoreLoader(storeName); if (!inputStoreLoader.loadFromConfig(configFile, console)) { throw new ParameterException("Cannot find store name: " + inputStoreLoader.getStoreName()); } final DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin(); BaseDataStoreUtils.verifyCLIVersion(storeName, storeOptions); return storeOptions; } public static DataStorePluginOptions loadStore( final Properties properties, final String storeName, final File configFile, final Console console) { final StoreLoader inputStoreLoader = new StoreLoader(storeName); if (!inputStoreLoader.loadFromConfig( properties, DataStorePluginOptions.getStoreNamespace(storeName), configFile, console)) { throw new ParameterException("Cannot find store name: " + inputStoreLoader.getStoreName()); } final DataStorePluginOptions storeOptions = inputStoreLoader.getDataStorePlugin(); BaseDataStoreUtils.verifyCLIVersion(storeName, storeOptions); return storeOptions; } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/cli/VisibilityOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli; import java.io.Serializable; import java.util.List; import java.util.Map; import org.bouncycastle.util.Strings; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.FallbackVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.FieldLevelVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.FieldMappedVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler; import org.locationtech.geowave.core.store.data.visibility.JsonFieldLevelVisibilityHandler; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.clearspring.analytics.util.Lists; import com.google.common.collect.Maps; public class VisibilityOptions implements Serializable { /** * */ private static final long serialVersionUID = 1L; @Parameter( names = {"-v", "--visibility"}, description = "The global visibility of the data ingested (optional; if not specified, the data will be unrestricted)") private String visibility = null; @Parameter( names = {"-fv", "--fieldVisibility"}, description = "Specify the visibility of a specific field in the format `:`. This option can be specified multiple times for different fields.") private List fieldVisibilities = Lists.newArrayList(); @Parameter( names = {"-va", "--visibilityAttribute"}, description = "Specify a field that contains visibility information for the whole row. 
If specified, any field visibilities defined by `-fv` will be ignored.") private String visibilityAttribute = null; @Parameter( names = {"--jsonVisibilityAttribute"}, description = "If specified, the value of the visibility field defined by `-va` will be treated as a JSON object with keys that represent fields and values that represent their visibility.") private boolean jsonVisibilityAttribute = false; public String getGlobalVisibility() { return visibility; } public void setGlobalVisibility(final String visibility) { this.visibility = visibility; } public void setFieldVisibilities(final List fieldVisibilities) { this.fieldVisibilities = fieldVisibilities; } public void addFieldVisiblity(final String fieldName, final String visibility) { fieldVisibilities.add(fieldName + ":" + visibility); } public List getFieldVisibilities() { return fieldVisibilities; } public void setVisibilityAttribute(final String visibilityAttribute) { this.visibilityAttribute = visibilityAttribute; } public String getVisibilityAttribute() { return visibilityAttribute; } public void setJsonVisibilityAttribute(final boolean jsonVisibility) { this.jsonVisibilityAttribute = jsonVisibility; } public boolean isJsonVisibilityAttribute() { return jsonVisibilityAttribute; } public VisibilityHandler getConfiguredVisibilityHandler() { final VisibilityHandler globalVisibilityHandler; if (visibility != null && visibility.trim().length() > 0) { globalVisibilityHandler = new GlobalVisibilityHandler(visibility.trim()); } else { globalVisibilityHandler = null; } if (visibilityAttribute != null && visibilityAttribute.trim().length() > 0) { if (jsonVisibilityAttribute) { return new JsonFieldLevelVisibilityHandler(visibilityAttribute); } return new FieldLevelVisibilityHandler(visibilityAttribute); } final VisibilityHandler fieldVisibilityHandler = parseFieldVisibilities(); if (fieldVisibilityHandler == null) { if (globalVisibilityHandler != null) { return globalVisibilityHandler; } return null; } else if 
(globalVisibilityHandler != null) { return new FallbackVisibilityHandler( new VisibilityHandler[] {fieldVisibilityHandler, globalVisibilityHandler}); } return fieldVisibilityHandler; } private VisibilityHandler parseFieldVisibilities() { if (fieldVisibilities.size() == 0) { return null; } final Map fieldVisMap = Maps.newHashMapWithExpectedSize(fieldVisibilities.size()); for (final String vis : fieldVisibilities) { final String[] split = Strings.split(vis, ':'); if (split.length != 2) { throw new ParameterException( "Error parsing field visibility '" + vis + "', expected format :."); } fieldVisMap.put(split[0], split[1]); } return new FieldMappedVisibilityHandler(fieldVisMap); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/AddIndexCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.index; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexPluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.remote.options.BasicIndexOptions; import org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "add", parentOperation = IndexSection.class) @Parameters(commandDescription = "Add an index to a data store") public class AddIndexCommand extends ServiceEnabledCommand { private static final Logger LOGGER = LoggerFactory.getLogger(AddIndexCommand.class); @Parameter(description = " ", required = true) private List parameters = new ArrayList<>(); @Parameter( names = {"-t", "--type"}, required = true, description = "The type of index, such as spatial, or spatial_temporal") private String type; private IndexPluginOptions pluginOptions = new IndexPluginOptions(); @ParametersDelegate private 
BasicIndexOptions basicIndexOptions = new BasicIndexOptions(); @ParametersDelegate DimensionalityTypeOptions opts; @Override public boolean prepare(final OperationParams params) { super.prepare(params); pluginOptions.selectPlugin(type); pluginOptions.setBasicIndexOptions(basicIndexOptions); opts = pluginOptions.getDimensionalityOptions(); return true; } public void setBasicIndexOptions(BasicIndexOptions basicIndexOptions) { this.basicIndexOptions = basicIndexOptions; } @Override public void execute(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } computeResults(params); } @Override public String computeResults(final OperationParams params) { final String storeName = parameters.get(0); final String indexName = parameters.get(1); pluginOptions.setName(indexName); // Attempt to load store. final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final Index newIndex = pluginOptions.createIndex(storeOptions.createDataStore()); final IndexStore indexStore = storeOptions.createIndexStore(); if (indexStore.indexExists(newIndex.getName())) { throw new ParameterException("That index already exists: " + newIndex.getName()); } storeOptions.createDataStore().addIndex(newIndex); return newIndex.getName(); } public IndexPluginOptions getPluginOptions() { return pluginOptions; } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public String getType() { return type; } public void setType(final String type) { this.type = type; } public void setPluginOptions(final IndexPluginOptions pluginOptions) { this.pluginOptions = pluginOptions; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/CompactIndexCommand.java 
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.index; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.util.DataStoreUtils; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "compact", parentOperation = IndexSection.class) @Parameters(commandDescription = "Compact all rows for a given index") public class CompactIndexCommand extends DefaultOperation implements Command { @Parameter(description = " ") private List parameters = new ArrayList<>(); private DataStorePluginOptions inputStoreOptions = null; private List inputIndices = null; /** Prep the driver & run the operation. 
*/ @Override public void execute(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); final String indexList = parameters.get(1); // Attempt to load store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole()); // Load the Indexes inputIndices = DataStoreUtils.loadIndices(inputStoreOptions.createIndexStore(), indexList); final PersistentAdapterStore adapterStore = inputStoreOptions.createAdapterStore(); final InternalAdapterStore internalAdapterStore = inputStoreOptions.createInternalAdapterStore(); final AdapterIndexMappingStore adapterIndexMappingStore = inputStoreOptions.createAdapterIndexMappingStore(); final DataStoreOperations operations = inputStoreOptions.createDataStoreOperations(); for (final Index index : inputIndices) { if (!operations.mergeData( index, adapterStore, internalAdapterStore, adapterIndexMappingStore, inputStoreOptions.getFactoryOptions().getStoreOptions().getMaxRangeDecomposition())) { params.getConsole().println("Unable to merge data within index '" + index.getName() + "'"); } else { params.getConsole().println( "Data successfully merged within index '" + index.getName() + "'"); } } } public List getParameters() { return parameters; } public void setParameters(final String storeName, final String adapterId) { parameters = Arrays.asList(storeName, adapterId); } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/IndexOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.index; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class IndexOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { IndexSection.class, AddIndexCommand.class, ListIndicesCommand.class, ListIndexPluginsCommand.class, RemoveIndexCommand.class, CompactIndexCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/IndexSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.index;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * Parent section for the "geowave index ..." family of commands. It carries no behavior of its
 * own; subcommands attach to it via {@code parentOperation = IndexSection.class}.
 */
@GeowaveOperation(name = "index", parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Commands to manage indices")
public class IndexSection extends DefaultOperation {
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/ListIndexPluginsCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.index; import java.util.Map.Entry; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi; import org.locationtech.geowave.core.store.spi.DimensionalityTypeRegistry; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "listplugins", parentOperation = IndexSection.class) @Parameters(commandDescription = "List supported index types") public class ListIndexPluginsCommand extends ServiceEnabledCommand { @Override public void execute(final OperationParams params) { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) { final StringBuilder builder = new StringBuilder(); builder.append("Available index types currently registered as plugins:\n"); for (final Entry pluginProviderEntry : DimensionalityTypeRegistry.getRegisteredDimensionalityTypes().entrySet()) { final DimensionalityTypeProviderSpi pluginProvider = pluginProviderEntry.getValue(); final String desc = pluginProvider.getDimensionalityTypeDescription() == null ? 
"no description" : pluginProvider.getDimensionalityTypeDescription(); builder.append(String.format("%n %s:%n %s%n", pluginProviderEntry.getKey(), desc)); } return builder.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/ListIndicesCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.index; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "list", parentOperation = IndexSection.class) @Parameters(commandDescription = "Display all indices in a data store") public class ListIndicesCommand extends ServiceEnabledCommand { @Parameter(description = "") private List parameters = new ArrayList<>(); @Override public void execute(final OperationParams params) throws TargetNotFoundException { params.getConsole().println(computeResults(params)); } public void setParameters(List parameters) { this.parameters = parameters; } @Override public String computeResults(final OperationParams params) throws TargetNotFoundException { if (parameters.size() < 1) { throw new ParameterException("Must specify store name"); } final String inputStoreName = parameters.get(0); // Attempt to load store. 
final DataStorePluginOptions storeOptions = CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole()); final StringBuffer buffer = new StringBuffer(); try (final CloseableIterator it = storeOptions.createIndexStore().getIndices()) { while (it.hasNext()) { final Index index = it.next(); buffer.append(index.getName()).append(' '); } } return "Available indexes: " + buffer.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/index/RemoveIndexCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.index; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "rm", parentOperation = IndexSection.class) @Parameters(commandDescription = "Remove an index and all associated data from a data store") public class RemoveIndexCommand extends ServiceEnabledCommand { @Parameter(description = " ") private List parameters = new ArrayList<>(); @Override public void execute(final OperationParams params) throws Exception { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } computeResults(params); } public void setParameters(final List parameters) { this.parameters = parameters; } @Override public String computeResults(final OperationParams params) throws Exception { final String storeName = parameters.get(0); final String indexName = parameters.get(1); // Attempt to load store. 
final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final IndexStore indexStore = storeOptions.createIndexStore(); final Index existingIndex = indexStore.getIndex(indexName); if (existingIndex == null) { throw new TargetNotFoundException(indexName + " does not exist"); } indexStore.removeIndex(indexName); return "index." + indexName + " successfully removed"; } @Override public Boolean successStatusIs200() { return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/CSVQueryOutputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.query; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVPrinter; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.query.gwql.Result; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import com.beust.jcommander.Parameter; public class CSVQueryOutputFormat extends QueryOutputFormatSpi { public static final String FORMAT_NAME = "csv"; @Parameter(names = {"-o", "--outputFile"}, required = true, description = "Output file") private String outputFile; public CSVQueryOutputFormat() { super(FORMAT_NAME); } @Override public void output(final ResultSet results) { try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(outputFile), StringUtils.getGeoWaveCharset())) { try (CSVPrinter printer = new CSVPrinter(writer, CSVFormat.DEFAULT)) { final String[] header = new String[results.columnCount()]; for (int i = 0; i < results.columnCount(); i++) { header[i] = results.columnName(i); } printer.printRecord((Object[]) header); while (results.hasNext()) { final Result result = results.next(); final Object[] values = new Object[results.columnCount()]; for (int i = 0; i < results.columnCount(); i++) { values[i] = result.columnValue(i); } printer.printRecord(values); } } } catch (IOException e) { throw new RuntimeException("Error writing CSV: " + e.getMessage(), e); } } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/ConsoleQueryOutputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.query; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter; import org.locationtech.geowave.core.store.query.gwql.Result; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import com.beust.jcommander.internal.Console; public class ConsoleQueryOutputFormat extends QueryOutputFormatSpi { public static final String FORMAT_NAME = "console"; private static final int RESULTS_PER_PAGE = 24; private static final int MIN_COLUMN_SIZE = 5; private Console console = null; public ConsoleQueryOutputFormat() { super(FORMAT_NAME); } public void setConsole(final Console console) { this.console = console; } @Override public void output(final ResultSet results) { // The column headers for display List headers = new ArrayList(results.columnCount()); for (int i = 0; i < results.columnCount(); i++) { headers.add(results.columnName(i)); } ConsoleTablePrinter printer = new ConsoleTablePrinter(MIN_COLUMN_SIZE, RESULTS_PER_PAGE, console); printer.print(headers, getRows(results, headers.size())); // If more results exist, we will paginate while (results.hasNext()) { System.out.println("Press for more results..."); try { System.in.read(); } catch (final IOException ignore) { break; } printer.print(headers, getRows(results, headers.size())); } } // Convert to the more generic Object matrix structure for console printing private List> getRows(final ResultSet results, final int columns) { List> rows = new ArrayList>(); while (results.hasNext() && rows.size() < RESULTS_PER_PAGE) { Result result = 
results.next(); List values = new ArrayList(columns); for (int i = 0; i < columns; i++) { values.add(result.columnValue(i)); } rows.add(values); } return rows; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/GWQLQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.query; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.commons.lang3.time.StopWatch; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.config.Configurator; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; import com.google.common.collect.Iterators; @GeowaveOperation(name = "query", parentOperation = GeoWaveTopLevelSection.class) @Parameters(commandDescription = "Query vector data using a GWQL") public class GWQLQuery extends DefaultOperation implements Command { private static Logger LOGGER = LoggerFactory.getLogger(GWQLQuery.class); @Parameter(description = " ") private List parameters = new ArrayList<>(); @Parameter( names = {"-f", "--format"}, required = false, description = "Output format such as 
console, csv, shp, geojson, etc.") private String outputFormat = ConsoleQueryOutputFormat.FORMAT_NAME; @Parameter( names = {"-a", "--authorization"}, required = false, description = "Authorization to use. Can be specified multiple times.") private List authorizations = new ArrayList<>(); @ParametersDelegate private QueryOutputFormatSpi output; @Parameter( names = "--debug", required = false, description = "Print out additional info for debug purposes") private boolean debug = false; public void setOutputFormat(final String outputFormat) { this.outputFormat = outputFormat; } public void setDebug(final boolean debug) { this.debug = debug; } public void setParameters(final List parameters) { this.parameters = parameters; } public void setAuthorizations(final List authorizations) { this.authorizations = authorizations; } @Override public boolean prepare(final OperationParams params) { super.prepare(params); final Iterator spiIter = new SPIServiceRegistry(GWQLQuery.class).load(QueryOutputFormatSpi.class); boolean outputFound = false; while (spiIter.hasNext()) { final QueryOutputFormatSpi format = spiIter.next(); if ((outputFormat != null) && outputFormat.equalsIgnoreCase(format.name())) { output = format; if (output instanceof ConsoleQueryOutputFormat) { ((ConsoleQueryOutputFormat) output).setConsole(params.getConsole()); } outputFound = true; break; } } if (!outputFound) { throw new ParameterException( "Not a valid output format. " + "Available options are: " + Iterators.toString(Iterators.transform(spiIter, a -> a.name()))); } return true; } @Override public void execute(final OperationParams params) { if (debug) { Configurator.setLevel(LogManager.getRootLogger().getName(), Level.DEBUG); } // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } final String storeName = parameters.get(0); // Attempt to load store. 
final DataStorePluginOptions inputStoreOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final String query = parameters.get(1); final StopWatch stopWatch = new StopWatch(); stopWatch.start(); final ResultSet results = inputStoreOptions.createDataStore().query( query, authorizations.toArray(new String[authorizations.size()])); stopWatch.stop(); output.output(results); results.close(); if (debug) { LOGGER.debug("Executed query in " + stopWatch.toString()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/QueryOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.query; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class QueryOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] {GWQLQuery.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/query/QueryOutputFormatSpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.query;

import org.locationtech.geowave.core.store.query.gwql.ResultSet;

/**
 * Output ResultSets from geowave queries. Implementations are discovered via SPI and selected by
 * {@link #name()} (see the -f/--format option of the query command).
 */
public abstract class QueryOutputFormatSpi {
  // Immutable format identifier assigned at construction time
  private final String name;

  protected QueryOutputFormatSpi(final String name) {
    this.name = name;
  }

  /**
   * @return The name of the output format.
   */
  public final String name() {
    return name;
  }

  /**
   * Output the results.
   *
   * @param results the results of a geowave query
   */
  public abstract void output(final ResultSet results);
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/AbstractStatsCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.io.IOException; import java.util.List; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import com.beust.jcommander.ParametersDelegate; import com.beust.jcommander.internal.Console; /** Common methods for dumping, manipulating and calculating stats. */ public abstract class AbstractStatsCommand extends ServiceEnabledCommand { /** Return "200 OK" for all stats commands. */ @Override public Boolean successStatusIs200() { return true; } @ParametersDelegate private StatsCommandLineOptions statsOptions = new StatsCommandLineOptions(); public void run(final OperationParams params, final List parameters) { DataStorePluginOptions inputStoreOptions = null; if (parameters.size() > 0) { final String storeName = parameters.get(0); // Attempt to load store. 
inputStoreOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); } try { performStatsCommand(inputStoreOptions, statsOptions, params.getConsole()); } catch (final IOException e) { throw new RuntimeException("Unable to parse stats tool arguments", e); } } public void setStatsOptions(final StatsCommandLineOptions statsOptions) { this.statsOptions = statsOptions; } /** Abstracted command method to be called when command selected */ protected abstract boolean performStatsCommand( final DataStorePluginOptions options, final StatsCommandLineOptions statsOptions, final Console console) throws IOException; /** * Helper method to extract a list of authorizations from a string passed in from the command line * * @param auths - String to be parsed */ protected static String[] getAuthorizations(final String auths) { if ((auths == null) || (auths.length() == 0)) { return new String[0]; } final String[] authsArray = auths.split(","); for (int i = 0; i < authsArray.length; i++) { authsArray[i] = authsArray[i].trim(); } return authsArray; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/AddStatCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticBinningStrategy; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.statistics.StatisticsRegistry; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "add", parentOperation = StatsSection.class) @Parameters(commandDescription = "Add a statistic to a data store") public class AddStatCommand extends ServiceEnabledCommand { @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter( names = {"-b", "--binningStrategy"}, description = "If specified, statistics will be binned using the given strategy.") private String binningStrategyName = null; @Parameter( names = {"-skip", "--skipCalculation"}, description = "If specified, the initial value of the statistic will not be calculated.") private boolean skipCalculation = false; @Parameter(names = {"-t", "--type"}, required = 
true, description = "The statistic type to add.") private String statType = null; @ParametersDelegate private Statistic statOptions; @ParametersDelegate private StatisticBinningStrategy binningStrategy = null; @Override public boolean prepare(final OperationParams params) { if (!super.prepare(params)) { return false; } if (statType == null) { throw new ParameterException("Missing statistic type."); } statOptions = StatisticsRegistry.instance().getStatistic(statType); if (statOptions == null) { throw new ParameterException("Unrecognized statistic type: " + statType); } if (binningStrategyName != null) { binningStrategy = StatisticsRegistry.instance().getBinningStrategy(binningStrategyName); if (binningStrategy == null) { throw new ParameterException("Unrecognized binning strategy: " + binningStrategyName); } if (binningStrategy instanceof CompositeBinningStrategy) { throw new ParameterException( "Statistics with composite binning strategies are currently unable to be added through the CLI."); } } return true; } @Override public void execute(final OperationParams params) { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } computeResults(params); } @Override public Void computeResults(final OperationParams params) { final String storeName = parameters.get(0); // Attempt to load store. 
final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final DataStore dataStore = storeOptions.createDataStore(); if (binningStrategy != null) { statOptions.setBinningStrategy(binningStrategy); } if (skipCalculation) { dataStore.addEmptyStatistic(statOptions); } else { dataStore.addStatistic(statOptions); } return null; } void setBinningStrategyName(final String binningStrategyName) { this.binningStrategyName = binningStrategyName; } void setStatType(final String statType) { this.statType = statType; } void setSkipCalculation(final boolean skipCalculation) { this.skipCalculation = skipCalculation; } void setParameters(final List parameters) { this.parameters = parameters; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/CompactStatsCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "compact", parentOperation = StatsSection.class) @Parameters(commandDescription = "Compact all statistics in data store") public class CompactStatsCommand extends DefaultOperation implements Command { @Parameter(description = "") private List parameters = new ArrayList<>(); private DataStorePluginOptions inputStoreOptions = null; /** Prep the driver & run the operation. */ @Override public void execute(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); // Attempt to load input store. if (inputStoreOptions == null) { // Attempt to load store. 
inputStoreOptions = CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole()); } final DataStatisticsStore statsStore = inputStoreOptions.createDataStatisticsStore(); final DataStoreOperations operations = inputStoreOptions.createDataStoreOperations(); operations.mergeStats(statsStore); } public List getParameters() { return parameters; } public void setParameters(final String storeName, final String adapterId) { parameters = Arrays.asList(storeName, adapterId); } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public void setInputStoreOptions(final DataStorePluginOptions inputStoreOptions) { this.inputStoreOptions = inputStoreOptions; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/ListStatTypesCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticBinningStrategy; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.statistics.StatisticsRegistry; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.internal.Console; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @GeowaveOperation(name = "listtypes", parentOperation = StatsSection.class) @Parameters( commandDescription = "List statistic types that are 
compatible with the given data store, " + "if no data store is provided, all registered statistics will be listed.") public class ListStatTypesCommand extends ServiceEnabledCommand { @Parameter(description = "") private final List parameters = new ArrayList<>(); @Parameter( names = {"--indexName"}, description = "If specified, only statistics that are compatible with this index will be listed.") private String indexName = null; @Parameter( names = {"--typeName"}, description = "If specified, only statistics that are compatible with this type will be listed.") private String typeName = null; @Parameter( names = {"--fieldName"}, description = "If specified, only statistics that are compatible with this field will be displayed.") private String fieldName = null; @Parameter( names = {"-b", "--binningStrategies"}, description = "If specified, a list of registered binning strategies will be displayed.") private boolean binningStrategies = false; @Override public void execute(final OperationParams params) { computeResults(params); } @Override public Void computeResults(final OperationParams params) { if (parameters.isEmpty()) { listAllRegisteredStatistics(params.getConsole()); return null; } final String storeName = parameters.get(0); // Attempt to load store. final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final DataStore dataStore = storeOptions.createDataStore(); if ((indexName != null) && (typeName != null)) { throw new ParameterException("Specify either index name or type name, not both."); } final Index index = indexName != null ? dataStore.getIndex(indexName) : null; if ((indexName != null) && (index == null)) { throw new ParameterException("Unable to find index: " + indexName); } final DataTypeAdapter adapter = typeName != null ? 
dataStore.getType(typeName) : null; if ((typeName != null) && (adapter == null)) { throw new ParameterException("Unrecognized type name: " + typeName); } final Map>>> indexStats = Maps.newHashMap(); final Map>>> adapterStats = Maps.newHashMap(); final Map>>>> fieldStats = Maps.newHashMap(); boolean hasAdapters = false; if (adapter == null) { if (index != null) { indexStats.put( index.getName(), StatisticsRegistry.instance().getRegisteredIndexStatistics(index.getClass())); } else { final DataTypeAdapter[] adapters = dataStore.getTypes(); for (final DataTypeAdapter dataAdapter : adapters) { hasAdapters = true; adapterStats.put( dataAdapter.getTypeName(), StatisticsRegistry.instance().getRegisteredDataTypeStatistics( dataAdapter.getDataClass())); fieldStats.put( dataAdapter.getTypeName(), StatisticsRegistry.instance().getRegisteredFieldStatistics(dataAdapter, fieldName)); } final Index[] indices = dataStore.getIndices(); for (final Index idx : indices) { indexStats.put( idx.getName(), StatisticsRegistry.instance().getRegisteredIndexStatistics(idx.getClass())); } } } else { hasAdapters = true; adapterStats.put( adapter.getTypeName(), StatisticsRegistry.instance().getRegisteredDataTypeStatistics(adapter.getDataClass())); fieldStats.put( adapter.getTypeName(), StatisticsRegistry.instance().getRegisteredFieldStatistics(adapter, fieldName)); } final ConsoleTablePrinter printer = new ConsoleTablePrinter(0, Integer.MAX_VALUE, params.getConsole()); if (hasAdapters) { displayIndexStats(printer, indexStats); displayAdapterStats(printer, adapterStats); displayFieldStats(printer, fieldStats); displayBinningStrategies(printer); } else { params.getConsole().println("There are no types in the data store."); } return null; } private void listAllRegisteredStatistics(final Console console) { final List> indexStats = Lists.newLinkedList(); final List> adapterStats = Lists.newLinkedList(); final List> fieldStats = Lists.newLinkedList(); final List>> allStats = 
StatisticsRegistry.instance().getAllRegisteredStatistics(); Collections.sort( allStats, (s1, s2) -> s1.getStatisticType().getString().compareTo(s2.getStatisticType().getString())); for (final Statistic statistic : allStats) { if (statistic instanceof IndexStatistic) { indexStats.add(statistic); } else if (statistic instanceof DataTypeStatistic) { adapterStats.add(statistic); } else if (statistic instanceof FieldStatistic) { fieldStats.add(statistic); } } final ConsoleTablePrinter printer = new ConsoleTablePrinter(0, Integer.MAX_VALUE, console); displayStatList(printer, indexStats, "Registered Index Statistics"); displayStatList(printer, adapterStats, "Registered Adapter Statistics"); displayStatList(printer, fieldStats, "Registered Field Statistics"); displayBinningStrategies(printer); } private void displayBinningStrategies(final ConsoleTablePrinter printer) { if (!binningStrategies) { return; } printer.println("Registered Binning Strategies: "); final List binningStrategies = StatisticsRegistry.instance().getAllRegisteredBinningStrategies(); final List> rows = Lists.newArrayListWithCapacity(binningStrategies.size()); for (final StatisticBinningStrategy binningStrategy : binningStrategies) { rows.add(Arrays.asList(binningStrategy.getStrategyName(), binningStrategy.getDescription())); } printer.print(Arrays.asList("Strategy", "Description"), rows); } private void displayStatList( final ConsoleTablePrinter printer, final List>> stats, final String title) { printer.println(title + ": "); final List> rows = Lists.newArrayListWithCapacity(stats.size()); for (final Statistic o : stats) { rows.add(Arrays.asList(o.getStatisticType(), o.getDescription())); } printer.print(Arrays.asList("Statistic", "Description"), rows); } private void displayIndexStats( final ConsoleTablePrinter printer, final Map>>> stats) { if (stats.size() == 0) { return; } printer.println("Compatible index statistics: "); final List> rows = Lists.newArrayListWithCapacity(stats.size()); for (final 
Entry>>> indexStats : stats.entrySet()) { boolean first = true; for (final Statistic o : indexStats.getValue()) { rows.add( Arrays.asList( first ? indexStats.getKey() : "", o.getStatisticType(), o.getDescription())); first = false; } } printer.print(Arrays.asList("Index", "Statistic", "Description"), rows); } private void displayAdapterStats( final ConsoleTablePrinter printer, final Map>>> stats) { if (stats.size() == 0) { return; } printer.println("Compatible data type statistics: "); final List> rows = Lists.newArrayListWithCapacity(stats.size()); for (final Entry>>> adapterStats : stats.entrySet()) { boolean first = true; for (final Statistic o : adapterStats.getValue()) { rows.add( Arrays.asList( first ? adapterStats.getKey() : "", o.getStatisticType(), o.getDescription())); first = false; } } printer.print(Arrays.asList("Type", "Statistic", "Description"), rows); } private void displayFieldStats( final ConsoleTablePrinter printer, final Map>>>> stats) { if (stats.size() == 0) { return; } printer.println("Compatible field statistics: "); final List> rows = Lists.newArrayListWithCapacity(stats.size()); for (final Entry>>>> adapterStats : stats.entrySet()) { boolean firstAdapter = true; for (final Entry>>> fieldStats : adapterStats.getValue().entrySet()) { boolean firstField = true; for (final Statistic o : fieldStats.getValue()) { rows.add( Arrays.asList( firstAdapter ? adapterStats.getKey() : "", firstField ? fieldStats.getKey() : "", o.getStatisticType(), o.getDescription())); firstAdapter = false; firstField = false; } } } printer.print(Arrays.asList("Type", "Field", "Statistic", "Description"), rows); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/ListStatsCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.function.Predicate; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException; import org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.StatisticType; import org.locationtech.geowave.core.store.statistics.StatisticsRegistry; import org.locationtech.geowave.core.store.statistics.StatisticsValueIterator; import 
org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.internal.Console; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; @GeowaveOperation(name = "list", parentOperation = StatsSection.class) @Parameters(commandDescription = "Print statistics of a data store to standard output") public class ListStatsCommand extends AbstractStatsCommand implements Command { @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter( names = "--limit", description = "Limit the number or rows returned. 
By default, all results will be displayed.") private Integer limit = null; @Parameter(names = "--csv", description = "Output statistics in CSV format.") private boolean csv = false; private String retValue = ""; @Override public void execute(final OperationParams params) throws TargetNotFoundException { computeResults(params); } @Override protected boolean performStatsCommand( final DataStorePluginOptions storeOptions, final StatsCommandLineOptions statsOptions, final Console console) throws IOException { final DataStatisticsStore statsStore = storeOptions.createDataStatisticsStore(); final IndexStore indexStore = storeOptions.createIndexStore(); final String[] authorizations = getAuthorizations(statsOptions.getAuthorizations()); DataTypeAdapter adapter = null; if (statsOptions.getTypeName() != null) { adapter = storeOptions.createDataStore().getType(statsOptions.getTypeName()); if (adapter == null) { throw new ParameterException( "A type called " + statsOptions.getTypeName() + " was not found."); } } StatisticType> statisticType = null; if (statsOptions.getStatType() != null) { statisticType = StatisticsRegistry.instance().getStatisticType(statsOptions.getStatType()); if (statisticType == null) { throw new ParameterException("Unrecognized statistic type: " + statsOptions.getStatType()); } } List headers = null; List> statsToList = Lists.newLinkedList(); ValueTransformer transformer = null; Predicate> filter; if (statsOptions.getIndexName() != null) { if (statisticType != null && !(statisticType instanceof IndexStatisticType)) { throw new ParameterException( "Only index statistic types can be specified when listing statistics for a specific index."); } Index index = indexStore.getIndex(statsOptions.getIndexName()); if (index == null) { throw new ParameterException( "An index called " + statsOptions.getIndexName() + " was not found."); } headers = Lists.newArrayList("Statistic", "Tag", "Bin", "Value"); transformer = new ValueToRow(); try (CloseableIterator>> stats = 
statsStore.getIndexStatistics(index, statisticType, statsOptions.getTag())) { if (adapter != null) { stats.forEachRemaining(stat -> { if (stat.getBinningStrategy() instanceof DataTypeBinningStrategy || (stat.getBinningStrategy() instanceof CompositeBinningStrategy && ((CompositeBinningStrategy) stat.getBinningStrategy()).usesStrategy( DataTypeBinningStrategy.class))) { statsToList.add(stat); } }); filter = new IndexAdapterFilter(adapter.getTypeName()); } else { stats.forEachRemaining(statsToList::add); filter = null; } } } else if (statsOptions.getTypeName() != null) { filter = null; if (statsOptions.getFieldName() != null) { if (statisticType != null && !(statisticType instanceof FieldStatisticType)) { throw new ParameterException( "Only field statistic types can be specified when listing statistics for a specific field."); } headers = Lists.newArrayList("Statistic", "Tag", "Bin", "Value"); transformer = new ValueToRow(); try (CloseableIterator>> stats = statsStore.getFieldStatistics( adapter, statisticType, statsOptions.getFieldName(), statsOptions.getTag())) { stats.forEachRemaining(statsToList::add); } } else { if (statisticType != null && statisticType instanceof IndexStatisticType) { throw new ParameterException( "Only data type and field statistic types can be specified when listing statistics for a specific data type."); } headers = Lists.newArrayList("Statistic", "Tag", "Field", "Bin", "Value"); transformer = new ValueToFieldRow(); if (statisticType == null || statisticType instanceof DataTypeStatisticType) { try (CloseableIterator>> stats = statsStore.getDataTypeStatistics(adapter, statisticType, statsOptions.getTag())) { stats.forEachRemaining(statsToList::add); } } if (statisticType == null || statisticType instanceof FieldStatisticType) { try (CloseableIterator>> stats = statsStore.getFieldStatistics(adapter, statisticType, null, statsOptions.getTag())) { stats.forEachRemaining(statsToList::add); } } } } else if (statsOptions.getFieldName() != null) { 
throw new ParameterException("A type name must be supplied with a field name."); } else { filter = null; headers = Lists.newArrayList("Index/Adapter", "Statistic", "Tag", "Field", "Bin", "Value"); transformer = new ValueToAllRow(); try (CloseableIterator>> stats = statsStore.getAllStatistics(statisticType)) { stats.forEachRemaining(stat -> { if (statsOptions.getTag() == null || stat.getTag().equals(statsOptions.getTag())) { statsToList.add(stat); } }); } } Collections.sort(statsToList, new StatComparator()); try (StatisticsValueIterator values = new StatisticsValueIterator(statsStore, statsToList.iterator(), null, authorizations)) { Iterator> rows = Iterators.transform( filter == null ? values : Iterators.filter(values, v -> filter.test(v)), transformer::transform); if (limit != null) { rows = Iterators.limit(rows, limit); } if (rows.hasNext()) { if (csv) { StringBuilder sb = new StringBuilder(); sb.append(Arrays.toString(headers.toArray())); rows.forEachRemaining(row -> sb.append(Arrays.toString(row.toArray()))); retValue = sb.toString(); console.println(retValue); } else { console.println("Matching statistics:"); ConsoleTablePrinter printer = new ConsoleTablePrinter(0, limit != null ? 
limit : 30, console); printer.print(headers, rows); } } else { console.println("No matching statistics were found."); } } return true; } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } @Override public String computeResults(final OperationParams params) throws TargetNotFoundException { // Ensure we have all the required arguments if (parameters.size() < 1) { throw new ParameterException("Requires arguments: "); } super.run(params, parameters); if (!retValue.equals("")) { return retValue; } else { return "No Data Found"; } } private static class IndexAdapterFilter implements Predicate> { private final ByteArray adapterBin; public IndexAdapterFilter(final String typeName) { this.adapterBin = DataTypeBinningStrategy.getBin(typeName); } @Override public boolean test(StatisticValue value) { Statistic statistic = value.getStatistic(); if (statistic.getBinningStrategy() instanceof DataTypeBinningStrategy) { return Arrays.equals(value.getBin().getBytes(), adapterBin.getBytes()); } else if (statistic.getBinningStrategy() instanceof CompositeBinningStrategy && ((CompositeBinningStrategy) statistic.getBinningStrategy()).usesStrategy( DataTypeBinningStrategy.class)) { CompositeBinningStrategy binningStrategy = (CompositeBinningStrategy) statistic.getBinningStrategy(); if (binningStrategy.binMatches(DataTypeBinningStrategy.class, value.getBin(), adapterBin)) { return true; } } return false; } } private static class StatComparator implements Comparator>, Serializable { private static final long serialVersionUID = 7635824822932295378L; @Override public int compare(Statistic o1, Statistic o2) { int compare = 0; if ((o1 instanceof IndexStatistic && o2 instanceof DataTypeStatistic) || (o1 instanceof IndexStatistic && o2 instanceof FieldStatistic) || (o1 instanceof DataTypeStatistic && o2 instanceof FieldStatistic)) { compare = -1; } else if ((o2 instanceof 
IndexStatistic && o1 instanceof DataTypeStatistic) || (o2 instanceof IndexStatistic && o1 instanceof FieldStatistic) || (o2 instanceof DataTypeStatistic && o1 instanceof FieldStatistic)) { compare = 1; } if (compare == 0) { compare = o1.getId().getGroupId().getString().compareTo(o2.getId().getGroupId().getString()); } if (compare == 0) { compare = o1.getStatisticType().getString().compareTo(o2.getStatisticType().getString()); } if (compare == 0) { compare = o1.getTag().compareTo(o2.getTag()); } return compare; } } private static interface ValueTransformer { List transform(StatisticValue value); } private static class ValueToRow implements ValueTransformer { @Override public List transform(StatisticValue value) { return Lists.newArrayList( value.getStatistic().getStatisticType(), value.getStatistic().getTag(), value.getStatistic().getBinningStrategy() != null ? value.getStatistic().getBinningStrategy().binToString(value.getBin()) : "N/A", value); } } private static class ValueToFieldRow implements ValueTransformer { @Override public List transform(StatisticValue value) { String fieldName = value.getStatistic() instanceof FieldStatistic ? ((FieldStatistic) value.getStatistic()).getFieldName() : "N/A"; return Lists.newArrayList( value.getStatistic().getStatisticType(), value.getStatistic().getTag(), fieldName, value.getStatistic().getBinningStrategy() != null ? 
value.getStatistic().getBinningStrategy().binToString(value.getBin()) : "N/A", value); } } private static class ValueToAllRow implements ValueTransformer { @Override public List transform(StatisticValue value) { Statistic statistic = value.getStatistic(); String indexOrAdapter = null; String field = "N/A"; String bin = "N/A"; if (statistic instanceof IndexStatistic) { indexOrAdapter = ((IndexStatistic) statistic).getIndexName(); } else if (statistic instanceof DataTypeStatistic) { indexOrAdapter = ((DataTypeStatistic) statistic).getTypeName(); } else if (statistic instanceof FieldStatistic) { indexOrAdapter = ((FieldStatistic) statistic).getTypeName(); field = ((FieldStatistic) statistic).getFieldName(); } if (statistic.getBinningStrategy() != null) { bin = statistic.getBinningStrategy().binToString(value.getBin()); } return Lists.newArrayList( indexOrAdapter, statistic.getStatisticType(), statistic.getTag(), field, bin, value); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/RecalculateStatsCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.VersionUtils; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataDeleter; import org.locationtech.geowave.core.store.operations.MetadataQuery; import org.locationtech.geowave.core.store.operations.MetadataReader; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import 
com.beust.jcommander.Parameters; import com.beust.jcommander.internal.Console; @GeowaveOperation(name = "recalc", parentOperation = StatsSection.class) @Parameters(commandDescription = "Recalculate statistics in a given data store") public class RecalculateStatsCommand extends AbstractStatsCommand { private static final Logger LOGGER = LoggerFactory.getLogger(RecalculateStatsCommand.class); @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter( names = "--all", description = "If specified, all matching statistics will be recalculated.") private boolean all = false; @Override public void execute(final OperationParams params) { computeResults(params); } @Override protected boolean performStatsCommand( final DataStorePluginOptions storeOptions, final StatsCommandLineOptions statsOptions, final Console console) throws IOException { final DataStore dataStore = storeOptions.createDataStore(); final DataStatisticsStore statStore = storeOptions.createDataStatisticsStore(); final IndexStore indexStore = storeOptions.createIndexStore(); if (all) { // check for legacy stats table and if it exists, delete it and add all default stats final DataStoreOperations ops = storeOptions.createDataStoreOperations(); final MetadataReader reader = ops.createMetadataReader(MetadataType.LEGACY_STATISTICS); boolean legacyStatsExists; // rather than checking for table existence, its more thorough for each data store // implementation to check for at least one row try (CloseableIterator it = reader.query(new MetadataQuery(null, null))) { legacyStatsExists = it.hasNext(); } if (legacyStatsExists) { console.println( "Found legacy stats prior to v1.3. 
Deleting and recalculating all default stats as a migration to v" + VersionUtils.getVersion() + "."); // first let's do the add just to make sure things are in working order prior to deleting // legacy stats console.println("Adding default statistics..."); final List> defaultStatistics = new ArrayList<>(); for (final Index index : dataStore.getIndices()) { if (index instanceof DefaultStatisticsProvider) { defaultStatistics.addAll(((DefaultStatisticsProvider) index).getDefaultStatistics()); } } for (final DataTypeAdapter adapter : dataStore.getTypes()) { final DefaultStatisticsProvider defaultStatProvider = BaseDataStoreUtils.getDefaultStatisticsProvider(adapter); if (defaultStatProvider != null) { defaultStatistics.addAll(defaultStatProvider.getDefaultStatistics()); } } dataStore.addEmptyStatistic( defaultStatistics.toArray(new Statistic[defaultStatistics.size()])); console.println("Deleting legacy statistics..."); try (MetadataDeleter deleter = ops.createMetadataDeleter(MetadataType.LEGACY_STATISTICS)) { deleter.delete(new MetadataQuery(null, null)); } catch (final Exception e) { LOGGER.warn("Error deleting legacy statistics", e); } // Clear out all options so that all stats get recalculated. statsOptions.setIndexName(null); statsOptions.setTypeName(null); statsOptions.setFieldName(null); statsOptions.setStatType(null); statsOptions.setTag(null); } } final List>> toRecalculate = statsOptions.resolveMatchingStatistics(dataStore, statStore, indexStore); if (toRecalculate.isEmpty()) { throw new ParameterException("A matching statistic could not be found"); } else if ((toRecalculate.size() > 1) && !all) { throw new ParameterException( "Multiple statistics matched the given parameters. 
If this is intentional, " + "supply the --all option, otherwise provide additional parameters to " + "specify which statistic to recalculate."); } final Statistic[] toRecalcArray = toRecalculate.toArray(new Statistic[toRecalculate.size()]); dataStore.recalcStatistic(toRecalcArray); console.println( toRecalculate.size() + " statistic" + (toRecalculate.size() == 1 ? " was" : "s were") + " successfully recalculated."); return true; } @Override public Void computeResults(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } super.run(params, parameters); return null; } public void setParameters(final List parameters) { this.parameters = parameters; } public void setAll(final boolean all) { this.all = all; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/RemoveStatCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.internal.Console; @GeowaveOperation(name = "rm", parentOperation = StatsSection.class) @Parameters(commandDescription = "Remove a statistic from a data store") public class RemoveStatCommand extends AbstractStatsCommand { @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter( names = "--all", description = "If specified, all matching statistics will be removed.") private boolean all = false; @Parameter( names = "--force", description = "Force an internal statistic to be removed. 
IMPORTANT: Removing statistics " + "that are marked as \"internal\" can have a detrimental impact on performance!") private boolean force = false; @Override public void execute(final OperationParams params) { computeResults(params); } @Override protected boolean performStatsCommand( final DataStorePluginOptions storeOptions, final StatsCommandLineOptions statsOptions, final Console console) throws IOException { final DataStore dataStore = storeOptions.createDataStore(); final DataStatisticsStore statStore = storeOptions.createDataStatisticsStore(); final IndexStore indexStore = storeOptions.createIndexStore(); final List>> toRemove = statsOptions.resolveMatchingStatistics(dataStore, statStore, indexStore); if (!force) { for (Statistic stat : toRemove) { if (stat.isInternal()) { throw new ParameterException( "Unable to remove an internal statistic without specifying the --force option. " + "Removing an internal statistic can have a detrimental impact on performance."); } } } if (toRemove.isEmpty()) { throw new ParameterException("A matching statistic could not be found"); } else if (toRemove.size() > 1 && !all) { throw new ParameterException( "Multiple statistics matched the given parameters. If this is intentional, " + "supply the --all option, otherwise provide additional parameters to " + "specify which statistic to delete."); } if (!statStore.removeStatistics(toRemove.iterator())) { throw new RuntimeException("Unable to remove statistics"); } console.println( toRemove.size() + " statistic" + (toRemove.size() == 1 ? 
" was" : "s were") + " successfully removed."); return true; } @Override public Void computeResults(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } super.run(params, parameters); return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/StatsCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import java.util.List; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.StatisticType; import org.locationtech.geowave.core.store.statistics.StatisticsRegistry; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.clearspring.analytics.util.Lists; public class StatsCommandLineOptions { @Parameter(names = {"-t", "--type"}, description = "The type of the statistic.") private String statType; @Parameter(names = "--indexName", description = "The name of the index, for index statistics.") private String indexName; @Parameter( names = 
"--typeName", description = "The name of the data type adapter, for field and type statistics.") private String typeName; @Parameter(names = "--fieldName", description = "The name of the field, for field statistics.") private String fieldName; @Parameter(names = "--tag", description = "The tag of the statistic.") private String tag; @Parameter(names = "--auth", description = "The authorizations used when querying statistics.") private String authorizations; public StatsCommandLineOptions() {} public String getAuthorizations() { return authorizations; } public void setAuthorizations(final String authorizations) { this.authorizations = authorizations; } public void setIndexName(final String indexName) { this.indexName = indexName; } public String getIndexName() { return indexName; } public void setTypeName(final String typeName) { this.typeName = typeName; } public String getTypeName() { return typeName; } public void setFieldName(final String fieldName) { this.fieldName = fieldName; } public String getFieldName() { return fieldName; } public void setTag(final String tag) { this.tag = tag; } public String getTag() { return tag; } public void setStatType(final String statType) { this.statType = statType; } public String getStatType() { return statType; } @SuppressWarnings({"rawtypes", "unchecked"}) public List>> resolveMatchingStatistics( final DataStore dataStore, final DataStatisticsStore statsStore, final IndexStore indexStore) { final List>> matching = Lists.newArrayList(); if ((indexName != null) && ((typeName != null) || (fieldName != null))) { throw new ParameterException( "Unable to process index statistics for a single type. 
Specify either an index name or a type name."); } StatisticType statisticType = null; if (statType != null) { statisticType = StatisticsRegistry.instance().getStatisticType(statType); if (statisticType == null) { throw new ParameterException("Unrecognized statistic type: " + statType); } } if (statisticType != null) { if (statisticType instanceof IndexStatisticType) { if (indexName == null) { throw new ParameterException( "An index name must be supplied when specifying an index statistic type."); } final Index index = indexStore.getIndex(indexName); if (index == null) { throw new ParameterException("Unable to find an index named: " + indexName); } try (CloseableIterator>> stats = statsStore.getIndexStatistics(index, statisticType, tag)) { stats.forEachRemaining(stat -> matching.add(stat)); } } else if (statisticType instanceof DataTypeStatisticType) { if (typeName == null) { throw new ParameterException( "A type name must be supplied when specifying a data type statistic type."); } final DataTypeAdapter adapter = dataStore.getType(typeName); if (adapter == null) { throw new ParameterException("Unable to find an type named: " + typeName); } try (CloseableIterator>> stats = statsStore.getDataTypeStatistics(adapter, statisticType, tag)) { stats.forEachRemaining(stat -> matching.add(stat)); } } else if (statisticType instanceof FieldStatisticType) { if (typeName == null) { throw new ParameterException( "A type name must be supplied when specifying a field statistic type."); } final DataTypeAdapter adapter = dataStore.getType(typeName); if (adapter == null) { throw new ParameterException("Unable to find an type named: " + typeName); } if (fieldName == null) { throw new ParameterException( "A field name must be supplied when specifying a field statistic type."); } boolean fieldFound = false; final FieldDescriptor[] fields = adapter.getFieldDescriptors(); for (int i = 0; i < fields.length; i++) { if (fields[i].fieldName().equals(fieldName)) { fieldFound = true; break; } } 
if (!fieldFound) { throw new ParameterException( "Unable to find a field named '" + fieldName + "' on type '" + typeName + "'."); } try (CloseableIterator>> stats = statsStore.getFieldStatistics(adapter, statisticType, fieldName, tag)) { stats.forEachRemaining(stat -> matching.add(stat)); } } } else { try (CloseableIterator>> stats = statsStore.getAllStatistics(null)) { stats.forEachRemaining(stat -> { // This could all be optimized to one giant check, but it's split for readability if ((tag != null) && !tag.equals(stat.getTag())) { return; } if ((indexName != null) && (!(stat instanceof IndexStatistic) || !indexName.equals(((IndexStatistic) stat).getIndexName()))) { return; } if (typeName != null) { if (stat instanceof IndexStatistic) { return; } if ((stat instanceof DataTypeStatistic) && !typeName.equals(((DataTypeStatistic) stat).getTypeName())) { return; } if ((stat instanceof FieldStatistic) && !typeName.equals(((FieldStatistic) stat).getTypeName())) { return; } } if ((fieldName != null) && (!(stat instanceof FieldStatistic) || !fieldName.equals(((FieldStatistic) stat).getFieldName()))) { return; } matching.add(stat); }); } } return matching; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/StatsOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.stats; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class StatsOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { StatsSection.class, AddStatCommand.class, ListStatTypesCommand.class, CompactStatsCommand.class, ListStatsCommand.class, RecalculateStatsCommand.class, RemoveStatCommand.class, AddStatCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/stats/StatsSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.stats;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * Marker CLI section grouping the statistics commands ("geowave stat ..." /
 * "geowave statistics ..."). The individual commands are registered as children of this section
 * via {@link GeowaveOperation#parentOperation()}; this class intentionally has no body.
 */
@GeowaveOperation(name = {"stat", "statistics"}, parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Commands to manage statistics")
public class StatsSection extends DefaultOperation {
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/AbstractRemoveCommand.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.Set; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; /** Common code for removing an entry from the properties file. */ public abstract class AbstractRemoveCommand extends ServiceEnabledCommand { /** Return "200 OK" for all removal commands. 
*/ @Override public Boolean successStatusIs200() { return true; } @Parameter(description = "", required = true, arity = 1) private List parameters = new ArrayList<>(); protected String pattern = null; public String getEntryName() { if (parameters.size() < 1) { throw new ParameterException("Must specify entry name to delete"); } return parameters.get(0).trim(); } public String computeResults(final OperationParams params, final String patternPrefix) throws Exception { // this ensures we are only exact-matching rather than using the prefix final String pattern = patternPrefix + "."; final Properties existingProps = getGeoWaveConfigProperties(params); // Find properties to remove final Set keysToRemove = new HashSet<>(); for (final String key : existingProps.stringPropertyNames()) { if (key.startsWith(pattern)) { keysToRemove.add(key); } } final int startSize = existingProps.size(); // Remove each property. for (final String key : keysToRemove) { existingProps.remove(key); } // Write properties file ConfigOptions.writeProperties(getGeoWaveConfigFile(params), existingProps, params.getConsole()); final int endSize = existingProps.size(); if (endSize < startSize) { return patternPrefix + " successfully removed"; } else { throw new TargetNotFoundException(patternPrefix + " does not exist"); } } public void setEntryName(final String entryName) { parameters = new ArrayList<>(); parameters.add(entryName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/AddStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultPluginOptions; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "add", parentOperation = StoreSection.class) @Parameters(commandDescription = "Add a data store to the GeoWave configuration") public class AddStoreCommand extends ServiceEnabledCommand { private static final Logger LOGGER = LoggerFactory.getLogger(AddStoreCommand.class); public static final String PROPERTIES_CONTEXT = "properties"; @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter( names = {"-d", "--default"}, description = "Make this the default store in all operations") private Boolean makeDefault; @Parameter( names = {"-t", "--type"}, required = true, description = "The type of store, such as accumulo, hbase, etc") private String storeType; private DataStorePluginOptions pluginOptions = new DataStorePluginOptions(); @ParametersDelegate private StoreFactoryOptions requiredOptions; @Override 
public boolean prepare(final OperationParams params) { super.prepare(params); // Load SPI options for the given type into pluginOptions. if (storeType != null) { pluginOptions.selectPlugin(storeType); requiredOptions = pluginOptions.getFactoryOptions(); } else { final Properties existingProps = getGeoWaveConfigProperties(params); // Try to load the 'default' options. final String defaultStore = existingProps.getProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE); // Load the default index. if (defaultStore != null) { try { if (pluginOptions.load( existingProps, DataStorePluginOptions.getStoreNamespace(defaultStore))) { // Set the required type option. storeType = pluginOptions.getType(); requiredOptions = pluginOptions.getFactoryOptions(); } } catch (final ParameterException pe) { // HP Fortify "Improper Output Neutralization" false // positive // What Fortify considers "user input" comes only // from users with OS-level access anyway LOGGER.warn("Couldn't load default store: " + defaultStore, pe); } } } return true; } @Override public void execute(final OperationParams params) { computeResults(params); } @Override public String computeResults(final OperationParams params) { final Properties existingProps = getGeoWaveConfigProperties(params); // Ensure that a name is chosen. if (parameters.size() != 1) { throw new ParameterException("Must specify store name"); } // Make sure we're not already in the index. final DataStorePluginOptions existingOptions = new DataStorePluginOptions(); if (existingOptions.load(existingProps, getNamespace())) { throw new ParameterException("That store already exists: " + getPluginName()); } if (pluginOptions.getFactoryOptions() != null) { pluginOptions.getFactoryOptions().validatePluginOptions(existingProps, params.getConsole()); } // Save the store options. pluginOptions.save(existingProps, getNamespace()); // Make default? 
if (Boolean.TRUE.equals(makeDefault)) { existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, getPluginName()); } // Write properties file ConfigOptions.writeProperties( getGeoWaveConfigFile(), existingProps, pluginOptions.getFactoryOptions().getClass(), getNamespace() + "." + DefaultPluginOptions.OPTS, params.getConsole()); final StringBuilder builder = new StringBuilder(); for (final Object key : existingProps.keySet()) { final String[] split = key.toString().split("\\."); if (split.length > 1) { if (split[1].equals(parameters.get(0))) { builder.append(key.toString() + "=" + existingProps.getProperty(key.toString()) + "\n"); } } } return builder.toString(); } public DataStorePluginOptions getPluginOptions() { return pluginOptions; } public String getPluginName() { return parameters.get(0); } public String getNamespace() { return DataStorePluginOptions.getStoreNamespace(getPluginName()); } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public Boolean getMakeDefault() { return makeDefault; } public void setMakeDefault(final Boolean makeDefault) { this.makeDefault = makeDefault; } public String getStoreType() { return storeType; } public void setStoreType(final String storeType) { this.storeType = storeType; } public void setPluginOptions(final DataStorePluginOptions pluginOptions) { this.pluginOptions = pluginOptions; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/ClearStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "clear", parentOperation = StoreSection.class) @Parameters(commandDescription = "Clear ALL data from a data store and delete tables") public class ClearStoreCommand extends ServiceEnabledCommand { /** Return "200 OK" for all clear commands. 
*/ @Override public Boolean successStatusIs200() { return true; } private static final Logger LOGGER = LoggerFactory.getLogger(ClearStoreCommand.class); @Parameter(description = "") private List parameters = new ArrayList<>(); private DataStorePluginOptions inputStoreOptions = null; @Override public void execute(final OperationParams params) { computeResults(params); } public List getParameters() { return parameters; } public void setParameters(final String storeName) { parameters = new ArrayList<>(); parameters.add(storeName); } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } @Override public Void computeResults(final OperationParams params) { if (parameters.size() < 1) { throw new ParameterException("Must specify store name"); } final String inputStoreName = parameters.get(0); // Attempt to load store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole()); LOGGER.info("Deleting everything in store: " + inputStoreName); inputStoreOptions.createDataStore().delete(QueryBuilder.newBuilder().build()); return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/CopyConfigStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "copycfg", parentOperation = StoreSection.class) @Parameters(commandDescription = "Copy and modify local data store configuration") public class CopyConfigStoreCommand extends DefaultOperation implements Command { @Parameter(description = " ") private List parameters = new ArrayList<>(); @Parameter( names = {"-d", "--default"}, description = "Make this the default store in all operations") private Boolean makeDefault; @ParametersDelegate private DataStorePluginOptions newPluginOptions = new DataStorePluginOptions(); @Override public boolean prepare(final OperationParams params) { super.prepare(params); final Properties existingProps = getGeoWaveConfigProperties(params); // Load the old store, so that we can override the values String oldStore = null; if (parameters.size() >= 1) { oldStore = parameters.get(0); if (!newPluginOptions.load( existingProps, DataStorePluginOptions.getStoreNamespace(oldStore))) { throw new ParameterException("Could not find store: " + 
oldStore); } } // Successfully prepared. return true; } public void setNewPluginOptions(final DataStorePluginOptions newPluginOptions) { this.newPluginOptions = newPluginOptions; } @Override public void execute(final OperationParams params) { final Properties existingProps = getGeoWaveConfigProperties(params); if (parameters.size() < 2) { throw new ParameterException("Must specify names"); } // This is the new store name. final String newStore = parameters.get(1); final String newStoreNamespace = DataStorePluginOptions.getStoreNamespace(newStore); // Make sure we're not already in the index. final DataStorePluginOptions existPlugin = new DataStorePluginOptions(); if (existPlugin.load(existingProps, newStoreNamespace)) { throw new ParameterException("That store already exists: " + newStore); } // Save the options. newPluginOptions.save(existingProps, newStoreNamespace); // Make default? if (Boolean.TRUE.equals(makeDefault)) { existingProps.setProperty(DataStorePluginOptions.DEFAULT_PROPERTY_NAMESPACE, newStore); } // Write properties file ConfigOptions.writeProperties(getGeoWaveConfigFile(params), existingProps, params.getConsole()); } public List getParameters() { return parameters; } public void setParameters(final String existingStore, final String newStore) { parameters = new ArrayList<>(); parameters.add(existingStore); parameters.add(newStore); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/CopyStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

/**
 * CLI command that copies every entry from one configured data store into another, already
 * existing, data store.
 */
@GeowaveOperation(name = "copy", parentOperation = StoreSection.class)
@Parameters(commandDescription = "Copy all data from one data store to another existing data store")
public class CopyStoreCommand extends DefaultOperation implements Command {

  // Positional arguments: the input store name followed by the output store name.
  @Parameter(description = " ")
  private List<String> parameters = new ArrayList<>();

  private DataStorePluginOptions inputStoreOptions = null;

  private DataStorePluginOptions outputStoreOptions = null;

  /**
   * Loads both stores from the config file and delegates the bulk copy to
   * {@code DataStore.copyTo}.
   */
  @Override
  public void execute(final OperationParams params) throws Exception {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      throw new ParameterException("Requires arguments: ");
    }

    // Config file
    final File configFile = getGeoWaveConfigFile(params);
    final String inputStoreName = parameters.get(0);
    final String outputStoreName = parameters.get(1);

    // Attempt to load input store.
    inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole());
    // Attempt to load output store.
    outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole());

    inputStoreOptions.createDataStore().copyTo(outputStoreOptions.createDataStore());
  }

  public List<String> getParameters() {
    return parameters;
  }

  public void setParameters(final String inputStore, final String outputStore) {
    parameters = new ArrayList<>();
    parameters.add(inputStore);
    parameters.add(outputStore);
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  public DataStorePluginOptions getOutputStoreOptions() {
    return outputStoreOptions;
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/DataStorePluginOptions.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import java.util.Map;
import org.locationtech.geowave.core.cli.api.DefaultPluginOptions;
import org.locationtech.geowave.core.cli.api.PluginOptions;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.PropertyStore;
import org.locationtech.geowave.core.store.StoreFactoryFamilySpi;
import org.locationtech.geowave.core.store.StoreFactoryOptions;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.config.ConfigUtils;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.ParametersDelegate;

/**
 * Class is used to facilitate loading of a DataStore from options specified on the command line.
 */
public class DataStorePluginOptions extends DefaultPluginOptions implements PluginOptions {

  public static final String DATASTORE_PROPERTY_NAMESPACE = "store";
  public static final String DEFAULT_PROPERTY_NAMESPACE = "storedefault";

  // This is the plugin loaded from SPI based on "datastore"
  private StoreFactoryFamilySpi factoryPlugin = null;

  // These are the options loaded from factoryPlugin based on "datastore"
  @ParametersDelegate
  private StoreFactoryOptions factoryOptions = null;

  public DataStorePluginOptions() {}

  /**
   * From the given options (like 'username', 'password') setup this plugin options to be able to
   * create data stores.
   *
   * @param options the raw option key/value pairs identifying and configuring the store
   * @throws IllegalArgumentException if no store plugin factory matches the options
   */
  public DataStorePluginOptions(final Map<String, String> options)
      throws IllegalArgumentException {
    factoryPlugin = GeoWaveStoreFinder.findStoreFamily(options);
    if (factoryPlugin == null) {
      throw new IllegalArgumentException("Cannot find store plugin factory");
    }
    factoryOptions = factoryPlugin.getDataStoreFactory().createOptionsInstance();
    ConfigUtils.populateOptionsFromList(getFactoryOptions(), options);
  }

  public DataStorePluginOptions(final StoreFactoryOptions factoryOptions) {
    this.factoryOptions = factoryOptions;
    factoryPlugin = factoryOptions.getStoreFactory();
  }

  /**
   * This method will allow the user to specify the desired factory, such as 'accumulo' or
   * 'hbase'.
   */
  @Override
  public void selectPlugin(final String qualifier) {
    if (qualifier == null) {
      // Clearing the qualifier clears the current selection.
      factoryPlugin = null;
      factoryOptions = null;
      return;
    }
    final Map<String, StoreFactoryFamilySpi> families =
        GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies();
    factoryPlugin = families.get(qualifier);
    if (factoryPlugin == null) {
      throw new ParameterException("Unknown datastore type: " + qualifier);
    }
    factoryOptions = factoryPlugin.getDataStoreFactory().createOptionsInstance();
  }

  /**
   * @return the current factory options as a key/value map, including the store-type hint when a
   *         plugin is selected
   */
  public Map<String, String> getOptionsAsMap() {
    final Map<String, String> configOptions = ConfigUtils.populateListFromOptions(factoryOptions);
    if (factoryPlugin != null) {
      configOptions.put(GeoWaveStoreFinder.STORE_HINT_OPTION.getName(), factoryPlugin.getType());
    }
    return configOptions;
  }

  public void setFactoryOptions(final StoreFactoryOptions factoryOptions) {
    this.factoryOptions = factoryOptions;
  }

  public void setFactoryFamily(final StoreFactoryFamilySpi factoryPlugin) {
    this.factoryPlugin = factoryPlugin;
  }

  public StoreFactoryFamilySpi getFactoryFamily() {
    return factoryPlugin;
  }

  public StoreFactoryOptions getFactoryOptions() {
    return factoryOptions;
  }

  // The create*() methods below all delegate to the selected factory family, passing the
  // currently configured factory options.

  public DataStore createDataStore() {
    return getFactoryFamily().getDataStoreFactory().createStore(getFactoryOptions());
  }

  public PersistentAdapterStore createAdapterStore() {
    return getFactoryFamily().getAdapterStoreFactory().createStore(getFactoryOptions());
  }

  public IndexStore createIndexStore() {
    return getFactoryFamily().getIndexStoreFactory().createStore(getFactoryOptions());
  }

  public DataStatisticsStore createDataStatisticsStore() {
    return getFactoryFamily().getDataStatisticsStoreFactory().createStore(getFactoryOptions());
  }

  public AdapterIndexMappingStore createAdapterIndexMappingStore() {
    return getFactoryFamily().getAdapterIndexMappingStoreFactory().createStore(getFactoryOptions());
  }

  public InternalAdapterStore createInternalAdapterStore() {
    return getFactoryFamily().getInternalAdapterStoreFactory().createStore(getFactoryOptions());
  }

  public PropertyStore createPropertyStore() {
    return getFactoryFamily().getPropertyStoreFactory().createStore(getFactoryOptions());
  }

  public DataStoreOperations createDataStoreOperations() {
    return getFactoryFamily().getDataStoreOperationsFactory().createStore(getFactoryOptions());
  }

  @Override
  public String getType() {
    return (factoryPlugin == null) ? null : factoryPlugin.getType();
  }

  /**
   * @param name the store name
   * @return the config-file namespace for the store, e.g. "store.&lt;name&gt;"
   */
  public static String getStoreNamespace(final String name) {
    return String.format("%s.%s", DATASTORE_PROPERTY_NAMESPACE, name);
  }

  public String getGeoWaveNamespace() {
    return getFactoryOptions().getGeoWaveNamespace();
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/DescribeStoreCommand.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter; import org.locationtech.geowave.core.cli.utils.FirstElementListComparator; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "describe", parentOperation = StoreSection.class) @Parameters(commandDescription = "List all of the configuration parameters for a given store") public class DescribeStoreCommand extends ServiceEnabledCommand> { @Parameter(description = "") private List parameters = new ArrayList<>(); public List getParameters() { return this.parameters; } public void setParameters(final String storeName) { this.parameters = new ArrayList<>(); this.parameters.add(storeName); } @Override public void execute(OperationParams params) throws Exception { Map configMap = computeResults(params); List> rows = new ArrayList>(configMap.size()); Iterator> entryIter = configMap.entrySet().iterator(); while (entryIter.hasNext()) { Map.Entry entry = entryIter.next(); List values = new 
ArrayList(2); values.add(entry.getKey()); values.add(entry.getValue()); rows.add(values); } Collections.sort(rows, new FirstElementListComparator()); new ConsoleTablePrinter(params.getConsole()).print( Arrays.asList("Config Parameter", "Value"), rows); } @Override public Map computeResults(OperationParams params) throws Exception { if (parameters.size() < 1) { throw new ParameterException("Must specify store name"); } final File configFile = getGeoWaveConfigFile(params); final Properties configProps = ConfigOptions.loadProperties(configFile); final String configPrefix = "store." + parameters.get(0) + ".opts."; Map storeMap = configProps.entrySet().stream().filter( entry -> entry.getKey().toString().startsWith(configPrefix)).collect( Collectors.toMap( entry -> ((String) entry.getKey()).substring(configPrefix.length()), entry -> (String) entry.getValue())); return storeMap; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/ListStorePluginsCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import java.util.Map;
import java.util.Map.Entry;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.StoreFactoryFamilySpi;
import com.beust.jcommander.Parameters;

@GeowaveOperation(name = "listplugins", parentOperation = StoreSection.class)
@Parameters(commandDescription = "List supported data store types")
public class ListStorePluginsCommand extends ServiceEnabledCommand<String> {

  @Override
  public void execute(final OperationParams params) {
    params.getConsole().println(computeResults(params));
  }

  /**
   * Build a human-readable listing of every SPI-registered store factory family.
   *
   * @param params operation parameters supplied by the CLI framework
   * @return formatted listing of store types and their descriptions
   */
  @Override
  public String computeResults(final OperationParams params) {
    final StringBuilder builder = new StringBuilder();
    builder.append("Available datastores currently registered:\n");
    final Map<String, StoreFactoryFamilySpi> dataStoreFactories =
        GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies();
    for (final Entry<String, StoreFactoryFamilySpi> dataStoreFactoryEntry : dataStoreFactories.entrySet()) {
      final StoreFactoryFamilySpi dataStoreFactory = dataStoreFactoryEntry.getValue();
      // Guard against plugins that do not provide a description
      final String desc =
          dataStoreFactory.getDescription() == null ? "no description"
              : dataStoreFactory.getDescription();
      builder.append(String.format("%n %s:%n %s%n", dataStoreFactory.getType(), desc));
    }
    return builder.toString();
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/ListStoresCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter; import org.locationtech.geowave.core.cli.utils.FirstElementListComparator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "list", parentOperation = StoreSection.class) @Parameters(commandDescription = "List non-default geowave data stores and their associated type") public class ListStoresCommand extends ServiceEnabledCommand> { private static final Logger LOGGER = LoggerFactory.getLogger(ListStoresCommand.class); @Override public void execute(OperationParams params) throws Exception { Map storeMap = computeResults(params); List> rows = new ArrayList>(storeMap.size()); storeMap.entrySet().forEach(entry -> { List values = new ArrayList(2); String key = entry.getKey(); values.add(key.substring(6, key.length() - ".type".length())); values.add(entry.getValue()); rows.add(values); }); Collections.sort(rows, new FirstElementListComparator()); new ConsoleTablePrinter(params.getConsole()).print(Arrays.asList("Data Store", "Type"), rows); } 
@Override public Map computeResults(OperationParams params) throws Exception { final File configFile = getGeoWaveConfigFile(params); // ConfigOptions checks/will never return null Properties Properties configProps = ConfigOptions.loadProperties(configFile); LOGGER.debug(configProps.size() + " entries in the config file"); // The name that the user gave the store is in a property named // as "store." <[optional namespace.] the name the user gave > ".type" Map storeMap = configProps.entrySet().stream().filter( entry -> !entry.getKey().toString().startsWith("store.default-")) // Omit defaults .filter(entry -> entry.getKey().toString().startsWith("store.")).filter( entry -> entry.getKey().toString().endsWith(".type")).collect( Collectors.toMap( entry -> (String) entry.getKey(), entry -> (String) entry.getValue())); return storeMap; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/RemoveStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameters;

@GeowaveOperation(name = "rm", parentOperation = StoreSection.class)
@Parameters(commandDescription = "Remove a data store from the GeoWave configuration")
public class RemoveStoreCommand extends AbstractRemoveCommand {

  /**
   * Removes every configuration property under the named store's namespace by delegating to the
   * shared removal logic in the superclass.
   *
   * @param params operation parameters supplied by the CLI framework
   * @return the result message produced by the superclass removal
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    // Search for properties relevant to the given name
    // ("pattern" and "getEntryName" are inherited from AbstractRemoveCommand)
    pattern = DataStorePluginOptions.getStoreNamespace(getEntryName());
    return super.computeResults(params, pattern);
  }

  @Override
  public void execute(final OperationParams params) throws Exception {
    computeResults(params);
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/StoreLoader.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.store; import java.io.File; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.util.Properties; import org.locationtech.geowave.core.cli.api.DefaultPluginOptions; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils; import org.locationtech.geowave.core.cli.utils.JCommanderParameterUtils; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.internal.Console; /** * This is a convenience class which sets up some obvious values in the OperationParams based on the * parsed 'store name' from the main parameter. The other parameters are saved in case they need to * be used. 
*/ public class StoreLoader { private static final Logger LOGGER = LoggerFactory.getLogger(StoreLoader.class); private final String storeName; private DataStorePluginOptions dataStorePlugin = null; /** Constructor */ public StoreLoader(final String store) { storeName = store; } /** * Attempt to load the data store configuration from the config file. * * @param configFile * @return {@code true} if the configuration was successfully loaded */ public boolean loadFromConfig(final File configFile) { return loadFromConfig(configFile, new JCommander().getConsole()); } /** * Attempt to load the data store configuration from the config file. * * @param console the console to print output to * @param configFile * @return {@code true} if the configuration was successfully loaded */ public boolean loadFromConfig(final File configFile, final Console console) { final String namespace = DataStorePluginOptions.getStoreNamespace(storeName); return loadFromConfig( ConfigOptions.loadProperties(configFile, "^" + namespace), namespace, configFile, console); } /** * Attempt to load the data store configuration from the config file. 
* * @param configFile * @return {@code true} if the configuration was successfully loaded */ public boolean loadFromConfig( final Properties props, final String namespace, final File configFile, final Console console) { dataStorePlugin = new DataStorePluginOptions(); // load all plugin options and initialize dataStorePlugin with type and // options if (!dataStorePlugin.load(props, namespace)) { return false; } // knowing the datastore plugin options and class type, get all fields // and parameters in order to detect which are password fields if ((configFile != null) && (dataStorePlugin.getFactoryOptions() != null)) { File tokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(configFile); final Field[] fields = dataStorePlugin.getFactoryOptions().getClass().getDeclaredFields(); for (final Field field : fields) { for (final Annotation annotation : field.getAnnotations()) { if (annotation.annotationType() == Parameter.class) { final Parameter parameter = (Parameter) annotation; if (JCommanderParameterUtils.isPassword(parameter)) { final String storeFieldName = ((namespace != null) && !"".equals(namespace.trim())) ? namespace + "." + DefaultPluginOptions.OPTS + "." 
+ field.getName() : field.getName(); if (props.containsKey(storeFieldName)) { final String value = props.getProperty(storeFieldName); String decryptedValue = value; try { decryptedValue = SecurityUtils.decryptHexEncodedValue( value, tokenFile.getAbsolutePath(), console); } catch (final Exception e) { LOGGER.error( "An error occurred encrypting specified password value: " + e.getLocalizedMessage(), e); } props.setProperty(storeFieldName, decryptedValue); } } } } } tokenFile = null; } // reload datastore plugin with new password-encrypted properties if (!dataStorePlugin.load(props, namespace)) { return false; } return true; } public DataStorePluginOptions getDataStorePlugin() { return dataStorePlugin; } public void setDataStorePlugin(final DataStorePluginOptions dataStorePlugin) { this.dataStorePlugin = dataStorePlugin; } public String getStoreName() { return storeName; } public StoreFactoryFamilySpi getFactoryFamily() { return dataStorePlugin.getFactoryFamily(); } public StoreFactoryOptions getFactoryOptions() { return dataStorePlugin.getFactoryOptions(); } public DataStore createDataStore() { return dataStorePlugin.createDataStore(); } public PersistentAdapterStore createAdapterStore() { return dataStorePlugin.createAdapterStore(); } public InternalAdapterStore createInternalAdapterStore() { return dataStorePlugin.createInternalAdapterStore(); } public IndexStore createIndexStore() { return dataStorePlugin.createIndexStore(); } public DataStatisticsStore createDataStatisticsStore() { return dataStorePlugin.createDataStatisticsStore(); } public AdapterIndexMappingStore createAdapterIndexMappingStore() { return dataStorePlugin.createAdapterIndexMappingStore(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/StoreOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;

/** Registers all "store" CLI section operations with the SPI-driven operation registry. */
public class StoreOperationProvider implements CLIOperationProviderSpi {
  // Every command exposed under the "geowave store" section
  private static final Class[] OPERATIONS =
      new Class[] {
          StoreSection.class,
          AddStoreCommand.class,
          ClearStoreCommand.class,
          CopyStoreCommand.class,
          CopyConfigStoreCommand.class,
          DescribeStoreCommand.class,
          ListStoresCommand.class,
          ListStorePluginsCommand.class,
          RemoveStoreCommand.class,
          VersionCommand.class};

  @Override
  public Class[] getOperations() {
    return OPERATIONS;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/StoreSection.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/** Parent CLI section grouping all "geowave store ..." commands; holds no logic of its own. */
@GeowaveOperation(name = "store", parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Commands to manage GeoWave data stores")
public class StoreSection extends DefaultOperation {
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/store/VersionCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.store;

import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.core.cli.VersionUtils;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.server.ServerSideOperations;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

/** Command for trying to retrieve the version of GeoWave for a remote datastore */
@GeowaveOperation(name = "version", parentOperation = StoreSection.class)
@Parameters(commandDescription = "Get the version of GeoWave used by a data store")
public class VersionCommand extends ServiceEnabledCommand {

  // positional argument: the store name
  @Parameter(description = "")
  private List parameters = new ArrayList<>();

  @Override
  public void execute(final OperationParams params) throws Exception {
    computeResults(params);
  }

  public void setParameters(final List parameters) {
    this.parameters = parameters;
  }

  /**
   * Look up the server-side GeoWave version for the named store when a server-side library is
   * enabled; otherwise report only the command-line version.
   *
   * @param params operation parameters supplied by the CLI framework
   * @return the version message that was printed to the console
   * @throws ParameterException if no store name was supplied
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if (parameters.size() < 1) {
      throw new ParameterException("Must specify store name");
    }
    final String inputStoreName = parameters.get(0);
    final DataStorePluginOptions inputStoreOptions =
        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());
    // TODO: This return probably should be formatted as JSON
    final DataStoreOperations ops = inputStoreOptions.createDataStoreOperations();
    // Only server-side-enabled stores can report a remote version
    if ((ops instanceof ServerSideOperations)
        && inputStoreOptions.getFactoryOptions().getStoreOptions().isServerSideLibraryEnabled()) {
      params.getConsole().println(
          "Looking up remote datastore version for type ["
              + inputStoreOptions.getType()
              + "] and name ["
              + inputStoreName
              + "]");
      final String version = "Version: " + ((ServerSideOperations) ops).getVersion();
      params.getConsole().println(version);
      return version;
    } else {
      final String ret1 =
          "Datastore for type ["
              + inputStoreOptions.getType()
              + "] and name ["
              + inputStoreName
              + "] does not have a serverside library enabled.";
      params.getConsole().println(ret1);
      final String ret2 = "Commandline Version: " + VersionUtils.getVersion();
      params.getConsole().println(ret2);
      return ret1 + '\n' + ret2;
    }
  }

  @Override
  public HttpMethod getMethod() {
    return HttpMethod.GET;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/DescribeTypeCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.type;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.cli.utils.ConsoleTablePrinter;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

@GeowaveOperation(name = "describe", parentOperation = TypeSection.class)
@Parameters(commandDescription = "Describes a type with a given name in a data store")
public class DescribeTypeCommand extends ServiceEnabledCommand {
  private static final Logger LOGGER = LoggerFactory.getLogger(DescribeTypeCommand.class);

  // positional arguments: store name followed by type name
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  // retained after computeResults so callers (e.g. services) can inspect the loaded store
  private DataStorePluginOptions inputStoreOptions = null;

  /** Return "200 OK" for all describe commands. */
  @Override
  public Boolean successStatusIs200() {
    return true;
  }

  @Override
  public void execute(final OperationParams params) {
    computeResults(params);
  }

  public List getParameters() {
    return parameters;
  }

  public void setParameters(final String storeName, final String adapterId) {
    parameters = new ArrayList<>();
    parameters.add(storeName);
    parameters.add(adapterId);
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  /**
   * Look up the named type's adapter and print its data class, field names/classes, and any
   * additional adapter-reported properties as console tables.
   *
   * @param params operation parameters supplied by the CLI framework
   * @return always null; output goes to the console
   * @throws ParameterException if both store name and type name are not supplied
   */
  @Override
  public Void computeResults(final OperationParams params) {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      throw new ParameterException("Requires arguments: ");
    }
    final String inputStoreName = parameters.get(0);
    final String typeName = parameters.get(1);
    // Attempt to load store.
    inputStoreOptions =
        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());
    LOGGER.info(
        "Describing everything in store: " + inputStoreName + " with type name: " + typeName);
    final PersistentAdapterStore adapterStore = inputStoreOptions.createAdapterStore();
    final InternalAdapterStore internalAdapterStore =
        inputStoreOptions.createInternalAdapterStore();
    // resolve the type name to an internal adapter id, then fetch the adapter
    final DataTypeAdapter type =
        adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter();
    final FieldDescriptor[] typeFields = type.getFieldDescriptors();
    // One row per field: [field name, binding class]
    final List> rows = new ArrayList<>();
    for (final FieldDescriptor field : typeFields) {
      final List row = new ArrayList<>();
      row.add(field.fieldName());
      row.add(field.bindingClass().getName());
      rows.add(row);
    }
    final List headers = new ArrayList<>();
    headers.add("Field");
    headers.add("Class");
    params.getConsole().println("Data type class: " + type.getDataClass().getName());
    params.getConsole().println("\nFields:");
    final ConsoleTablePrinter cp = new ConsoleTablePrinter(params.getConsole());
    cp.print(headers, rows);
    // adapters may expose extra descriptive properties; print them in a second table
    final Map additionalProperties = type.describe();
    if (additionalProperties.size() > 0) {
      // reuse the row/header lists for the second table
      rows.clear();
      headers.clear();
      headers.add("Property");
      headers.add("Value");
      params.getConsole().println("\nAdditional Properties:");
      for (final Entry property : additionalProperties.entrySet()) {
        final List row = new ArrayList<>();
        row.add(property.getKey());
        row.add(property.getValue());
        rows.add(row);
      }
      cp.print(headers, rows);
    }
    return null;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/ListTypesCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.type;

import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

@GeowaveOperation(name = "list", parentOperation = TypeSection.class)
@Parameters(commandDescription = "Display all type names in a data store")
public class ListTypesCommand extends ServiceEnabledCommand<String> {

  // positional argument: the store name
  @Parameter(description = "")
  private List<String> parameters = new ArrayList<>();

  // retained after computeResults so callers (e.g. services) can inspect the loaded store
  private DataStorePluginOptions inputStoreOptions = null;

  @Override
  public void execute(final OperationParams params) {
    params.getConsole().println("Available types: " + computeResults(params));
  }

  public List<String> getParameters() {
    return parameters;
  }

  public void setParameters(final String storeName) {
    parameters = new ArrayList<>();
    parameters.add(storeName);
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  /**
   * Load the named store and return its type names as a single space-separated string (with a
   * trailing space when any types exist).
   *
   * @param params operation parameters supplied by the CLI framework
   * @return space-separated type names
   * @throws ParameterException if no store name was supplied
   */
  @Override
  public String computeResults(final OperationParams params) {
    if (parameters.size() < 1) {
      throw new ParameterException("Must specify store name");
    }
    final String inputStoreName = parameters.get(0);
    // Attempt to load store.
    inputStoreOptions =
        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());
    final String[] typeNames = inputStoreOptions.createInternalAdapterStore().getTypeNames();
    // StringBuilder replaces the needlessly synchronized StringBuffer
    final StringBuilder buffer = new StringBuilder();
    for (final String typeName : typeNames) {
      buffer.append(typeName).append(' ');
    }
    return buffer.toString();
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/RemoveTypeCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.cli.type;

import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand;
import org.locationtech.geowave.core.store.api.QueryBuilder;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

@GeowaveOperation(name = "rm", parentOperation = TypeSection.class)
@Parameters(commandDescription = "Remove a data type and all associated data from a data store")
public class RemoveTypeCommand extends ServiceEnabledCommand {
  private static final Logger LOGGER = LoggerFactory.getLogger(RemoveTypeCommand.class);

  // positional arguments: store name followed by type name
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  // retained after computeResults so callers (e.g. services) can inspect the loaded store
  private DataStorePluginOptions inputStoreOptions = null;

  /** Return "200 OK" for all removal commands. */
  @Override
  public Boolean successStatusIs200() {
    return true;
  }

  @Override
  public void execute(final OperationParams params) {
    computeResults(params);
  }

  public List getParameters() {
    return parameters;
  }

  public void setParameters(final String storeName, final String adapterId) {
    parameters = new ArrayList<>();
    parameters.add(storeName);
    parameters.add(adapterId);
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  /**
   * Delete every entry of the named type from the named store.
   *
   * @param params operation parameters supplied by the CLI framework
   * @return always null; this command only performs the deletion
   * @throws ParameterException if both store name and type name are not supplied
   */
  @Override
  public Void computeResults(final OperationParams params) {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      throw new ParameterException("Requires arguments: ");
    }
    final String inputStoreName = parameters.get(0);
    final String typeName = parameters.get(1);
    // Attempt to load store.
    inputStoreOptions =
        CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole());
    LOGGER.info("Deleting everything in store: " + inputStoreName + " with type name: " + typeName);
    // a type-constrained query with no other constraints deletes all data of that type
    inputStoreOptions.createDataStore().delete(
        QueryBuilder.newBuilder().addTypeName(typeName).build());
    return null;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/TypeOperationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.type; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class TypeOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { ListTypesCommand.class, RemoveTypeCommand.class, DescribeTypeCommand.class, TypeSection.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/cli/type/TypeSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.cli.type; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "type", parentOperation = GeoWaveTopLevelSection.class) @Parameters(commandDescription = "Commands for managing types within a data store") public class TypeSection extends DefaultOperation { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/config/ConfigOption.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.config; public class ConfigOption { private final String name; private final String description; private final boolean optional; private boolean password; private boolean usesStringConverter; private Class type; public ConfigOption( final String name, final String description, final boolean optional, final Class type) { this.name = name; this.description = description; this.optional = optional; this.type = type; } public Class getType() { return type; } public void setType(final Class type) { this.type = type; } public String getName() { return name; } public String getDescription() { return description; } public boolean isOptional() { return optional; } public boolean isPassword() { return password; } public void setPassword(final boolean password) { this.password = password; } public boolean usesStringConverter() { return usesStringConverter; } public void setUsesStringConverter(boolean usesStringConverter) { this.usesStringConverter = usesStringConverter; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/config/ConfigUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.config;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator;
import org.locationtech.geowave.core.cli.prefix.JCommanderPropertiesTransformer;
import org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap;
import org.locationtech.geowave.core.cli.prefix.TranslationEntry;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.StoreFactoryOptions;

/** Utilities for converting between JCommander-annotated options objects and option maps. */
public class ConfigUtils {
  /**
   * Normalize an option name: trim, lower-case with the English locale, replace spaces with
   * underscores and drop commas.
   */
  public static String cleanOptionName(String name) {
    // Literal replacement is sufficient; replaceAll's regex engine was unnecessary here.
    name = name.trim().toLowerCase(Locale.ENGLISH).replace(" ", "_");
    name = name.replace(",", "");
    return name;
  }

  /**
   * Build a quoted, comma-separated list of cleaned option names, preceded by the given prefix.
   *
   * @param strs the option names
   * @param prefixStr text prepended before the first option
   * @return the assembled builder (empty if {@code strs} is empty)
   */
  public static StringBuilder getOptions(final Collection<String> strs, final String prefixStr) {
    final StringBuilder builder = new StringBuilder();
    for (final String str : strs) {
      if (builder.length() > 0) {
        builder.append(",");
      } else {
        builder.append(prefixStr);
      }
      builder.append("'").append(cleanOptionName(str)).append("'");
    }
    return builder;
  }

  public static StringBuilder getOptions(final Collection<String> strs) {
    return getOptions(strs, "Options include: ");
  }

  /**
   * This method will use the parameter descriptions from JCommander to create/populate an
   * AbstractConfigOptions map.
   *
   * @param createOptionsInstance the annotated options instance to introspect (may be null)
   * @param includeHidden whether hidden parameters are included
   * @return one ConfigOption per visible parameter; empty array when the instance is null
   */
  public static ConfigOption[] createConfigOptionsFromJCommander(
      final Object createOptionsInstance,
      final boolean includeHidden) {
    ConfigOption[] opts = null;
    if (createOptionsInstance != null) {
      final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator();
      translator.addObject(createOptionsInstance);
      final JCommanderTranslationMap map = translator.translate();
      final Collection<TranslationEntry> entries = map.getEntries().values();
      final List<ConfigOption> options = new ArrayList<>();
      for (final TranslationEntry entry : entries) {
        if (includeHidden || !entry.isHidden()) {
          final ConfigOption opt =
              new ConfigOption(
                  entry.getAsPropertyName(),
                  entry.getDescription(),
                  !entry.isRequired(),
                  entry.getParam().getType());
          opt.setPassword(entry.isPassword());
          opt.setUsesStringConverter(entry.hasStringConverter());
          options.add(opt);
        }
      }
      opts = options.toArray(new ConfigOption[options.size()]);
    } else {
      opts = new ConfigOption[0];
    }
    return opts;
  }

  /** Take the given options and populate the given options list. This is JCommander specific. */
  public static <T> T populateOptionsFromList(
      final T optionsObject,
      final Map<String, String> optionList) {
    if (optionsObject != null) {
      final JCommanderPropertiesTransformer translator = new JCommanderPropertiesTransformer();
      translator.addObject(optionsObject);
      translator.transformFromMap(optionList);
    }
    return optionsObject;
  }

  /** Take the given options and populate the given options list. This is JCommander specific. */
  public static Map<String, String> populateListFromOptions(
      final StoreFactoryOptions optionsObject) {
    final Map<String, String> mapOptions = new HashMap<>();
    if (optionsObject != null) {
      final JCommanderPropertiesTransformer translator = new JCommanderPropertiesTransformer();
      translator.addObject(optionsObject);
      translator.transformToMap(mapOptions);
      // Record which store implementation produced these options.
      mapOptions.put(GeoWaveStoreFinder.STORE_HINT_KEY, optionsObject.getStoreFactory().getType());
    }
    return mapOptions;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/CommonIndexedPersistenceEncoding.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data;

import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;

/**
 * This class models all of the necessary information for persisting data in Accumulo (following the
 * common index model) and is used internally within GeoWave as an intermediary object between the
 * direct storage format and the native data format. It also contains information about the
 * persisted object within a particular index such as the insertion ID in the index and the number
 * of duplicates for this entry in the index, and is used when reading data from the index.
 */
// NOTE(review): PersistentDataset is used raw below; generic type arguments appear to have been
// stripped by extraction — confirm against the original source before parameterizing.
public class CommonIndexedPersistenceEncoding extends IndexedPersistenceEncoding {
  public CommonIndexedPersistenceEncoding(
      final short internalAdapterId,
      final byte[] dataId,
      final byte[] insertionPartitionKey,
      final byte[] insertionSortKey,
      final int duplicateCount,
      final PersistentDataset commonData,
      final PersistentDataset unknownData) {
    // Pure delegation; all state lives in IndexedPersistenceEncoding.
    super(
        internalAdapterId,
        dataId,
        insertionPartitionKey,
        insertionSortKey,
        duplicateCount,
        commonData,
        unknownData);
  }

  /**
   * Given an index, convert this persistent encoding to a set of insertion IDs for that index
   *
   * @param index the index
   * @return The insertions IDs for this object in the index
   */
  public InsertionIds getInsertionIds(final Index index) {
    // Derive the numeric ranges from the index's own dimensions, then let the index strategy
    // compute where the entry is inserted.
    final MultiDimensionalNumericData boxRangeData =
        getNumericData(index.getIndexModel().getDimensions());
    return index.getIndexStrategy().getInsertionIds(boxRangeData);
  }

  /**
   * Given an ordered set of dimensions, convert this persistent encoding common index data into a
   * MultiDimensionalNumericData object that can then be used by the Index
   *
   * @param dimensions the ordered set of dimensions
   * @return the numeric data
   */
  @SuppressWarnings({"rawtypes", "unchecked"})
  public MultiDimensionalNumericData getNumericData(final NumericDimensionField[] dimensions) {
    final NumericData[] dataPerDimension = new NumericData[dimensions.length];
    for (int d = 0; d < dimensions.length; d++) {
      // Look up the common-index value stored under this dimension's field name.
      final Object val = getCommonData().getValue(dimensions[d].getFieldName());
      if (val != null) {
        dataPerDimension[d] = dimensions[d].getNumericData(val);
      }
      // NOTE(review): a dimension with no stored value leaves a null slot in dataPerDimension;
      // downstream consumers of BasicNumericDataset presumably tolerate that — confirm.
    }
    return new BasicNumericDataset(dataPerDimension);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/DataReader.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; import org.locationtech.geowave.core.store.data.field.FieldReader; /** * This interface is used to read data from a row in a GeoWave data store. * * @param The binding class of this field */ public interface DataReader { /** * Get a reader for an individual field. * * @param fieldName the ID of the field * @return the FieldReader for the given field Name (ID) */ public FieldReader getReader(String fieldName); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/DataWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; import org.locationtech.geowave.core.store.data.field.FieldWriter; /** * This interface is used to write data for a row in a GeoWave data store. * * @param The binding class of this field */ public interface DataWriter { /** * Get a writer for an individual field given the ID. * * @param fieldName the unique field ID * @return the writer for the given field */ public FieldWriter getWriter(String fieldName); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/DeferredReadCommonIndexedPersistenceEncoding.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data;

import java.util.List;
import org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo;
import org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/**
 * Consults adapter to lookup field readers based on bitmasked fieldIds when converting unknown data
 * to adapter extended values
 *
 * @since 0.9.1
 */
public class DeferredReadCommonIndexedPersistenceEncoding extends
    AbstractAdapterPersistenceEncoding {
  // Lazily-read flattened field data; may be null when there is nothing deferred.
  private final FlattenedUnreadData unreadData;

  public DeferredReadCommonIndexedPersistenceEncoding(
      final short adapterId,
      final byte[] dataId,
      final byte[] partitionKey,
      final byte[] sortKey,
      final int duplicateCount,
      final PersistentDataset commonData,
      final FlattenedUnreadData unreadData) {
    super(
        adapterId,
        dataId,
        partitionKey,
        sortKey,
        duplicateCount,
        commonData,
        // Diamond added on the first dataset for consistency with the second (was raw).
        new MultiFieldPersistentDataset<>(),
        new MultiFieldPersistentDataset<>());
    this.unreadData = unreadData;
  }

  /**
   * Finish reading any deferred field data and add each decoded value to the adapter-extended
   * dataset, resolving field names and readers through the adapter.
   */
  @Override
  public void convertUnknownValues(
      final InternalDataAdapter adapter,
      final CommonIndexModel model) {
    if (unreadData != null) {
      final List<FlattenedFieldInfo> fields = unreadData.finishRead();
      for (final FlattenedFieldInfo field : fields) {
        // The original code retried getFieldNameForPosition with identical arguments when the
        // first call returned null; an identical call cannot yield a different result, so the
        // redundant retry was removed.
        final String fieldName = adapter.getFieldNameForPosition(model, field.getFieldPosition());
        final FieldReader reader = adapter.getReader(fieldName);
        final Object value = reader.readField(field.getValue());
        adapterExtendedData.addValue(fieldName, value);
      }
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/IndexedPersistenceEncoding.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data;

/**
 * This class models all of the necessary information for persisting data in the data store
 * (following the common index model) and is used internally within GeoWave as an intermediary
 * object between the direct storage format and the native data format. It also contains information
 * about the persisted object within a particular index such as the insertion ID in the index and
 * the number of duplicates for this entry in the index, and is used when reading data from the
 * index.
 */
// NOTE(review): PersistentDataset appears raw in the constructor; generic type arguments look
// stripped by extraction — confirm against the original source before parameterizing.
public class IndexedPersistenceEncoding extends PersistenceEncoding {
  // Partition and sort key portions of the insertion ID within the index.
  private final byte[] insertionPartitionKey;
  private final byte[] insertionSortKey;
  // Number of duplicate rows for this entry; negative disables deduplication (see below).
  private final int duplicateCount;

  public IndexedPersistenceEncoding(
      final Short internalAdapterId,
      final byte[] dataId,
      final byte[] insertionPartitionKey,
      final byte[] insertionSortKey,
      final int duplicateCount,
      final PersistentDataset commonData,
      final PersistentDataset unknownData) {
    super(internalAdapterId, dataId, commonData, unknownData);
    this.insertionPartitionKey = insertionPartitionKey;
    this.insertionSortKey = insertionSortKey;
    this.duplicateCount = duplicateCount;
  }

  // Always false here; subclasses that defer work may override.
  public boolean isAsync() {
    return false;
  }

  /**
   * Return the partition key portion of the insertion ID
   *
   * @return the insertion partition key
   */
  public byte[] getInsertionPartitionKey() {
    return insertionPartitionKey;
  }

  /**
   * Return the sort key portion of the insertion ID
   *
   * @return the insertion sort key
   */
  public byte[] getInsertionSortKey() {
    return insertionSortKey;
  }

  // Overrides the always-true default in PersistenceEncoding: a negative duplicate count is the
  // sentinel for "deduplication disabled".
  @Override
  public boolean isDeduplicationEnabled() {
    return duplicateCount >= 0;
  }

  /**
   * Return the number of duplicates for this entry. Entries are duplicated when a single row ID is
   * insufficient to index it.
   *
   * @return the number of duplicates
   */
  public int getDuplicateCount() {
    return duplicateCount;
  }

  /**
   * Return a flag indicating if the entry has any duplicates
   *
   * @return is it duplicated?
   */
  public boolean isDuplicated() {
    return duplicateCount > 0;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/MultiFieldPersistentDataset.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data;

import java.util.HashMap;
import java.util.Map;

/**
 * This is a basic mapping of field ID to native field type. "Native" in this sense can be to either
 * the data adapter or the common index, depending on whether it is in the common index or is an
 * extended field.
 *
 * @param <T> The most specific generalization for the type for all of the values in this dataset.
 */
// Type parameters had been stripped by extraction; restored here.
public class MultiFieldPersistentDataset<T> implements PersistentDataset<T> {
  private final Map<String, T> fieldNameToValueMap;

  public MultiFieldPersistentDataset() {
    fieldNameToValueMap = new HashMap<>();
  }

  /** Convenience constructor seeding the dataset with a single field/value pair. */
  public MultiFieldPersistentDataset(final String fieldName, final T value) {
    this();
    addValue(fieldName, value);
  }

  /**
   * Wrap (not copy) the given map as the backing store.
   *
   * @param fieldNameToValueMap the backing map; renamed from fieldIdToValueMap for consistency
   *        with the field it initializes
   */
  public MultiFieldPersistentDataset(final Map<String, T> fieldNameToValueMap) {
    this.fieldNameToValueMap = fieldNameToValueMap;
  }

  /** {@inheritDoc} */
  @Override
  public void addValue(final String fieldName, final T value) {
    fieldNameToValueMap.put(fieldName, value);
  }

  /** {@inheritDoc} */
  @Override
  public void addValues(final Map<String, T> values) {
    fieldNameToValueMap.putAll(values);
  }

  /** {@inheritDoc} */
  @Override
  public T getValue(final String fieldName) {
    return fieldNameToValueMap.get(fieldName);
  }

  /** {@inheritDoc} */
  @Override
  public Map<String, T> getValues() {
    return fieldNameToValueMap;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/PersistenceEncoding.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class models all of the necessary information for persisting data in the data store * (following the common index model) and is used internally within GeoWave as an intermediary * object between the direct storage format and the native data format. It is the responsibility of * the data adapter to convert to and from this object and the native object. It does not contain * any information about the entry in a particular index and is used when writing an entry, prior to * its existence in an index. */ public class PersistenceEncoding { private Short internalAdapterId; private final byte[] dataId; protected final PersistentDataset commonData; protected final PersistentDataset unknownData; protected static final Logger LOGGER = LoggerFactory.getLogger(PersistenceEncoding.class); protected static final double DOUBLE_TOLERANCE = 1E-12d; public PersistenceEncoding( final Short internalAdapterId, final byte[] dataId, final PersistentDataset commonData, final PersistentDataset unknownData) { this.internalAdapterId = internalAdapterId; this.dataId = dataId; this.commonData = commonData; this.unknownData = unknownData; } public short getInternalAdapterId() { return internalAdapterId; } public void setInternalAdapterId(final short internalAdapterId) { this.internalAdapterId = internalAdapterId; } /** * Return the data that has been persisted but not identified by a field reader * * @return the unknown data that is yet to be identified by a field reader */ public PersistentDataset getUnknownData() { return 
unknownData; } /** * Return the common index data that has been persisted * * @return the common index data */ public PersistentDataset getCommonData() { return commonData; } /** * Return the data ID, data ID's should be unique per adapter * * @return the data ID */ public byte[] getDataId() { return dataId; } public boolean isDeduplicationEnabled() { return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/PersistentDataset.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; import java.util.Map; public interface PersistentDataset { /** * Add the field ID/value pair to this data set. Do not overwrite. * * @param value the field ID/value pair to add */ void addValue(String fieldName, T value); /** Add several values to the data set. */ void addValues(Map values); /** * Given a field ID, get the associated value * * @param fieldName the field ID * @return the stored field value, null if this does not contain a value for the ID */ T getValue(String fieldName); /** * Get all of the values from this persistent data set * * @return all of the value */ Map getValues(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/PersistentValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; /** * This represents a single value in the GeoWave data store as the value plus the field ID pair * * @param The binding class for this value */ public class PersistentValue { private final String fieldName; private final T value; public PersistentValue(final String fieldName, final T value) { this.fieldName = fieldName; this.value = value; } /** * Return the field name * * @return the field name */ public String getFieldName() { return fieldName; } /** * Return the value * * @return the value */ public T getValue() { return value; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/SingleFieldPersistentDataset.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; import java.util.Collections; import java.util.Map; import java.util.Map.Entry; /** * This is a basic mapping of field ID to native field type. "Native" in this sense can be to either * the data adapter or the common index, depending on whether it is in the common index or is an * extended field. 
*
 * @param <T> The most specific generalization for the type for all of the values in this dataset.
 */
// Type parameters had been stripped by extraction; restored here.
public class SingleFieldPersistentDataset<T> implements PersistentDataset<T> {
  // At most one field/value pair is held; both start null.
  private String fieldName;
  private T value;

  public SingleFieldPersistentDataset() {}

  public SingleFieldPersistentDataset(final String fieldName, final T value) {
    this();
    this.fieldName = fieldName;
    this.value = value;
  }

  /** {@inheritDoc} Overwrites the single stored pair. */
  @Override
  public void addValue(final String fieldName, final T value) {
    this.fieldName = fieldName;
    this.value = value;
  }

  /** {@inheritDoc} Only the first entry of a non-empty map is retained. */
  @Override
  public void addValues(final Map<String, T> values) {
    if (!values.isEmpty()) {
      final Entry<String, T> e = values.entrySet().iterator().next();
      fieldName = e.getKey();
      value = e.getValue();
    }
  }

  /** {@inheritDoc} A null argument matches only when the stored field name is also null. */
  @Override
  public T getValue(final String fieldName) {
    if ((this.fieldName == null) && (fieldName == null)) {
      return value;
    }
    if ((this.fieldName != null) && this.fieldName.equals(fieldName)) {
      return value;
    }
    return null;
  }

  /** {@inheritDoc} */
  @Override
  public Map<String, T> getValues() {
    return Collections.singletonMap(fieldName, value);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/UnreadFieldDataList.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo; import org.locationtech.geowave.core.store.flatten.FlattenedUnreadData; public class UnreadFieldDataList implements FlattenedUnreadData { private final List unreadData; private List cachedRead; public UnreadFieldDataList(final List unreadData) { this.unreadData = unreadData; } @Override public List finishRead() { if (cachedRead == null) { cachedRead = new ArrayList<>(); for (final FlattenedUnreadData d : unreadData) { cachedRead.addAll(d.finishRead()); } } return cachedRead; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/ArrayReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data.field;

import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.field.ArrayWriter.Encoding;
import org.locationtech.geowave.core.store.util.GenericTypeResolver;

/** This class contains the basic array reader field types */
// NOTE(review): generic parameterization (e.g. ArrayReader<FieldType> implements
// FieldReader<FieldType[]>, and a generic SerializationHelper) appears stripped by extraction;
// "FieldType" below reads as a type parameter — confirm against the original source.
public class ArrayReader implements FieldReader {
  // Per-element reader; each array entry is decoded by delegating to this reader.
  private final FieldReader reader;

  public ArrayReader(final FieldReader reader) {
    this.reader = reader;
  }

  /**
   * Decode an array field. The first byte selects the layout: fixed-size entries or
   * variable-size entries; lengths are varint-encoded in the current serialization version.
   */
  @Override
  public FieldType[] readField(final byte[] fieldData) {
    if ((fieldData == null) || (fieldData.length == 0)) {
      return null;
    }
    final byte encoding = fieldData[0];
    // Helper binds "how do I read a length" and "how do I read one element" so the two layout
    // parsers can be shared between serialization versions.
    final SerializationHelper serializationHelper = new SerializationHelper() {
      @Override
      public int readUnsignedInt(final ByteBuffer buffer) {
        return VarintUtils.readUnsignedInt(buffer);
      }

      @Override
      public FieldType readField(final FieldReader reader, final byte[] bytes) {
        return reader.readField(bytes);
      }
    };
    // try to read the encoding first
    if (encoding == Encoding.FIXED_SIZE_ENCODING.getByteEncoding()) {
      return readFixedSizeField(fieldData, serializationHelper);
    } else if (encoding == Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()) {
      return readVariableSizeField(fieldData, serializationHelper);
    }
    // class type not supported!
    // to be safe, treat as variable size
    return readVariableSizeField(fieldData, serializationHelper);
  }

  /**
   * Version-aware decode: older serialization versions stored lengths as 4-byte ints rather than
   * varints, and element reads are forwarded with the version.
   */
  @Override
  public FieldType[] readField(final byte[] fieldData, final byte serializationVersion) {
    if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {
      final SerializationHelper serializationHelper = new SerializationHelper() {
        @Override
        public int readUnsignedInt(final ByteBuffer buffer) {
          // Legacy format: plain 4-byte int lengths.
          return buffer.getInt();
        }

        @Override
        public FieldType readField(final FieldReader reader, final byte[] bytes) {
          return reader.readField(bytes, serializationVersion);
        }
      };
      final byte encoding = fieldData[0];
      // try to read the encoding first
      if (encoding == Encoding.FIXED_SIZE_ENCODING.getByteEncoding()) {
        return readFixedSizeField(fieldData, serializationHelper);
      } else if (encoding == Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()) {
        return readVariableSizeField(fieldData, serializationHelper);
      }
      // class type not supported!
      // to be safe, treat as variable size
      return readVariableSizeField(fieldData, serializationHelper);
    } else {
      return readField(fieldData);
    }
  }

  /**
   * Fixed-size layout: [encoding byte][entry size][repeating: 1 presence-bitmap byte followed by
   * up to 8 fixed-size entries]. Bit i of each bitmap byte set means entry i is present; a clear
   * bit contributes a null element.
   */
  @SuppressWarnings("unchecked")
  protected FieldType[] readFixedSizeField(
      final byte[] fieldData,
      final SerializationHelper serializationHelper) {
    if (fieldData.length < 1) {
      return null;
    }
    final List result = new ArrayList<>();
    final ByteBuffer buff = ByteBuffer.wrap(fieldData);
    // this would be bad
    if (buff.get() != Encoding.FIXED_SIZE_ENCODING.getByteEncoding()) {
      return null;
    }
    final int bytesPerEntry = serializationHelper.readUnsignedInt(buff);
    // Single reusable scratch buffer; clamped so a truncated tail cannot overflow the read.
    final byte[] data = new byte[Math.min(bytesPerEntry, buff.remaining())];
    while (buff.hasRemaining()) {
      final int header = buff.get();
      for (int i = 0; i < 8; i++) {
        // Presence bit for position i within this 8-entry group (equivalent to 1 << i).
        final int mask = (int) Math.pow(2.0, i);
        if ((header & mask) != 0) {
          if (buff.hasRemaining()) {
            buff.get(data);
            result.add(serializationHelper.readField(reader, data));
          } else {
            break;
          }
        } else {
          result.add(null);
        }
      }
    }
    // Reflectively build a correctly-typed array from the element reader's resolved type argument.
    final FieldType[] resultArray =
        (FieldType[]) Array.newInstance(
            GenericTypeResolver.resolveTypeArgument(reader.getClass(), FieldReader.class),
            result.size());
    return result.toArray(resultArray);
  }

  /**
   * Variable-size layout: [encoding byte][repeating: length, then that many element bytes]. A
   * length of zero (or negative) contributes a null element.
   */
  @SuppressWarnings("unchecked")
  protected FieldType[] readVariableSizeField(
      final byte[] fieldData,
      final SerializationHelper serializationHelper) {
    if ((fieldData == null) || (fieldData.length == 0)) {
      return null;
    }
    final List result = new ArrayList<>();
    final ByteBuffer buff = ByteBuffer.wrap(fieldData);
    // this would be bad
    if (buff.get() != Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()) {
      return null;
    }
    while (buff.remaining() >= 1) {
      final int size = serializationHelper.readUnsignedInt(buff);
      if (size > 0) {
        // safeRead bounds the read against the remaining buffer.
        final byte[] bytes = ByteArrayUtils.safeRead(buff, size);
        result.add(serializationHelper.readField(reader, bytes));
      } else {
        result.add(null);
      }
    }
    final FieldType[] resultArray =
        (FieldType[]) Array.newInstance(
            GenericTypeResolver.resolveTypeArgument(reader.getClass(), FieldReader.class),
            result.size());
    return result.toArray(resultArray);
  }

  // Abstracts the differences between serialization versions: how lengths are encoded and how a
  // single element is decoded.
  private static interface SerializationHelper {
    public int readUnsignedInt(ByteBuffer buffer);

    public FieldType readField(FieldReader reader, byte[] bytes);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/ArrayWriter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; /** This class contains the basic object array writer field types */ public abstract class ArrayWriter implements FieldWriter { public static enum Encoding { FIXED_SIZE_ENCODING((byte) 0), VARIABLE_SIZE_ENCODING((byte) 1); private final byte encoding; Encoding(final byte encoding) { this.encoding = encoding; } public byte getByteEncoding() { return encoding; } } private final FieldWriter writer; public ArrayWriter(final FieldWriter writer) { this.writer = writer; } protected byte[] writeFixedSizeField(final FieldType[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } final byte[][] byteData = getBytes(fieldValue); int bytesPerEntry = 0; for (final byte[] bytes : byteData) { if (bytes.length > 0) { bytesPerEntry = bytes.length; } } final ByteBuffer buf = ByteBuffer.allocate( 1 + VarintUtils.unsignedIntByteLength(bytesPerEntry) + (int) Math.ceil(fieldValue.length / 8.0) + getLength(byteData)); // this is a header value to indicate how data should be read/written buf.put(Encoding.FIXED_SIZE_ENCODING.getByteEncoding()); // this is a header value to indicate the size of each entry VarintUtils.writeUnsignedInt(bytesPerEntry, buf); for (int i = 0; i < fieldValue.length; i += 8) { int header = 255; final int headerIdx = buf.position(); buf.position(headerIdx + 1); for (int j = 0; ((i + j) < fieldValue.length) && (j < 8); j++) { final int mask = ~((int) Math.pow(2.0, j)); if (fieldValue[i + j] == null) { header = header & mask; } else { buf.put(byteData[i + j]); } } 
buf.put(headerIdx, (byte) header); } return buf.array(); } protected byte[] writeVariableSizeField(final FieldType[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } final byte[][] bytes = getBytes(fieldValue); int sizeBytes = 0; for (final byte[] entry : bytes) { sizeBytes += VarintUtils.unsignedIntByteLength(entry.length); } final ByteBuffer buf = ByteBuffer.allocate(1 + sizeBytes + getLength(bytes)); // this is a header value to indicate how data should be read/written buf.put(Encoding.VARIABLE_SIZE_ENCODING.getByteEncoding()); for (final byte[] entry : bytes) { VarintUtils.writeUnsignedInt(entry.length, buf); if (entry.length > 0) { buf.put(entry); } } return buf.array(); } private byte[][] getBytes(final FieldType[] fieldData) { final byte[][] bytes = new byte[fieldData.length][]; for (int i = 0; i < fieldData.length; i++) { if (fieldData[i] == null) { bytes[i] = new byte[] {}; } else { bytes[i] = writer.writeField(fieldData[i]); } } return bytes; } private int getLength(final byte[][] bytes) { int length = 0; for (final byte[] entry : bytes) { length += entry.length; } return length; } public static class FixedSizeObjectArrayWriter extends ArrayWriter { public FixedSizeObjectArrayWriter(final FieldWriter writer) { super(writer); } @Override public byte[] writeField(final FieldType[] fieldValue) { return super.writeFixedSizeField(fieldValue); } } public static class VariableSizeObjectArrayWriter extends ArrayWriter { public VariableSizeObjectArrayWriter(final FieldWriter writer) { super(writer); } @Override public byte[] writeField(final FieldType[] fieldValue) { return super.writeVariableSizeField(fieldValue); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import java.util.function.Function; /** * This interface deserializes a field from binary data * * @param */ public interface FieldReader extends Function { /** * Deserializes the field from binary data * * @param fieldData The binary serialization of the data object * @return The deserialization of the entry */ public FieldType readField(byte[] fieldData); public default FieldType readField(final byte[] fieldData, final byte serializationVersion) { return readField(fieldData); } @Override default FieldType apply(final byte[] fieldData) { return readField(fieldData, FieldUtils.SERIALIZATION_VERSION); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldSerializationProviderSpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; public interface FieldSerializationProviderSpi { public FieldReader getFieldReader(); public FieldWriter getFieldWriter(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.util.GenericTypeResolver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class has a set of convenience methods to determine the appropriate field reader and writer * for a given field type (Class) */ public class FieldUtils { public static final byte SERIALIZATION_VERSION = 0x1; private static final Logger LOGGER = LoggerFactory.getLogger(FieldUtils.class); private static Map, FieldReader> fieldReaderRegistry = null; private static Map, FieldWriter> fieldWriterRegistry = null; private static synchronized Map, FieldReader> getRegisteredFieldReaders() { if (fieldReaderRegistry == null) { initRegistry(); } return fieldReaderRegistry; } private static synchronized Map, FieldWriter> getRegisteredFieldWriters() { if (fieldWriterRegistry == null) { initRegistry(); } return fieldWriterRegistry; } private static synchronized void initRegistry() { fieldReaderRegistry = new HashMap<>(); fieldWriterRegistry = new HashMap<>(); final Iterator serializationProviders = new SPIServiceRegistry(FieldSerializationProviderSpi.class).load( FieldSerializationProviderSpi.class); while (serializationProviders.hasNext()) { final FieldSerializationProviderSpi serializationProvider = serializationProviders.next(); if (serializationProvider != null) { final Class type = GenericTypeResolver.resolveTypeArgument( serializationProvider.getClass(), 
FieldSerializationProviderSpi.class); final FieldReader reader = serializationProvider.getFieldReader(); if (reader != null) { if (fieldReaderRegistry.containsKey(type)) { LOGGER.warn( "Field reader already registered for " + type + "; not able to add " + reader); } else { fieldReaderRegistry.put(type, reader); } } final FieldWriter writer = serializationProvider.getFieldWriter(); if (writer != null) { if (fieldWriterRegistry.containsKey(type)) { LOGGER.warn( "Field writer already registered for " + type + "; not able to add " + writer); } else { fieldWriterRegistry.put(type, writer); } } } } } @SuppressWarnings("unchecked") public static FieldReader getDefaultReaderForClass(final Class myClass) { final Map, FieldReader> internalFieldReaders = getRegisteredFieldReaders(); // try concrete class FieldReader reader = (FieldReader) internalFieldReaders.get(myClass); if (reader != null) { return reader; } // if the concrete class lookup failed, try inheritance synchronized (internalFieldReaders) { reader = (FieldReader) getAssignableValueFromClassMap(myClass, internalFieldReaders); if (reader != null) { internalFieldReaders.put(myClass, reader); } } return reader; } @SuppressWarnings("unchecked") public static FieldWriter getDefaultWriterForClass(final Class myClass) { final Map, FieldWriter> internalFieldWriters = getRegisteredFieldWriters(); // try concrete class FieldWriter writer = (FieldWriter) internalFieldWriters.get(myClass); if (writer != null) { return writer; } // if the concrete class lookup failed, try inheritance synchronized (internalFieldWriters) { writer = (FieldWriter) getAssignableValueFromClassMap(myClass, internalFieldWriters); if (writer != null) { internalFieldWriters.put(myClass, writer); } } return writer; } public static T getAssignableValueFromClassMap( final Class myClass, final Map, T> classToAssignableValueMap) { // loop through the map to discover the first class that is assignable // from myClass for (final Entry, T> candidate : 
classToAssignableValueMap.entrySet()) { if (candidate.getKey().isAssignableFrom(myClass)) { return candidate.getValue(); } } return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/FieldWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import java.util.function.Function; /** * This interface serializes a field's value into a byte array * * * @param */ public interface FieldWriter extends Function { /** * Serializes the entry into binary data that will be stored as the value for the row * * @param fieldValue The data object to serialize * @return The binary serialization of the data object */ public byte[] writeField(FieldType fieldValue); @Override default byte[] apply(final FieldType fieldValue) { return writeField(fieldValue); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/PersistableReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistableFactory; public class PersistableReader implements FieldReader { private final short classId; public PersistableReader(final short classId) { super(); this.classId = classId; } @Override public F readField(final byte[] fieldData) { final F newInstance = (F) PersistableFactory.getInstance().newInstance(classId); newInstance.fromBinary(fieldData); return newInstance; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/PersistableWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import org.locationtech.geowave.core.index.persist.Persistable; public class PersistableWriter implements FieldWriter { @Override public byte[] writeField(final F fieldValue) { if (fieldValue == null) { return new byte[0]; } return fieldValue.toBinary(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigDecimalArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.math.BigDecimal; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.BigDecimalSerializationProvider.BigDecimalReader; import org.locationtech.geowave.core.store.data.field.base.BigDecimalSerializationProvider.BigDecimalWriter; public class BigDecimalArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new BigDecimalArrayReader(); } @Override public FieldWriter getFieldWriter() { return new BigDecimalArrayWriter(); } private static class BigDecimalArrayReader extends ArrayReader { public BigDecimalArrayReader() { super(new BigDecimalReader()); } } private static class BigDecimalArrayWriter extends VariableSizeObjectArrayWriter { public BigDecimalArrayWriter() { super(new BigDecimalWriter()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigDecimalSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data.field.base;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;

/**
 * SPI provider for {@link BigDecimal} fields. The serialized form is a signed varint scale
 * followed by the unscaled value's two's-complement bytes.
 */
public class BigDecimalSerializationProvider implements FieldSerializationProviderSpi<BigDecimal> {
  @Override
  public FieldReader<BigDecimal> getFieldReader() {
    return new BigDecimalReader();
  }

  @Override
  public FieldWriter<BigDecimal> getFieldWriter() {
    return new BigDecimalWriter();
  }

  protected static class BigDecimalReader implements FieldReader<BigDecimal> {
    @Override
    public BigDecimal readField(final byte[] fieldData) {
      // minimum valid payload: 1 varint scale byte + 1 unscaled byte
      if ((fieldData == null) || (fieldData.length < 2)) {
        return null;
      }
      final ByteBuffer bb = ByteBuffer.wrap(fieldData);
      final int scale = VarintUtils.readSignedInt(bb);
      final byte[] unscaled = new byte[bb.remaining()];
      bb.get(unscaled);
      return new BigDecimal(new BigInteger(unscaled), scale);
    }

    @Override
    public BigDecimal readField(final byte[] fieldData, final byte serializationVersion) {
      if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) {
        // legacy format stored the scale as a raw 4-byte int
        // NOTE(review): this guard allows 2-4 byte inputs which would underflow getInt();
        // a minimum legacy payload is 5 bytes — confirm before tightening
        if ((fieldData == null) || (fieldData.length < 2)) {
          return null;
        }
        final ByteBuffer bb = ByteBuffer.wrap(fieldData);
        final int scale = bb.getInt();
        final byte[] unscaled = new byte[bb.remaining()];
        bb.get(unscaled);
        return new BigDecimal(new BigInteger(unscaled), scale);
      } else {
        return readField(fieldData);
      }
    }
  }

  protected static class BigDecimalWriter implements FieldWriter<BigDecimal> {
    @Override
    public byte[] writeField(final BigDecimal fieldValue) {
      if (fieldValue == null) {
        return new byte[] {};
      }
      final byte[] unscaled = fieldValue.unscaledValue().toByteArray();
      final ByteBuffer buf =
          ByteBuffer.allocate(
              VarintUtils.signedIntByteLength(fieldValue.scale()) + unscaled.length);
      VarintUtils.writeSignedInt(fieldValue.scale(), buf);
      buf.put(unscaled);
      return buf.array();
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigIntegerArraySerializationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.math.BigInteger; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.BigIntegerSerializationProvider.BigIntegerReader; import org.locationtech.geowave.core.store.data.field.base.BigIntegerSerializationProvider.BigIntegerWriter; public class BigIntegerArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new BigIntegerArrayReader(); } @Override public FieldWriter getFieldWriter() { return new BigIntegerArrayWriter(); } private static class BigIntegerArrayReader extends ArrayReader { public BigIntegerArrayReader() { super(new BigIntegerReader()); } } private static class BigIntegerArrayWriter extends VariableSizeObjectArrayWriter { public BigIntegerArrayWriter() { super(new BigIntegerWriter()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BigIntegerSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.math.BigInteger; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class BigIntegerSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new BigIntegerReader(); } @Override public FieldWriter getFieldWriter() { return new BigIntegerWriter(); } protected static class BigIntegerReader implements FieldReader { @Override public BigInteger readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 4)) { return null; } return new BigInteger(fieldData); } } protected static class BigIntegerWriter implements FieldWriter { @Override public byte[] writeField(final BigInteger fieldValue) { if (fieldValue == null) { return new byte[] {}; } return fieldValue.toByteArray(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BooleanArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.BooleanSerializationProvider.BooleanReader; import org.locationtech.geowave.core.store.data.field.base.BooleanSerializationProvider.BooleanWriter; public class BooleanArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new BooleanArrayReader(); } @Override public FieldWriter getFieldWriter() { return new BooleanArrayWriter(); } private static class BooleanArrayReader extends ArrayReader { public BooleanArrayReader() { super(new BooleanReader()); } } private static class BooleanArrayWriter extends FixedSizeObjectArrayWriter { public BooleanArrayWriter() { super(new BooleanWriter()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/BooleanSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; public class BooleanSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new BooleanReader(); } @Override public FieldWriter getFieldWriter() { return new BooleanWriter(); } protected static class BooleanReader implements FieldReader { @SuppressFBWarnings( value = {"NP_BOOLEAN_RETURN_NULL"}, justification = "matches pattern of other read* methods") @Override public Boolean readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 1)) { return null; } return fieldData[0] > 0; } } protected static class BooleanWriter implements FieldWriter { @Override public byte[] writeField(final Boolean fieldValue) { return new byte[] {((fieldValue == null) || !fieldValue) ? (byte) 0 : (byte) 1}; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ByteArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.apache.commons.lang3.ArrayUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class ByteArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new ByteArrayReader(); } @Override public FieldWriter getFieldWriter() { return new ByteArrayWriter(); } public static class ByteArrayReader implements FieldReader { @Override public Byte[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 1)) { return null; } return ArrayUtils.toObject(fieldData); } } public static class ByteArrayWriter implements FieldWriter { @Override public byte[] writeField(final Byte[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } return ArrayUtils.toPrimitive(fieldValue); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ByteSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class ByteSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new ByteReader(); } @Override public FieldWriter getFieldWriter() { return new ByteWriter(); } private static class ByteReader implements FieldReader { @Override public Byte readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 1)) { return null; } return fieldData[0]; } } public static class ByteWriter implements FieldWriter { @Override public byte[] writeField(final Byte fieldValue) { if (fieldValue == null) { return new byte[] {}; } return new byte[] {fieldValue}; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/DoubleArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.DoubleSerializationProvider.DoubleReader; import org.locationtech.geowave.core.store.data.field.base.DoubleSerializationProvider.DoubleWriter; public class DoubleArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new DoubleArrayReader(); } @Override public FieldWriter getFieldWriter() { return new DoubleArrayWriter(); } private static class DoubleArrayReader extends ArrayReader { public DoubleArrayReader() { super(new DoubleReader()); } } private static class DoubleArrayWriter extends FixedSizeObjectArrayWriter { public DoubleArrayWriter() { super(new DoubleWriter()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/DoubleSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class DoubleSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new DoubleReader(); } @Override public FieldWriter getFieldWriter() { return new DoubleWriter(); } protected static class DoubleReader implements FieldReader { @Override public Double readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 8)) { return null; } return ByteBuffer.wrap(fieldData).getDouble(); } } protected static class DoubleWriter implements FieldWriter { @Override public byte[] writeField(final Double fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(8); buf.putDouble(fieldValue); return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/FloatArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.FloatSerializationProvider.FloatReader; import org.locationtech.geowave.core.store.data.field.base.FloatSerializationProvider.FloatWriter; public class FloatArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new FloatArrayReader(); } @Override public FieldWriter getFieldWriter() { return new FloatArrayWriter(); } private static class FloatArrayReader extends ArrayReader { public FloatArrayReader() { super(new FloatReader()); } } private static class FloatArrayWriter extends FixedSizeObjectArrayWriter { public FloatArrayWriter() { super(new FloatWriter()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/FloatSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class FloatSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new FloatReader(); } @Override public FieldWriter getFieldWriter() { return new FloatWriter(); } protected static class FloatReader implements FieldReader { @Override public Float readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 4)) { return null; } return ByteBuffer.wrap(fieldData).getFloat(); } } protected static class FloatWriter implements FieldWriter { @Override public byte[] writeField(final Float fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(4); buf.putFloat(fieldValue); return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/IntegerArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.IntegerSerializationProvider.IntegerReader; public class IntegerArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new IntegerArrayReader(); } @Override public FieldWriter getFieldWriter() { return new IntegerArrayWriter(); } // @see PrimitiveIntArraySerializationProvider#PrimitiveIntArrayReader private static class IntegerArrayReader implements FieldReader { @Override public Integer[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } final ByteBuffer buff = ByteBuffer.wrap(fieldData); final int count = VarintUtils.readUnsignedInt(buff); ByteArrayUtils.verifyBufferSize(buff, count); final Integer[] result = new Integer[count]; for (int i = 0; i < count; i++) { if (buff.get() > 0) { result[i] = VarintUtils.readSignedInt(buff); } else { result[i] = null; } } return result; } @Override public Integer[] readField(final byte[] fieldData, final byte serializationVersion) { if 
(serializationVersion < FieldUtils.SERIALIZATION_VERSION) { return new ArrayReader<>(new IntegerReader()).readField(fieldData, serializationVersion); } else { return readField(fieldData); } } } // @see PrimitiveIntArraySerializationProvider.PrimitiveIntArrayWriter private static class IntegerArrayWriter implements FieldWriter { @Override public byte[] writeField(final Integer[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length); for (final Integer value : fieldValue) { bytes++; if (value != null) { bytes += VarintUtils.signedIntByteLength(value); } } final ByteBuffer buf = ByteBuffer.allocate(bytes); VarintUtils.writeUnsignedInt(fieldValue.length, buf); for (final Integer value : fieldValue) { if (value == null) { buf.put((byte) 0x0); } else { buf.put((byte) 0x1); VarintUtils.writeSignedInt(value, buf); } } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/IntegerSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class IntegerSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new IntegerReader(); } @Override public FieldWriter getFieldWriter() { return new IntegerWriter(); } protected static class IntegerReader implements FieldReader { @Override public Integer readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } return VarintUtils.readSignedInt(ByteBuffer.wrap(fieldData)); } @Override public Integer readField(final byte[] fieldData, final byte serializationVersion) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { return ByteBuffer.wrap(fieldData).getInt(); } else { return readField(fieldData); } } } protected static class IntegerWriter implements FieldWriter { @Override public byte[] writeField(final Integer fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.signedIntByteLength(fieldValue)); VarintUtils.writeSignedInt(fieldValue, buf); return buf.array(); } } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/LongArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.LongSerializationProvider.LongReader; public class LongArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new LongArrayReader(); } @Override public FieldWriter getFieldWriter() { return new LongArrayWriter(); } // @see PrimitiveLongArraySerializationProvider.PrimitiveLongArrayReader private static class LongArrayReader implements FieldReader { @Override public Long[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } final ByteBuffer buff = ByteBuffer.wrap(fieldData); final int count = VarintUtils.readUnsignedInt(buff); ByteArrayUtils.verifyBufferSize(buff, count); final Long[] result = new Long[count]; for (int i = 0; i < count; i++) { if (buff.get() > 0) { result[i] = VarintUtils.readSignedLong(buff); } else { result[i] = null; } } return result; } @Override public Long[] readField(final byte[] fieldData, final byte serializationVersion) { if ((fieldData == null) || (fieldData.length == 
0)) { return null; } if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { return new ArrayReader<>(new LongReader()).readField(fieldData, serializationVersion); } else { return readField(fieldData); } } } // @see PrimitiveLongArraySerializationProvider.PrimitiveLongArrayWriter private static class LongArrayWriter implements FieldWriter { @Override public byte[] writeField(final Long[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length); for (final Long value : fieldValue) { bytes++; if (value != null) { bytes += VarintUtils.signedLongByteLength(value); } } final ByteBuffer buf = ByteBuffer.allocate(bytes); VarintUtils.writeUnsignedInt(fieldValue.length, buf); for (final Long value : fieldValue) { if (value == null) { buf.put((byte) 0x0); } else { buf.put((byte) 0x1); VarintUtils.writeSignedLong(value, buf); } } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/LongSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class LongSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new LongReader(); } @Override public FieldWriter getFieldWriter() { return new LongWriter(); } protected static class LongReader implements FieldReader { @Override public Long readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } return VarintUtils.readSignedLong(ByteBuffer.wrap(fieldData)); } @Override public Long readField(final byte[] fieldData, final byte serializationVersion) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { return ByteBuffer.wrap(fieldData).getLong(); } else { return readField(fieldData); } } } protected static class LongWriter implements FieldWriter { public LongWriter() { super(); } @Override public byte[] writeField(final Long fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(VarintUtils.signedLongByteLength(fieldValue)); VarintUtils.writeSignedLong(fieldValue, buf); return buf.array(); } } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveBooleanArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import java.util.BitSet; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveBooleanArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveBooleanArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveBooleanArrayWriter(); } private static class PrimitiveBooleanArrayReader implements FieldReader { @Override public boolean[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } final ByteBuffer buff = ByteBuffer.wrap(fieldData); final int count = VarintUtils.readUnsignedInt(buff); final BitSet bits = BitSet.valueOf(buff); final boolean[] result = new boolean[count]; for (int i = 0; i < bits.length(); i++) { result[i] = bits.get(i); } return result; } } private static class PrimitiveBooleanArrayWriter implements FieldWriter { @Override public byte[] writeField(final boolean[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } final BitSet bits = new BitSet(fieldValue.length); for (int i = 0; i < fieldValue.length; i++) { bits.set(i, fieldValue[i]); } final byte[] bytes = bits.toByteArray(); int size = VarintUtils.unsignedIntByteLength(fieldValue.length); size += bytes.length; final ByteBuffer buf = 
ByteBuffer.allocate(size); VarintUtils.writeUnsignedInt(fieldValue.length, buf); buf.put(bytes); return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveByteArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveByteArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveByteArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveByteArrayWriter(); } private static class PrimitiveByteArrayReader implements FieldReader { @Override public byte[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 1)) { return null; } return fieldData; } } private static class PrimitiveByteArrayWriter implements FieldWriter { @Override public byte[] writeField(final byte[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } return fieldValue; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveDoubleArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import java.nio.DoubleBuffer; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveDoubleArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveDoubleArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveDoubleArrayWriter(); } private static class PrimitiveDoubleArrayReader implements FieldReader { @Override public double[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 8)) { return null; } final DoubleBuffer buff = ByteBuffer.wrap(fieldData).asDoubleBuffer(); final double[] result = new double[buff.remaining()]; buff.get(result); return result; } } private static class PrimitiveDoubleArrayWriter implements FieldWriter { @Override public byte[] writeField(final double[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(8 * fieldValue.length); for (final double value : fieldValue) { buf.putDouble(value); } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveFloatArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import java.nio.FloatBuffer; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveFloatArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveFloatArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveFloatArrayWriter(); } private static class PrimitiveFloatArrayReader implements FieldReader { @Override public float[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 4)) { return null; } final FloatBuffer buff = ByteBuffer.wrap(fieldData).asFloatBuffer(); final float[] result = new float[buff.remaining()]; buff.get(result); return result; } } private static class PrimitiveFloatArrayWriter implements FieldWriter { @Override public byte[] writeField(final float[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(4 * fieldValue.length); for (final float value : fieldValue) { buf.putFloat(value); } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveIntArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import java.nio.IntBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveIntArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveIntArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveIntArrayWriter(); } private static class PrimitiveIntArrayReader implements FieldReader { @Override public int[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } final ByteBuffer buff = ByteBuffer.wrap(fieldData); final int count = VarintUtils.readUnsignedInt(buff); ByteArrayUtils.verifyBufferSize(buff, count); final int[] result = new int[count]; for (int i = 0; i < count; i++) { result[i] = VarintUtils.readSignedInt(buff); } return result; } @Override public int[] readField(final byte[] fieldData, final byte serializationVersion) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { final IntBuffer buff = ByteBuffer.wrap(fieldData).asIntBuffer(); final int[] result = new int[buff.remaining()]; buff.get(result); return result; } else 
{ return readField(fieldData); } } } private static class PrimitiveIntArrayWriter implements FieldWriter { @Override public byte[] writeField(final int[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length); for (final int value : fieldValue) { bytes += VarintUtils.signedIntByteLength(value); } final ByteBuffer buf = ByteBuffer.allocate(bytes); VarintUtils.writeUnsignedInt(fieldValue.length, buf); for (final int value : fieldValue) { VarintUtils.writeSignedInt(value, buf); } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveLongArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import java.nio.LongBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveLongArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveLongArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveLongArrayWriter(); } private static class PrimitiveLongArrayReader implements FieldReader { @Override public long[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } final ByteBuffer buff = ByteBuffer.wrap(fieldData); final int count = VarintUtils.readUnsignedInt(buff); ByteArrayUtils.verifyBufferSize(buff, count); final long[] result = new long[count]; for (int i = 0; i < count; i++) { result[i] = VarintUtils.readSignedLong(buff); } return result; } @Override public long[] readField(final byte[] fieldData, final byte serializationVersion) { if ((fieldData == null) || (fieldData.length == 0)) { return null; } if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { final LongBuffer buff = ByteBuffer.wrap(fieldData).asLongBuffer(); final long[] result = new long[buff.remaining()]; buff.get(result); return 
result; } else { return readField(fieldData); } } } private static class PrimitiveLongArrayWriter implements FieldWriter { public PrimitiveLongArrayWriter() { super(); } @Override public byte[] writeField(final long[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } int bytes = VarintUtils.unsignedIntByteLength(fieldValue.length); for (final long value : fieldValue) { bytes += VarintUtils.signedLongByteLength(value); } final ByteBuffer buf = ByteBuffer.allocate(bytes); VarintUtils.writeUnsignedInt(fieldValue.length, buf); for (final long value : fieldValue) { VarintUtils.writeSignedLong(value, buf); } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/PrimitiveShortArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import java.nio.ShortBuffer; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class PrimitiveShortArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new PrimitiveShortArrayReader(); } @Override public FieldWriter getFieldWriter() { return new PrimitiveShortArrayWriter(); } private static class PrimitiveShortArrayReader implements FieldReader { @Override public short[] readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 2)) { return null; } final ShortBuffer buff = ByteBuffer.wrap(fieldData).asShortBuffer(); final short[] result = new short[buff.remaining()]; buff.get(result); return result; } } private static class PrimitiveShortArrayWriter implements FieldWriter { @Override public byte[] writeField(final short[] fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(2 * fieldValue.length); for (final short value : fieldValue) { buf.putShort(value); } return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ShortArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.FixedSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.ShortSerializationProvider.ShortReader; import org.locationtech.geowave.core.store.data.field.base.ShortSerializationProvider.ShortWriter; public class ShortArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new ShortArrayReader(); } @Override public FieldWriter getFieldWriter() { return new ShortArrayWriter(); } private static class ShortArrayWriter extends FixedSizeObjectArrayWriter { public ShortArrayWriter() { super(new ShortWriter()); } } private static class ShortArrayReader extends ArrayReader { public ShortArrayReader() { super(new ShortReader()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/ShortSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import java.nio.ByteBuffer; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class ShortSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new ShortReader(); } @Override public FieldWriter getFieldWriter() { return new ShortWriter(); } protected static class ShortReader implements FieldReader { @Override public Short readField(final byte[] fieldData) { if ((fieldData == null) || (fieldData.length < 2)) { return null; } return ByteBuffer.wrap(fieldData).getShort(); } } protected static class ShortWriter implements FieldWriter { @Override public byte[] writeField(final Short fieldValue) { if (fieldValue == null) { return new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate(2); buf.putShort(fieldValue); return buf.array(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/StringArraySerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.store.data.field.ArrayReader; import org.locationtech.geowave.core.store.data.field.ArrayWriter.VariableSizeObjectArrayWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.data.field.base.StringSerializationProvider.StringReader; import org.locationtech.geowave.core.store.data.field.base.StringSerializationProvider.StringWriter; public class StringArraySerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new StringArrayReader(); } @Override public FieldWriter getFieldWriter() { return new StringArrayWriter(); } private static class StringArrayReader extends ArrayReader { public StringArrayReader() { super(new StringReader()); } } private static class StringArrayWriter extends VariableSizeObjectArrayWriter { public StringArrayWriter() { super(new StringWriter()); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/field/base/StringSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field.base; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class StringSerializationProvider implements FieldSerializationProviderSpi { @Override public FieldReader getFieldReader() { return new StringReader(); } @Override public FieldWriter getFieldWriter() { return new StringWriter(); } protected static class StringReader implements FieldReader { @Override public String readField(final byte[] fieldData) { if (fieldData == null) { return null; } return StringUtils.stringFromBinary(fieldData); // for field serialization ensure UTF-8? // return new String( // fieldData, // StringUtils.UTF8_CHAR_SET); } } protected static class StringWriter implements FieldWriter { @Override public byte[] writeField(final String fieldValue) { if (fieldValue == null) { return new byte[] {}; } return StringUtils.stringToBinary(fieldValue); // for field serialization ensure UTF-8? // return fieldValue.getBytes(StringUtils.UTF8_CHAR_SET); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/FallbackVisibilityHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import java.util.List; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.VisibilityHandler; /** * An implementation of visibility handler that will go through each visibility handler in a * provided array until it reaches a visibility that is non null. */ public class FallbackVisibilityHandler implements VisibilityHandler { private VisibilityHandler[] handlers; public FallbackVisibilityHandler() {} public FallbackVisibilityHandler(final VisibilityHandler[] handlers) { this.handlers = handlers; } @Override public String getVisibility( final DataTypeAdapter adapter, final T rowValue, final String fieldName) { for (VisibilityHandler handler : handlers) { final String visibility = handler.getVisibility(adapter, rowValue, fieldName); if (visibility != null) { return visibility; } } return null; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(handlers); } @Override public void fromBinary(byte[] bytes) { final List handlersList = PersistenceUtils.fromBinaryAsList(bytes); this.handlers = handlersList.toArray(new VisibilityHandler[handlersList.size()]); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/FieldLevelVisibilityHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.VisibilityHandler; /** * Determines the visibility of a field based on the value in another field in the entry. */ public class FieldLevelVisibilityHandler implements VisibilityHandler { private String visibilityAttribute; public FieldLevelVisibilityHandler() {} public FieldLevelVisibilityHandler(final String visibilityAttribute) { super(); this.visibilityAttribute = visibilityAttribute; } public String getVisibilityAttribute() { return visibilityAttribute; } /** * Determine the visibility of the given field based on the value of the visibility field. 
* * @param visibilityObject the value of the visibility field * @param fieldName the field to determine the visibility of * @return the visibility of the field */ protected String translateVisibility(final Object visibilityObject, final String fieldName) { if (visibilityObject == null) { return null; } return visibilityObject.toString(); } @Override public String getVisibility( final DataTypeAdapter adapter, final T entry, final String fieldName) { final Object visibilityAttributeValue = adapter.getFieldValue(entry, visibilityAttribute); return translateVisibility(visibilityAttributeValue, fieldName); } @Override public byte[] toBinary() { return StringUtils.stringToBinary(visibilityAttribute); } @Override public void fromBinary(final byte[] bytes) { visibilityAttribute = StringUtils.stringFromBinary(bytes); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/FieldMappedVisibilityHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import java.nio.ByteBuffer; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.VisibilityHandler; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Determines the visibility of a field by looking up the field in a visibility map. */ public class FieldMappedVisibilityHandler implements VisibilityHandler { private Map fieldVisibilities; public FieldMappedVisibilityHandler() {} public FieldMappedVisibilityHandler(final Map fieldVisibilities) { this.fieldVisibilities = fieldVisibilities; } @Override public String getVisibility( final DataTypeAdapter adapter, final T rowValue, final String fieldName) { if (fieldVisibilities.containsKey(fieldName)) { return fieldVisibilities.get(fieldName); } return null; } @Override public byte[] toBinary() { int byteLength = VarintUtils.unsignedIntByteLength(fieldVisibilities.size()); final List byteList = Lists.newArrayListWithCapacity(fieldVisibilities.size() * 2); for (Entry entry : fieldVisibilities.entrySet()) { final byte[] keyBytes = StringUtils.stringToBinary(entry.getKey()); byteList.add(keyBytes); byteLength += VarintUtils.unsignedIntByteLength(keyBytes.length); byteLength += keyBytes.length; final byte[] valueBytes = StringUtils.stringToBinary(entry.getValue()); byteList.add(valueBytes); byteLength += 
VarintUtils.unsignedIntByteLength(valueBytes.length); byteLength += valueBytes.length; } final ByteBuffer buffer = ByteBuffer.allocate(byteLength); VarintUtils.writeUnsignedInt(fieldVisibilities.size(), buffer); for (final byte[] bytes : byteList) { VarintUtils.writeUnsignedInt(bytes.length, buffer); buffer.put(bytes); } return buffer.array(); } @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final int size = VarintUtils.readUnsignedInt(buffer); fieldVisibilities = Maps.newHashMapWithExpectedSize(size); for (int i = 0; i < size; i++) { final byte[] keyBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(keyBytes); final String key = StringUtils.stringFromBinary(keyBytes); final byte[] valueBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(valueBytes); final String value = StringUtils.stringFromBinary(valueBytes); fieldVisibilities.put(key, value); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/GlobalVisibilityHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.VisibilityHandler; /** * Basic implementation of a visibility handler where the decision of visibility is not determined * on a per field or even per row basis, but rather a single visibility is globally assigned for * every field written. */ public class GlobalVisibilityHandler implements VisibilityHandler { private String globalVisibility; public GlobalVisibilityHandler() {} public GlobalVisibilityHandler(final String globalVisibility) { this.globalVisibility = globalVisibility; } @Override public String getVisibility( final DataTypeAdapter adapter, final T rowValue, final String fieldName) { return globalVisibility; } @Override public byte[] toBinary() { return StringUtils.stringToBinary(globalVisibility); } @Override public void fromBinary(byte[] bytes) { this.globalVisibility = StringUtils.stringFromBinary(bytes); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/JsonFieldLevelVisibilityHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import java.io.IOException; import java.util.Iterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; /** * Determines the visibility of a field by looking it up in a JSON object that's parsed from a * specified visibility field. * *

Example: { "geometry" : "S", "eventName": "TS"} * *

Json attributes can also be regular expressions, matching more than one field name. * *

Example: { "geo.*" : "S", ".*" : "TS"}. * *

The order of the expression must be considered if one expression is more general than * another, as shown in the example. The expression ".*" matches all attributes. The more specific * expression "geo.*" must be ordered first. */ public class JsonFieldLevelVisibilityHandler extends FieldLevelVisibilityHandler { private static final Logger LOGGER = LoggerFactory.getLogger(JsonFieldLevelVisibilityHandler.class); private final ObjectMapper mapper = new ObjectMapper(); public JsonFieldLevelVisibilityHandler() {} public JsonFieldLevelVisibilityHandler(final String visibilityAttribute) { super(visibilityAttribute); } @Override public String translateVisibility(final Object visibilityObject, final String fieldName) { if (visibilityObject == null) { return null; } try { final JsonNode attributeMap = mapper.readTree(visibilityObject.toString()); final JsonNode field = attributeMap.get(fieldName); if ((field != null) && field.isValueNode()) { return field.textValue(); } final Iterator attNameIt = attributeMap.fieldNames(); while (attNameIt.hasNext()) { final String attName = attNameIt.next(); if (fieldName.matches(attName)) { final JsonNode attNode = attributeMap.get(attName); if (attNode == null) { LOGGER.error( "Cannot parse visibility expression, JsonNode for attribute " + attName + " was null"); return null; } return attNode.textValue(); } } } catch (IOException | NullPointerException e) { LOGGER.error("Cannot parse visibility expression " + visibilityObject.toString(), e); } return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/UnconstrainedVisibilityHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.VisibilityHandler; /** * Basic implementation of a visibility handler to allow all access */ public class UnconstrainedVisibilityHandler implements VisibilityHandler { @Override public String getVisibility( final DataTypeAdapter adapter, final T rowValue, final String fieldName) { return ""; } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(byte[] bytes) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/VisibilityComposer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import java.util.Set; import java.util.stream.Collectors; import com.google.common.collect.Sets; /** * Builds up a simplified visibility expression from multiple input visibilities. */ public class VisibilityComposer { // Hash set would be faster, but tree set makes deterministic visibility expressions private final Set visibilityTokens = Sets.newTreeSet(); /** * Constructs an empty visibility composer. */ public VisibilityComposer() {} /** * Constructs a visibility composer with all of the tokens of another visibility composer. * * @param other the starting composer */ public VisibilityComposer(final VisibilityComposer other) { visibilityTokens.addAll(other.visibilityTokens); } /** * Add the given visibility expression to the composer. If possible, the expression will be broken * down into minimal components. * * @param visibility the visibility expression to add */ public void addVisibility(final String visibility) { if (visibility == null) { return; } VisibilityExpression.addMinimalTokens(visibility, visibilityTokens); } /** * Compose the simplified visibility expression. * * @return the simplified visibility expression */ public String composeVisibility() { return visibilityTokens.stream().collect(Collectors.joining(VisibilityExpression.AND_TOKEN)); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/data/visibility/VisibilityExpression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.visibility; import java.text.ParseException; import java.util.Set; import com.github.benmanes.caffeine.cache.CacheLoader; import com.github.benmanes.caffeine.cache.Caffeine; import com.github.benmanes.caffeine.cache.LoadingCache; public class VisibilityExpression { public static final String OR_TOKEN = "|"; public static final String AND_TOKEN = "&"; // Split before and after the delimiter character so that it gets // included in the token list private static final String SPLIT_DELIMITER = "((?<=%1$s)|(?=%1$s))"; private static final String TOKEN_SPLIT; static { final StringBuilder sb = new StringBuilder(); sb.append(String.format(SPLIT_DELIMITER, "\\(")).append("|"); sb.append(String.format(SPLIT_DELIMITER, "\\)")).append("|"); sb.append(String.format(SPLIT_DELIMITER, "\\" + AND_TOKEN)).append("|"); sb.append(String.format(SPLIT_DELIMITER, "\\" + OR_TOKEN)); TOKEN_SPLIT = sb.toString(); } private static LoadingCache expressionCache = Caffeine.newBuilder().maximumSize(50).build(new VisibilityCacheLoader()); private static class VisibilityCacheLoader implements CacheLoader { @Override public VisibilityNode load(final String key) throws Exception { final String[] tokens = key.split(TOKEN_SPLIT); if ((tokens.length == 0) || ((tokens.length == 1) && (tokens[0].length() == 0))) { return new NoAuthNode(); } return parseTokens(0, tokens.length - 1, tokens); } } private static VisibilityNode getCached(final String expression) { final String trimmed = expression.replaceAll("\\s+", ""); return expressionCache.get(trimmed); } public static boolean evaluate(final String expression, 
final Set auths) { if (expression.isEmpty()) { return true; } return getCached(expression).evaluate(auths); } public static void addMinimalTokens(final String expression, final Set tokens) { addMinimalTokens(getCached(expression), tokens); } private static void addMinimalTokens(final VisibilityNode parsed, final Set tokens) { if (parsed instanceof ValueNode) { tokens.add(((ValueNode) parsed).toString()); } else if (parsed instanceof AndNode) { addMinimalTokens(((AndNode) parsed).getLeft(), tokens); addMinimalTokens(((AndNode) parsed).getRight(), tokens); } else if (parsed instanceof OrNode) { tokens.add("(" + parsed.toString() + ")"); } } private static VisibilityNode parseTokens( final int startIndex, final int endIndex, final String[] tokens) throws ParseException { VisibilityNode left = null; String operator = null; for (int i = startIndex; i <= endIndex; i++) { VisibilityNode newNode = null; if (tokens[i].equals("(")) { final int matchingParen = findMatchingParen(i, tokens); if (matchingParen < 0) { throw new ParseException("Left parenthesis found with no matching right parenthesis.", i); } newNode = parseTokens(i + 1, matchingParen - 1, tokens); i = matchingParen; } else if (tokens[i].equals(")")) { throw new ParseException("Right parenthesis found with no matching left parenthesis.", i); } else if (AND_TOKEN.equals(tokens[i]) || OR_TOKEN.equals(tokens[i])) { if (left == null) { throw new ParseException("Operator found with no left operand.", i); } else if (operator != null) { throw new ParseException("Multiple sequential operators.", i); } else { operator = tokens[i]; } } else { newNode = new ValueNode(tokens[i]); } if (newNode != null) { if (left == null) { left = newNode; } else if (operator == null) { throw new ParseException("Multiple sequential operands with no operator.", i); } else if (operator.equals(AND_TOKEN)) { left = new AndNode(left, newNode); operator = null; } else { left = new OrNode(left, newNode); operator = null; } } } if (left == null) { 
return new NoAuthNode(); } else if (operator != null) { throw new ParseException("Operator found with no right operand.", endIndex); } return left; } private static int findMatchingParen(final int start, final String[] tokens) { int match = -1; int parenDepth = 1; for (int i = start + 1; i < tokens.length; i++) { if (tokens[i].equals(")")) { parenDepth--; if (parenDepth == 0) { match = i; break; } } else if (tokens[i].equals("(")) { parenDepth++; } } return match; } public abstract static class VisibilityNode { public abstract boolean evaluate(Set auths); } public abstract static class OperatorNode extends VisibilityNode { public abstract VisibilityNode getLeft(); public abstract VisibilityNode getRight(); @Override public String toString() { return getExpression(); } protected abstract String getOperator(); public String getExpression() { final StringBuilder sb = new StringBuilder(); return buildExpression(sb); } protected String buildExpression(final StringBuilder sb) { return buildExpression(sb, getOperator()); } protected String buildExpression(final StringBuilder sb, final String operator) { if (getLeft() instanceof OperatorNode) { sb.append("("); ((OperatorNode) getLeft()).buildExpression(sb); sb.append(")"); } else { sb.append(getLeft().toString()); } sb.append(operator); if (getRight() instanceof OperatorNode) { sb.append("("); ((OperatorNode) getRight()).buildExpression(sb); sb.append(")"); } else { sb.append(getRight().toString()); } return sb.toString(); } } public static class NoAuthNode extends VisibilityNode { @Override public boolean evaluate(final Set auths) { return true; } @Override public String toString() { return ""; } } public static class ValueNode extends VisibilityNode { private final String value; public ValueNode(final String value) { this.value = value; } @Override public boolean evaluate(final Set auths) { return auths.contains(value); } @Override public String toString() { return value; } } public static class AndNode extends 
OperatorNode { private final VisibilityNode left; private final VisibilityNode right; public AndNode(final VisibilityNode left, final VisibilityNode right) { this.left = left; this.right = right; } @Override public boolean evaluate(final Set auths) { return left.evaluate(auths) && right.evaluate(auths); } @Override public VisibilityNode getLeft() { return left; } @Override public VisibilityNode getRight() { return right; } @Override protected String getOperator() { return AND_TOKEN; } } public static class OrNode extends OperatorNode { private final VisibilityNode left; private final VisibilityNode right; public OrNode(final VisibilityNode left, final VisibilityNode right) { this.left = left; this.right = right; } @Override public boolean evaluate(final Set auths) { return left.evaluate(auths) || right.evaluate(auths); } @Override public VisibilityNode getLeft() { return left; } @Override public VisibilityNode getRight() { return right; } @Override protected String getOperator() { return OR_TOKEN; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/dimension/AbstractNumericDimensionField.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.dimension; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.dimension.bin.BinRange; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; abstract public class AbstractNumericDimensionField implements NumericDimensionField { protected NumericDimensionDefinition baseDefinition; public AbstractNumericDimensionField() {} public AbstractNumericDimensionField(final NumericDimensionDefinition baseDefinition) { this.baseDefinition = baseDefinition; } protected void setBaseDefinition(final NumericDimensionDefinition baseDefinition) { this.baseDefinition = baseDefinition; } @Override public double getRange() { return baseDefinition.getRange(); } @Override public double normalize(final double value) { return baseDefinition.normalize(value); } @Override public double denormalize(final double value) { return baseDefinition.denormalize(value); } @Override public BinRange[] getNormalizedRanges(final NumericData range) { return baseDefinition.getNormalizedRanges(range); } @Override public NumericRange getDenormalizedRange(final BinRange range) { return baseDefinition.getDenormalizedRange(range); } @Override public int getFixedBinIdSize() { return baseDefinition.getFixedBinIdSize(); } @Override public NumericRange getBounds() { return baseDefinition.getBounds(); } @Override public NumericData getFullRange() { return baseDefinition.getFullRange(); } @Override public NumericDimensionDefinition getBaseDefinition() { return baseDefinition; } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/dimension/BasicNumericDimensionField.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.dimension;

import java.nio.ByteBuffer;
import java.util.Set;
import org.apache.commons.lang3.Range;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.IndexDimensionHint;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericValue;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Sets;

/**
 * A numeric dimension field backed by a single numeric adapter attribute, optionally bounded by a
 * {@link Range} (which becomes a {@link BasicDimensionDefinition}).
 *
 * <p>NOTE(review): generic parameters appear stripped in this extract — {@code getNumericData}
 * calls {@code dataElement.doubleValue()}, so upstream this is presumably
 * {@code BasicNumericDimensionField<T extends Number>} with {@code Class<T> myClass}; confirm.
 */
public class BasicNumericDimensionField extends AbstractNumericDimensionField {
  private static final Logger LOGGER = LoggerFactory.getLogger(BasicNumericDimensionField.class);
  // name of the adapter field this dimension indexes
  private String fieldName;
  // runtime class of the field value, used to resolve default readers/writers
  private Class myClass;

  /** No-arg constructor for serialization; state is populated by fromBinary. */
  public BasicNumericDimensionField() {
    super();
  }

  /** Unbounded field (no base dimension definition). */
  public BasicNumericDimensionField(final String fieldName, final Class myClass) {
    this(fieldName, myClass, null);
  }

  /**
   * @param fieldName the adapter field name
   * @param myClass the field value class
   * @param range optional bounds; when null no base definition is created and the inherited
   *        delegation methods will NPE if invoked — callers presumably only do so for bounded
   *        fields (TODO confirm)
   */
  public BasicNumericDimensionField(
      final String fieldName,
      final Class myClass,
      final Range range) {
    super(
        range == null ? null
            : new BasicDimensionDefinition(range.getMinimum(), range.getMaximum()));
    this.fieldName = fieldName;
    this.myClass = myClass;
  }

  /** Wraps the raw field value as a single-valued {@link NumericValue}. */
  @Override
  public NumericData getNumericData(final T dataElement) {
    return new NumericValue(dataElement.doubleValue());
  }

  @Override
  public String getFieldName() {
    return fieldName;
  }

  @Override
  public FieldWriter getWriter() {
    return FieldUtils.getDefaultWriterForClass(myClass);
  }

  @Override
  public FieldReader getReader() {
    return FieldUtils.getDefaultReaderForClass(myClass);
  }

  @Override
  public Class getFieldClass() {
    return myClass;
  }

  /**
   * Binary layout (must stay in sync with {@link #fromBinary}):
   * [varint fieldName length][fieldName bytes][varint className length][className bytes]
   * [base definition bytes (may be empty)].
   */
  @Override
  public byte[] toBinary() {
    final byte[] bytes;
    if (baseDefinition != null) {
      bytes = baseDefinition.toBinary();
    } else {
      // unbounded field: no trailing definition bytes
      bytes = new byte[0];
    }
    final byte[] strBytes = StringUtils.stringToBinary(fieldName);
    final byte[] classBytes = StringUtils.stringToBinary(myClass.getName());
    final ByteBuffer buf =
        ByteBuffer.allocate(
            bytes.length
                + VarintUtils.unsignedIntByteLength(strBytes.length)
                + strBytes.length
                + VarintUtils.unsignedIntByteLength(classBytes.length)
                + classBytes.length);
    VarintUtils.writeUnsignedInt(strBytes.length, buf);
    buf.put(strBytes);
    VarintUtils.writeUnsignedInt(classBytes.length, buf);
    buf.put(classBytes);
    buf.put(bytes);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int fieldNameLength = VarintUtils.readUnsignedInt(buf);
    final byte[] strBytes = ByteArrayUtils.safeRead(buf, fieldNameLength);
    fieldName = StringUtils.stringFromBinary(strBytes);
    final int classNameLength = VarintUtils.readUnsignedInt(buf);
    final byte[] classBytes = ByteArrayUtils.safeRead(buf, classNameLength);
    final String className = StringUtils.stringFromBinary(classBytes);
    try {
      myClass = (Class) Class.forName(className);
    } catch (final ClassNotFoundException e) {
      // NOTE(review): myClass stays null here; a later getWriter/toBinary would NPE — the
      // warn-and-continue is presumably intentional best-effort deserialization, confirm
      LOGGER.warn("Unable to read class", e);
    }
    // whatever remains after the two length-prefixed strings is the base definition
    final int restLength =
        bytes.length
            - VarintUtils.unsignedIntByteLength(fieldNameLength)
            - fieldNameLength
            - VarintUtils.unsignedIntByteLength(classNameLength)
            - classNameLength;
    if (restLength > 0) {
      final byte[] rest = ByteArrayUtils.safeRead(buf, restLength);
      baseDefinition = new BasicDimensionDefinition();
      baseDefinition.fromBinary(rest);
    } else {
      baseDefinition = null;
    }
  }

  /** Basic fields carry no index dimension hints. */
  @Override
  public Set getDimensionHints() {
    return Sets.newHashSet();
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/dimension/NumericDimensionField.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.dimension;

import java.util.Set;
import org.locationtech.geowave.core.index.IndexDimensionHint;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.store.api.IndexFieldMapper.IndexFieldOptions;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldWriter;

/**
 * This interface provides in addition to the index dimension definition, a way to read and write a
 * field and get a field ID
 *
 * @param <T> the field value type (NOTE(review): the type parameter was stripped in this extract —
 *        confirm against the original file)
 */
public interface NumericDimensionField extends NumericDimensionDefinition {
  /**
   * Decode a numeric value or range from the raw field value
   *
   * @param dataElement the raw field value
   * @return a numeric value or range
   */
  NumericData getNumericData(T dataElement);

  /**
   * Returns an identifier that is unique for a given data type (field IDs should be distinct per
   * row)
   *
   * @return the field name
   */
  String getFieldName();

  /** Optional index-field-mapper options; null by default. */
  default IndexFieldOptions getIndexFieldOptions() {
    return null;
  }

  /** @return hints describing how this dimension participates in the index */
  Set getDimensionHints();

  /**
   * Get a writer that can handle serializing values for this field
   *
   * @return the field writer for this field
   */
  FieldWriter getWriter();

  /**
   * Get a reader that can handle deserializing binary data into values for this field
   *
   * @return the field reader for this field
   */
  FieldReader getReader();

  /**
   * Get the basic index definition for this field
   *
   * @return the base index definition for this dimension
   */
  NumericDimensionDefinition getBaseDefinition();

  /** @return the runtime class of field values handled by this dimension */
  Class getFieldClass();

  /**
   * Determines if the given field type is compatible with this field.
   *
   * @param clazz the field type to check
   * @return true if the given field type is assignable
   */
  default boolean isCompatibleWith(final Class clazz) {
    return getFieldClass().isAssignableFrom(clazz);
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveKey.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.VarintUtils;

/**
 * The decomposed parts of a GeoWave row key: partition key, sort key, adapter ID, data ID, and
 * duplicate count.
 */
public interface GeoWaveKey {
  /** @return the unique data ID of the entry */
  public byte[] getDataId();

  /** @return the internal short ID of the adapter (type) this entry belongs to */
  public short getAdapterId();

  /** @return the sort-key portion of the row key; null for a data-ID-only key */
  public byte[] getSortKey();

  /** @return the partition-key portion of the row key; may be null */
  public byte[] getPartitionKey();

  /** @return the number of duplicate rows written for this entry */
  public int getNumberOfDuplicates();

  /**
   * Serializes the key into one byte array. Layout (parsed back by GeoWaveKeyImpl):
   * [partitionKey][sortKey][adapterId reversed-varint][dataId][dataId length reversed-varint]
   * [numberOfDuplicates reversed-varint]. The trailing fields are written as reversed varints so
   * the key can be parsed from the end, since partition/sort key lengths are not self-describing.
   */
  public static byte[] getCompositeId(final GeoWaveKey key) {
    if ((key.getSortKey() == null) && (key.getPartitionKey() == null)) {
      // this is a data ID key
      return key.getDataId();
    }
    final ByteBuffer buffer =
        ByteBuffer.allocate(
            (key.getPartitionKey() == null ? 0 : key.getPartitionKey().length)
                + key.getSortKey().length
                + key.getDataId().length
                + VarintUtils.unsignedIntByteLength(key.getAdapterId() & 0xFFFF)
                + VarintUtils.unsignedIntByteLength(key.getDataId().length)
                + VarintUtils.unsignedIntByteLength(key.getNumberOfDuplicates()));
    if (key.getPartitionKey() != null) {
      buffer.put(key.getPartitionKey());
    }
    buffer.put(key.getSortKey());
    // adapter ID masked to unsigned 16-bit before varint encoding
    VarintUtils.writeUnsignedIntReversed(key.getAdapterId() & 0xFFFF, buffer);
    buffer.put(key.getDataId());
    VarintUtils.writeUnsignedIntReversed(key.getDataId().length, buffer);
    VarintUtils.writeUnsignedIntReversed(key.getNumberOfDuplicates(), buffer);
    buffer.rewind();
    return buffer.array();
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveKeyImpl.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.SinglePartitionInsertionIds;
import org.locationtech.geowave.core.index.VarintUtils;

/**
 * Default {@link GeoWaveKey} implementation. Can either be constructed from its parts or parsed
 * from a composite ID produced by {@link GeoWaveKey#getCompositeId(GeoWaveKey)}; parsing reads the
 * trailing reversed varints back-to-front, mirroring that write layout.
 */
public class GeoWaveKeyImpl implements GeoWaveKey {
  protected byte[] dataId = null;
  protected short internalAdapterId = 0;
  protected byte[] partitionKey = null;
  protected byte[] sortKey = null;
  protected int numberOfDuplicates = 0;
  // lazily-computed cache of the serialized form; see getCompositeInsertionId()
  private byte[] compositeInsertionId = null;

  protected GeoWaveKeyImpl() {}

  public GeoWaveKeyImpl(final byte[] compositeInsertionId, final int partitionKeyLength) {
    this(compositeInsertionId, partitionKeyLength, compositeInsertionId.length);
  }

  public GeoWaveKeyImpl(
      final byte[] compositeInsertionId,
      final int partitionKeyLength,
      final int length) {
    this(compositeInsertionId, partitionKeyLength, 0, length);
  }

  /**
   * Parses a composite ID. The composite layout is
   * [partitionKey][sortKey][adapterId][dataId][dataId length][numberOfDuplicates], where the last
   * three integers are reversed varints, so parsing starts at the final byte and walks backwards.
   *
   * @param compositeInsertionId the serialized key bytes
   * @param partitionKeyLength how many leading bytes are the partition key
   * @param offset start of the key within the array
   * @param length number of bytes making up the key
   */
  public GeoWaveKeyImpl(
      final byte[] compositeInsertionId,
      final int partitionKeyLength,
      final int offset,
      final int length) {
    this.compositeInsertionId = compositeInsertionId;
    final ByteBuffer buf = ByteBuffer.wrap(compositeInsertionId, offset, length);
    // start at the last byte and read the trailing reversed varints
    buf.position(buf.limit() - 1);
    // NOTE(review): the Math.min(..., buf.limit()) clamps presumably guard against corrupt or
    // truncated varints producing absurd values — confirm intent upstream
    final int numberOfDuplicates = Math.min(VarintUtils.readUnsignedIntReversed(buf), buf.limit());
    final int dataIdLength = Math.min(VarintUtils.readUnsignedIntReversed(buf), buf.limit());
    final byte[] dataId = new byte[dataIdLength];
    // jump back over the data ID bytes, read them forward, then reposition before them
    buf.position((buf.position() - dataIdLength) + 1);
    buf.get(dataId);
    buf.position(buf.position() - dataIdLength - 1);
    internalAdapterId = (short) VarintUtils.readUnsignedIntReversed(buf);
    // everything consumed from the tail so far; the remainder is partitionKey + sortKey
    final int readLength = buf.limit() - 1 - buf.position();
    buf.position(offset);
    final byte[] sortKey = new byte[length - readLength - partitionKeyLength];
    final byte[] partitionKey = new byte[partitionKeyLength];
    buf.get(partitionKey);
    buf.get(sortKey);
    this.dataId = dataId;
    this.partitionKey = partitionKey;
    this.sortKey = sortKey;
    this.numberOfDuplicates = numberOfDuplicates;
  }

  public GeoWaveKeyImpl(
      final byte[] dataId,
      final short internalAdapterId,
      final byte[] partitionKey,
      final byte[] sortKey,
      final int numberOfDuplicates) {
    this.dataId = dataId;
    this.internalAdapterId = internalAdapterId;
    this.partitionKey = partitionKey;
    this.sortKey = sortKey;
    this.numberOfDuplicates = numberOfDuplicates;
  }

  @Override
  public byte[] getDataId() {
    return dataId;
  }

  @Override
  public short getAdapterId() {
    return internalAdapterId;
  }

  @Override
  public byte[] getPartitionKey() {
    return partitionKey;
  }

  @Override
  public byte[] getSortKey() {
    return sortKey;
  }

  /** Lazily serializes (and caches) this key via {@link GeoWaveKey#getCompositeId}. */
  public byte[] getCompositeInsertionId() {
    if (compositeInsertionId != null) {
      return compositeInsertionId;
    }
    compositeInsertionId = GeoWaveKey.getCompositeId(this);
    return compositeInsertionId;
  }

  @Override
  public int getNumberOfDuplicates() {
    return numberOfDuplicates;
  }

  /** A negative duplicate count is the marker for deduplication being disabled. */
  public boolean isDeduplicationEnabled() {
    return numberOfDuplicates >= 0;
  }

  /**
   * Expands a set of insertion IDs into one key per (partition, sort key) pair. Each key carries
   * the total duplicate count (size - 1). A null insertionIds yields a single data-ID-only key.
   *
   * <p>NOTE(review): collection generics were stripped in this extract (presumably
   * {@code Collection<SinglePartitionInsertionIds>} / {@code List<byte[]>}) — confirm upstream.
   */
  public static GeoWaveKey[] createKeys(
      final InsertionIds insertionIds,
      final byte[] dataId,
      final short internalAdapterId) {
    if (insertionIds == null) {
      return new GeoWaveKey[] {new GeoWaveKeyImpl(dataId, internalAdapterId, null, null, 0)};
    }
    final GeoWaveKey[] keys = new GeoWaveKey[insertionIds.getSize()];
    final Collection partitionKeys = insertionIds.getPartitionKeys();
    final Iterator it = partitionKeys.iterator();
    final int numDuplicates = keys.length - 1;
    int i = 0;
    while (it.hasNext()) {
      final SinglePartitionInsertionIds partitionKey = it.next();
      if ((partitionKey.getSortKeys() == null) || partitionKey.getSortKeys().isEmpty()) {
        // partition with no sort keys: emit one key with an empty sort key
        keys[i++] =
            new GeoWaveKeyImpl(
                dataId,
                internalAdapterId,
                partitionKey.getPartitionKey(),
                new byte[] {},
                numDuplicates);
      } else {
        byte[] partitionKeyBytes;
        if (partitionKey.getPartitionKey() == null) {
          partitionKeyBytes = new byte[] {};
        } else {
          partitionKeyBytes = partitionKey.getPartitionKey();
        }
        final List sortKeys = partitionKey.getSortKeys();
        for (final byte[] sortKey : sortKeys) {
          keys[i++] =
              new GeoWaveKeyImpl(
                  dataId,
                  internalAdapterId,
                  partitionKeyBytes,
                  sortKey,
                  numDuplicates);
        }
      }
    }
    return keys;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveMetadata.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.entities; import java.util.Arrays; import com.google.common.primitives.UnsignedBytes; public class GeoWaveMetadata implements Comparable { protected byte[] primaryId; protected byte[] secondaryId; protected byte[] visibility; protected byte[] value; public GeoWaveMetadata( final byte[] primaryId, final byte[] secondaryId, final byte[] visibility, final byte[] value) { this.primaryId = primaryId; this.secondaryId = secondaryId; this.visibility = visibility; this.value = value; } public byte[] getPrimaryId() { return primaryId; } public byte[] getSecondaryId() { return secondaryId; } public byte[] getVisibility() { return visibility; } public byte[] getValue() { return value; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + (((primaryId == null) || (primaryId.length == 0)) ? 0 : Arrays.hashCode(primaryId)); result = (prime * result) + (((secondaryId == null) || (secondaryId.length == 0)) ? 0 : Arrays.hashCode(secondaryId)); result = (prime * result) + (((value == null) || (value.length == 0)) ? 0 : Arrays.hashCode(value)); result = (prime * result) + (((visibility == null) || (visibility.length == 0)) ? 0 : Arrays.hashCode(visibility)); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final GeoWaveMetadata other = (GeoWaveMetadata) obj; byte[] otherComp = (other.primaryId != null) && (other.primaryId.length == 0) ? 
null : other.primaryId; byte[] thisComp = (primaryId != null) && (primaryId.length == 0) ? null : primaryId; if (!Arrays.equals(thisComp, otherComp)) { return false; } otherComp = (other.secondaryId != null) && (other.secondaryId.length == 0) ? null : other.secondaryId; thisComp = (secondaryId != null) && (secondaryId.length == 0) ? null : secondaryId; if (!Arrays.equals(otherComp, thisComp)) { return false; } otherComp = (other.value != null) && (other.value.length == 0) ? null : other.value; thisComp = (value != null) && (value.length == 0) ? null : value; if (!Arrays.equals(otherComp, thisComp)) { return false; } otherComp = (other.visibility != null) && (other.visibility.length == 0) ? null : other.visibility; thisComp = (visibility != null) && (visibility.length == 0) ? null : visibility; if (!Arrays.equals(otherComp, thisComp)) { return false; } return true; } @Override public int compareTo(final GeoWaveMetadata obj) { if (this == obj) { return 0; } if (obj == null) { return 1; } if (getClass() != obj.getClass()) { return 1; } final GeoWaveMetadata other = obj; byte[] otherComp = other.primaryId == null ? new byte[0] : other.primaryId; byte[] thisComp = primaryId == null ? new byte[0] : primaryId; if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) { return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp); } otherComp = other.secondaryId == null ? new byte[0] : other.secondaryId; thisComp = secondaryId == null ? new byte[0] : secondaryId; if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) { return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp); } otherComp = other.value == null ? new byte[0] : other.value; thisComp = value == null ? new byte[0] : value; if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) { return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp); } otherComp = other.visibility == null ? 
new byte[0] : other.visibility; thisComp = visibility == null ? new byte[0] : visibility; if (UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp) != 0) { return UnsignedBytes.lexicographicalComparator().compare(thisComp, otherComp); } return 0; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRow.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

/** A full GeoWave row: a {@link GeoWaveKey} plus the field values stored under it. */
public interface GeoWaveRow extends GeoWaveKey {
  /** @return the per-field values (mask, visibility, value) stored for this row */
  public GeoWaveValue[] getFieldValues();
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowImpl.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.entities; public class GeoWaveRowImpl implements GeoWaveRow { private final GeoWaveKey key; private final GeoWaveValue[] fieldValues; public GeoWaveRowImpl(final GeoWaveKey key, final GeoWaveValue[] fieldValues) { this.key = key; this.fieldValues = fieldValues; } @Override public byte[] getDataId() { return key.getDataId(); } @Override public short getAdapterId() { return key.getAdapterId(); } @Override public byte[] getSortKey() { return key.getSortKey(); } @Override public byte[] getPartitionKey() { return key.getPartitionKey(); } @Override public int getNumberOfDuplicates() { return key.getNumberOfDuplicates(); } public GeoWaveKey getKey() { return key; } @Override public GeoWaveValue[] getFieldValues() { return fieldValues; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowIteratorTransformer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

import java.util.Iterator;
import com.google.common.base.Function;

/**
 * Interface for a function that transforms an iterator of {@link GeoWaveRow}s to another type. The
 * interface transforms an iterator rather than an individual row to allow iterators to merge rows
 * before transforming them if needed.
 *
 * <p>NOTE(review): generic parameters were stripped in this extract; upstream this presumably
 * reads {@code GeoWaveRowIteratorTransformer<T> extends Function<Iterator<GeoWaveRow>,
 * Iterator<T>>} — confirm against the original file.
 *
 * @param the type to transform each {@link GeoWaveRow} into
 */
public interface GeoWaveRowIteratorTransformer extends Function, Iterator> {
  // identity transformer: passes the row iterator through untouched
  public static GeoWaveRowIteratorTransformer NO_OP_TRANSFORMER =
      new GeoWaveRowIteratorTransformer() {
        @Override
        public Iterator apply(final Iterator input) {
          return input;
        }
      };
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowMergingIterator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.entities; import java.util.Iterator; import com.google.common.collect.Iterators; import com.google.common.collect.PeekingIterator; public class GeoWaveRowMergingIterator implements Iterator { final Iterator source; final PeekingIterator peekingIterator; public GeoWaveRowMergingIterator(final Iterator source) { this.source = source; this.peekingIterator = Iterators.peekingIterator(source); } @Override public boolean hasNext() { return peekingIterator.hasNext(); } @Override public T next() { final T nextValue = peekingIterator.next(); while (peekingIterator.hasNext() && nextValue.shouldMerge(peekingIterator.peek())) { nextValue.mergeRow(peekingIterator.next()); } return nextValue; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveRowMergingTransform.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

import java.io.IOException;
import java.util.Iterator;
import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;
import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterators;

/**
 * A {@link GeoWaveRowIteratorTransformer} that applies an adapter's row-merging transform to each
 * row, collapsing multiple values within a single row into one merged value.
 */
public class GeoWaveRowMergingTransform implements GeoWaveRowIteratorTransformer {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRowMergingTransform.class);
  // transform supplied by the adapter; initialized with adapter-specific options in the ctor
  private final RowTransform rowTransform;

  public GeoWaveRowMergingTransform(
      final RowMergingDataAdapter adapter,
      final short internalAdapterId) {
    super();
    rowTransform = adapter.getTransform();
    try {
      rowTransform.initOptions(adapter.getOptions(internalAdapterId, null));
    } catch (final IOException e) {
      // best-effort: the transform is still used even if option initialization failed
      LOGGER.warn("Unable to initialize row merging adapter for type: " + adapter.getTypeName(), e);
    }
  }

  /**
   * Lazily maps each row through the merging transform; returns null when the input iterator is
   * null (null-in/null-out).
   */
  @Override
  public Iterator apply(final Iterator input) {
    if (input != null) {
      return Iterators.transform(input, row -> {
        return DataStoreUtils.mergeSingleRowValues(row, rowTransform);
      });
    }
    return null;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveValue.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

/** A single stored value of a GeoWave row: the field bitmask, visibility expression, and bytes. */
public interface GeoWaveValue {
  /** @return bitmask identifying which adapter fields this value covers */
  public byte[] getFieldMask();

  /** @return the visibility expression bytes guarding this value */
  public byte[] getVisibility();

  /** @return the serialized field value bytes */
  public byte[] getValue();
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/GeoWaveValueImpl.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.entities;

import java.util.Arrays;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.store.flatten.BitmaskUtils;
import org.locationtech.geowave.core.store.util.DataStoreUtils;

/**
 * Default {@link GeoWaveValue} implementation. Can combine several values into one by AND-ing
 * their field masks, merging their visibilities, and concatenating their value bytes in order.
 */
public class GeoWaveValueImpl implements GeoWaveValue {
  private byte[] fieldMask;
  private byte[] visibility;
  private byte[] value;

  public GeoWaveValueImpl() {}

  /**
   * Builds a single value from many: null/empty input yields empty components, a single element
   * is taken as-is, and multiple elements are folded left-to-right.
   */
  public GeoWaveValueImpl(final GeoWaveValue[] values) {
    if ((values == null) || (values.length == 0)) {
      fieldMask = new byte[] {};
      visibility = new byte[] {};
      value = new byte[] {};
    } else if (values.length == 1) {
      fieldMask = values[0].getFieldMask();
      visibility = values[0].getVisibility();
      value = values[0].getValue();
    } else {
      // fold the remaining values onto the first one
      byte[] mergedMask = values[0].getFieldMask();
      byte[] mergedVis = values[0].getVisibility();
      byte[] mergedValue = values[0].getValue();
      for (int idx = 1; idx < values.length; idx++) {
        mergedMask = BitmaskUtils.generateANDBitmask(mergedMask, values[idx].getFieldMask());
        mergedVis = DataStoreUtils.mergeVisibilities(mergedVis, values[idx].getVisibility());
        mergedValue = ByteArrayUtils.combineArrays(mergedValue, values[idx].getValue());
      }
      fieldMask = mergedMask;
      visibility = mergedVis;
      value = mergedValue;
    }
  }

  public GeoWaveValueImpl(final byte[] fieldMask, final byte[] visibility, final byte[] value) {
    this.fieldMask = fieldMask;
    this.visibility = visibility;
    this.value = value;
  }

  @Override
  public byte[] getFieldMask() {
    return fieldMask;
  }

  @Override
  public byte[] getVisibility() {
    return visibility;
  }

  @Override
  public byte[] getValue() {
    return value;
  }

  @Override
  public int hashCode() {
    // equivalent to the standard 31-based accumulation over fieldMask, value, visibility
    int result = 31 + Arrays.hashCode(fieldMask);
    result = (31 * result) + Arrays.hashCode(value);
    result = (31 * result) + Arrays.hashCode(visibility);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if ((obj == null) || (getClass() != obj.getClass())) {
      return false;
    }
    final GeoWaveValueImpl that = (GeoWaveValueImpl) obj;
    return Arrays.equals(fieldMask, that.fieldMask)
        && Arrays.equals(value, that.value)
        && Arrays.equals(visibility, that.visibility);
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/entities/MergeableGeoWaveRow.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.entities; import java.util.Arrays; public abstract class MergeableGeoWaveRow implements GeoWaveRow { protected GeoWaveValue[] attributeValues; public MergeableGeoWaveRow() {} public MergeableGeoWaveRow(final GeoWaveValue[] attributeValues) { this.attributeValues = attributeValues; } @Override public final GeoWaveValue[] getFieldValues() { return attributeValues; } public void mergeRow(final MergeableGeoWaveRow row) { final GeoWaveValue[] rowFieldValues = row.getFieldValues(); final GeoWaveValue[] newValues = Arrays.copyOf(attributeValues, attributeValues.length + rowFieldValues.length); System.arraycopy(rowFieldValues, 0, newValues, attributeValues.length, rowFieldValues.length); this.attributeValues = newValues; mergeRowInternal(row); } // In case any extending classes want to do something when rows are merged protected void mergeRowInternal(final MergeableGeoWaveRow row) {}; public boolean shouldMerge(final GeoWaveRow row) { return (getAdapterId() == row.getAdapterId()) && Arrays.equals(getDataId(), row.getDataId()) && Arrays.equals(getPartitionKey(), row.getPartitionKey()) && Arrays.equals(getSortKey(), row.getSortKey()) && (getNumberOfDuplicates() == row.getNumberOfDuplicates()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/flatten/BitmaskUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.flatten;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

/**
 * Utility methods when dealing with bitmasks in Accumulo
 *
 * @since 0.9.1
 */
public class BitmaskUtils {
  /**
   * Bitwise AND of two bitmasks, truncated to the shorter one's length.
   *
   * @return a new bitmask; neither input is modified
   */
  public static byte[] generateANDBitmask(final byte[] bitmask1, final byte[] bitmask2) {
    final byte[] result = new byte[Math.min(bitmask1.length, bitmask2.length)];
    for (int i = 0; i < result.length; i++) {
      result[i] = bitmask1[i];
      result[i] &= bitmask2[i];
    }
    return result;
  }

  /** @return true when at least one bit is set anywhere in the array */
  public static boolean isAnyBitSet(final byte[] array) {
    for (final byte b : array) {
      if (b != 0) {
        return true;
      }
    }
    return false;
  }

  /**
   * @return true when the two bitmasks share at least one set bit (within the overlapping
   *         length; differing lengths are tolerated)
   */
  public static boolean bitmaskOverlaps(final byte[] bitmask1, final byte[] bitmask2) {
    final int length = Math.min(bitmask1.length, bitmask2.length);
    for (int i = 0; i < length; i++) {
      if ((bitmask1[i] & bitmask2[i]) != 0) {
        return true;
      }
    }
    return false;
  }

  /**
   * Generates a composite bitmask given a list of field positions.
The composite bitmask represents a true bit for every positive field position.
 *
 * <p>For example, given field 0, field 1, and field 2 this method will return 00000111
 *
 * @param fieldPositions the field positions to set; may be empty
 * @return a composite bitmask, sized to the highest position; empty when no positions are given
 */
public static byte[] generateCompositeBitmask(final SortedSet<Integer> fieldPositions) {
  // SortedSet.last() throws NoSuchElementException on an empty set; an empty bitmask
  // (no bits set) is the natural encoding for "no fields"
  if (fieldPositions.isEmpty()) {
    return new byte[0];
  }
  // one byte covers 8 field positions; size by the highest position present
  final byte[] retVal = new byte[(fieldPositions.last() / 8) + 1];
  for (final Integer fieldPosition : fieldPositions) {
    final int bytePosition = fieldPosition / 8;
    final int bitPosition = fieldPosition % 8;
    retVal[bytePosition] |= (1 << bitPosition);
  }
  return retVal;
}

/**
 * Generates a composite bitmask given a single field position. The composite bitmask represents a
 * true bit for this field position.
 *

For example, given field 2 this method will return 00000100
 *
 * @param fieldPosition a field position
 * @return a composite bitmask
 */
public static byte[] generateCompositeBitmask(final Integer fieldPosition) {
  // delegate to the set-based overload with a singleton sorted set
  return generateCompositeBitmask(new TreeSet<>(Collections.singleton(fieldPosition)));
}

// Caches bitmask -> decoded field positions; bounded to 100 entries since the same few
// bitmasks recur per adapter/index.
// NOTE(review): generics were mangled in this extract — presumably
// LoadingCache<ByteArray, List<Integer>> / CacheLoader<ByteArray, List<Integer>>; confirm.
private static LoadingCache> fieldPositionCache =
    Caffeine.newBuilder().maximumSize(100).build(new CacheLoader>() {
      @Override
      public List load(final ByteArray key) throws Exception {
        // walk every byte, low bit to high bit, collecting absolute bit indices
        final List fieldPositions = new ArrayList<>();
        int currentByte = 0;
        for (final byte singleByteBitMask : key.getBytes()) {
          for (int bit = 0; bit < 8; ++bit) {
            if (((singleByteBitMask >>> bit) & 0x1) == 1) {
              fieldPositions.add((currentByte * 8) + bit);
            }
          }
          currentByte++;
        }
        return fieldPositions;
      }
    });

/**
 * Iterates the set (true) bits within the given composite bitmask and generates a list of field
 * positions.
 *
 * @param bitmask the composite bitmask
 * @return a list of field positions
 */
public static List getFieldPositions(final byte[] bitmask) {
  return fieldPositionCache.get(new ByteArray(bitmask));
}

/**
 * Finds the lowest set (true) bit within the given composite bitmask and returns its field
 * position.
* * @param bitmask the composite bitmask * @return a list of field positions */ public static int getLowestFieldPosition(final byte[] bitmask) { int currentByte = 0; for (final byte singleByteBitMask : bitmask) { for (int bit = 0; bit < 8; ++bit) { if (((singleByteBitMask >>> bit) & 0x1) == 1) { return (currentByte * 8) + bit; } } currentByte++; } return Integer.MAX_VALUE; } /** * Generates a field subset bitmask for the given index, adapter, and fields * * @param indexModel the index's CommonIndexModel * @param fieldNames the fields to include in the subset, as Strings * @param adapterAssociatedWithFieldIds the adapter for the type whose fields are being subsetted * @return the field subset bitmask */ public static byte[] generateFieldSubsetBitmask( final CommonIndexModel indexModel, final String[] fieldNames, final InternalDataAdapter adapterAssociatedWithFieldIds) { final SortedSet fieldPositions = new TreeSet<>(); // dimension fields must also be included for (final NumericDimensionField dimension : indexModel.getDimensions()) { fieldPositions.add( adapterAssociatedWithFieldIds.getPositionOfOrderedField( indexModel, dimension.getFieldName())); } for (final String fieldName : fieldNames) { fieldPositions.add( adapterAssociatedWithFieldIds.getPositionOfOrderedField(indexModel, fieldName)); } return generateCompositeBitmask(fieldPositions); } /** * Generates a new value byte array representing a subset of fields of the given value * * @param value the original column value * @param originalBitmask the bitmask from the column qualifier * @param newBitmask the field subset bitmask * @return the subsetted value as a byte[] */ public static byte[] constructNewValue( final byte[] value, final byte[] originalBitmask, final byte[] newBitmask) { final ByteBuffer originalBytes = ByteBuffer.wrap(value); final List valsToKeep = new ArrayList<>(); int totalSize = 0; final List originalPositions = getFieldPositions(originalBitmask); // convert list to set for quick contains() 
final Set newPositions = new HashSet<>(getFieldPositions(newBitmask)); if (originalPositions.size() > 1) { for (final Integer originalPosition : originalPositions) { final int startPosition = originalBytes.position(); final int len = VarintUtils.readUnsignedInt(originalBytes); final byte[] val = new byte[len]; originalBytes.get(val); if (newPositions.contains(originalPosition)) { valsToKeep.add(val); totalSize += (originalBytes.position() - startPosition); } } } else if (!newPositions.isEmpty()) { // this shouldn't happen because we should already catch the case // where the bitmask is unchanged return value; } else { // and this shouldn't happen because we should already catch the // case where the resultant bitmask is empty return null; } if (valsToKeep.size() == 1) { final ByteBuffer retVal = ByteBuffer.allocate(valsToKeep.get(0).length); retVal.put(valsToKeep.get(0)); return retVal.array(); } final ByteBuffer retVal = ByteBuffer.allocate(totalSize); for (final byte[] val : valsToKeep) { VarintUtils.writeUnsignedInt(val.length, retVal); retVal.put(val); } return retVal.array(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/flatten/BitmaskedPairComparator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.flatten; import java.util.Comparator; import org.apache.commons.lang3.tuple.Pair; /** * Comparator to sort FieldInfo's accordingly. Assumes FieldInfo.getDataValue().getId().getBytes() * returns the bitmasked representation of a fieldId * * @see BitmaskUtils * @since 0.9.1 */ public class BitmaskedPairComparator implements Comparator>, java.io.Serializable { private static final long serialVersionUID = 1L; @Override public int compare(final Pair o1, final Pair o2) { return o1.getLeft().compareTo(o2.getLeft()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedDataSet.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.flatten; import java.util.List; public class FlattenedDataSet { private final List fieldsRead; private final FlattenedUnreadData fieldsDeferred; public FlattenedDataSet( final List fieldsRead, final FlattenedUnreadData fieldsDeferred) { this.fieldsRead = fieldsRead; this.fieldsDeferred = fieldsDeferred; } public List getFieldsRead() { return fieldsRead; } public FlattenedUnreadData getFieldsDeferred() { return fieldsDeferred; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedFieldInfo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.flatten; public class FlattenedFieldInfo { private final int fieldPosition; private final byte[] value; public FlattenedFieldInfo(final int fieldPosition, final byte[] value) { this.fieldPosition = fieldPosition; this.value = value; } public int getFieldPosition() { return fieldPosition; } public byte[] getValue() { return value; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedUnreadData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.flatten; import java.util.List; public interface FlattenedUnreadData { public List finishRead(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/flatten/FlattenedUnreadDataSingleRow.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.flatten; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; public class FlattenedUnreadDataSingleRow implements FlattenedUnreadData { private final ByteBuffer partiallyConsumedBuffer; private final int currentIndexInFieldPositions; private final List fieldPositions; private List cachedRead = null; public FlattenedUnreadDataSingleRow( final ByteBuffer partiallyConsumedBuffer, final int currentIndexInFieldPositions, final List fieldPositions) { this.partiallyConsumedBuffer = partiallyConsumedBuffer; this.currentIndexInFieldPositions = currentIndexInFieldPositions; this.fieldPositions = fieldPositions; } @Override public List finishRead() { if (cachedRead == null) { cachedRead = new ArrayList<>(); for (int i = currentIndexInFieldPositions; i < fieldPositions.size(); i++) { final int fieldLength = VarintUtils.readUnsignedInt(partiallyConsumedBuffer); final byte[] fieldValueBytes = ByteArrayUtils.safeRead(partiallyConsumedBuffer, fieldLength); final Integer fieldPosition = fieldPositions.get(i); cachedRead.add(new FlattenedFieldInfo(fieldPosition, fieldValueBytes)); } } return cachedRead; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeDimensionalityTypeProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional 
information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.util.ServiceLoader; import javax.annotation.Nullable; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.AttributeIndex; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi; import com.beust.jcommander.ParameterException; /** * Provides an attribute index for any field that supports them. */ public class AttributeDimensionalityTypeProvider implements DimensionalityTypeProviderSpi { private static ServiceLoader serviceLoader = null; public AttributeDimensionalityTypeProvider() {} @Override public String getDimensionalityTypeName() { return "attribute"; } @Override public String getDimensionalityTypeDescription() { return "This index type can be used to index any attribute of a type that supports indexing."; } @Override public AttributeIndexOptions createOptions() { return new AttributeIndexOptions(); } @Override public Index createIndex(final DataStore dataStore, final AttributeIndexOptions options) { return createIndexFromOptions(dataStore, options); } public static Index createIndexFromOptions( final DataStore dataStore, final AttributeIndexOptions options) { if ((options.getTypeName() == null) || (options.getTypeName().length() == 0)) { throw new ParameterException( "A type name must be specified when creating an attribute index."); } if ((options.getAttributeName() == null) || (options.getAttributeName().length() == 0)) { throw new ParameterException( "An 
attribute name must be specified when creating an attribute index."); } final DataTypeAdapter adapter = dataStore.getType(options.getTypeName()); if (adapter == null) { throw new ParameterException( "A type with name '" + options.getTypeName() + "' could not be found in the data store."); } final FieldDescriptor descriptor = adapter.getFieldDescriptor(options.getAttributeName()); if (descriptor == null) { throw new ParameterException( "An attribute with name '" + options.getAttributeName() + "' could not be found in the type."); } return createIndexForDescriptor(adapter, descriptor, options.getIndexName()); } public static Index createIndexForDescriptor( final DataTypeAdapter adapter, final FieldDescriptor descriptor, final @Nullable String indexName) { if (serviceLoader == null) { serviceLoader = ServiceLoader.load(AttributeIndexProviderSpi.class); } for (final AttributeIndexProviderSpi indexProvider : serviceLoader) { if (indexProvider.supportsDescriptor(descriptor)) { return indexProvider.buildIndex( indexName == null ? AttributeIndex.defaultAttributeIndexName( adapter.getTypeName(), descriptor.fieldName()) : indexName, adapter, descriptor); } } throw new ParameterException( "No attribute index implementations were found for the field type: " + descriptor.bindingClass().getName()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeIndexImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.AttributeIndex; /** * Basic implementation of an attribute index. */ public class AttributeIndexImpl extends CustomNameIndex implements AttributeIndex { private String attributeName; public AttributeIndexImpl() {} public AttributeIndexImpl( final NumericIndexStrategy indexStrategy, final CommonIndexModel indexModel, final String indexName, final String attributeName) { super(indexStrategy, indexModel, indexName); this.attributeName = attributeName; } @Override public NumericIndexStrategy getIndexStrategy() { return indexStrategy; } @Override public CommonIndexModel getIndexModel() { return indexModel; } @Override public String getAttributeName() { return attributeName; } @Override public boolean equals(final Object obj) { if (!(obj instanceof AttributeIndexImpl)) { return false; } return super.equals(obj) && attributeName.equals(((AttributeIndexImpl) obj).attributeName); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + super.hashCode(); result = (prime * result) + attributeName.hashCode(); return result; } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final byte[] attributeNameBytes = StringUtils.stringToBinary(attributeName); final ByteBuffer buffer = ByteBuffer.allocate( 
VarintUtils.unsignedIntByteLength(superBinary.length) + VarintUtils.unsignedIntByteLength(attributeNameBytes.length) + superBinary.length + attributeNameBytes.length); VarintUtils.writeUnsignedInt(superBinary.length, buffer); buffer.put(superBinary); VarintUtils.writeUnsignedInt(attributeNameBytes.length, buffer); buffer.put(attributeNameBytes); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final byte[] superBinary = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer)); final byte[] attributeNameBytes = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer)); super.fromBinary(superBinary); attributeName = StringUtils.stringFromBinary(attributeNameBytes); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeIndexOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions; import com.beust.jcommander.Parameter; /** * Provides options for the creation of attribute indices. */ public class AttributeIndexOptions implements DimensionalityTypeOptions { @Parameter( names = {"--typeName"}, required = true, description = "The name of the type with the attribute to index.") protected String typeName; @Parameter( names = {"--attributeName"}, required = true, description = "The name of the attribute to index.") protected String attributeName; @Parameter(names = {"--indexName"}, required = false, description = "The name of the index.") protected String indexName; public AttributeIndexOptions() {} public AttributeIndexOptions(final String typeName, final String attributeName) { this(typeName, attributeName, null); } public AttributeIndexOptions( final String typeName, final String attributeName, final String indexName) { this.typeName = typeName; this.attributeName = attributeName; this.indexName = indexName; } public void setTypeName(final String typeName) { this.typeName = typeName; } public String getTypeName() { return typeName; } public void setAttributeName(final String attributeName) { this.attributeName = attributeName; } public String getAttributeName() { return attributeName; } public void setIndexName(final String indexName) { this.indexName = indexName; } public String getIndexName() { return indexName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/AttributeIndexProviderSpi.java 
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.AttributeIndex; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * SPI interface for supporting new attribute indices. Implementing this interface can allow the * creation of attribute indices for field types that are not supported by core GeoWave. */ public interface AttributeIndexProviderSpi { /** * Determines if the supplied field descriptor is supported by this attribute index provider. * * @param fieldDescriptor the descriptor to check * @return {@code true} if this provider can create an attribute index for the descriptor */ boolean supportsDescriptor(FieldDescriptor fieldDescriptor); /** * Creates an attribute index for the given descriptor. * * @param indexName the name of the attribute index * @param adapter the adapter that the field descriptor belongs to * @param fieldDescriptor the field descriptor to create an index for * @return the attribute index */ AttributeIndex buildIndex( String indexName, DataTypeAdapter adapter, FieldDescriptor fieldDescriptor); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/BaseIndexBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy; public abstract class BaseIndexBuilder implements IndexBuilder { private final IndexPluginOptions options; public BaseIndexBuilder() { this(new IndexPluginOptions()); } private BaseIndexBuilder(final IndexPluginOptions options) { this.options = options; } public T setNumPartitions(final int numPartitions) { options.getBasicIndexOptions().setNumPartitions(numPartitions); return (T) this; } public T setPartitionStrategy(final PartitionStrategy partitionStrategy) { options.getBasicIndexOptions().setPartitionStrategy(partitionStrategy); return (T) this; } public T setName(final String indexName) { options.setName(indexName); return (T) this; } public Index createIndex(final Index dimensionalityIndex) { return IndexPluginOptions.wrapIndexWithOptions(dimensionalityIndex, options); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/BasicIndexModel.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; /** * This class is a concrete implementation of a common index model. Data adapters will map their * adapter specific fields to these fields that are common for a given index. This way distributable * filters will not need to handle any adapter-specific transformation, but can use the common index * fields. 
*/ public class BasicIndexModel implements CommonIndexModel { protected NumericDimensionField[] dimensions; // the first dimension of a particular field ID will be the persistence // model used private Map> fieldIdToPeristenceMap; private transient String id; public BasicIndexModel() {} public BasicIndexModel(final NumericDimensionField[] dimensions) { init(dimensions); } public void init(final NumericDimensionField[] dimensions) { this.dimensions = dimensions; fieldIdToPeristenceMap = new HashMap<>(); for (final NumericDimensionField d : dimensions) { if (!fieldIdToPeristenceMap.containsKey(d.getFieldName())) { fieldIdToPeristenceMap.put(d.getFieldName(), d); } } } @SuppressWarnings("unchecked") @Override public FieldWriter getWriter(final String fieldName) { final NumericDimensionField dimension = fieldIdToPeristenceMap.get(fieldName); if (dimension != null) { return (FieldWriter) dimension.getWriter(); } return null; } @SuppressWarnings("unchecked") @Override public FieldReader getReader(final String fieldName) { final NumericDimensionField dimension = fieldIdToPeristenceMap.get(fieldName); if (dimension != null) { return (FieldReader) dimension.getReader(); } return null; } @Override public NumericDimensionField[] getDimensions() { return dimensions; } @Override public int hashCode() { final int prime = 31; int result = 1; final String className = getClass().getName(); result = (prime * result) + ((className == null) ? 
0 : className.hashCode()); result = (prime * result) + Arrays.hashCode(dimensions); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final BasicIndexModel other = (BasicIndexModel) obj; return Arrays.equals(dimensions, other.dimensions); } @Override public byte[] toBinary() { int byteBufferLength = VarintUtils.unsignedIntByteLength(dimensions.length); final List dimensionBinaries = new ArrayList<>(dimensions.length); for (final NumericDimensionField dimension : dimensions) { final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension); byteBufferLength += (VarintUtils.unsignedIntByteLength(dimensionBinary.length) + dimensionBinary.length); dimensionBinaries.add(dimensionBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); VarintUtils.writeUnsignedInt(dimensions.length, buf); for (final byte[] dimensionBinary : dimensionBinaries) { VarintUtils.writeUnsignedInt(dimensionBinary.length, buf); buf.put(dimensionBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numDimensions = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, numDimensions); dimensions = new NumericDimensionField[numDimensions]; for (int i = 0; i < numDimensions; i++) { final byte[] dim = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); dimensions[i] = (NumericDimensionField) PersistenceUtils.fromBinary(dim); } init(dimensions); } @Override public String getId() { if (id == null) { id = StringUtils.intToString(hashCode()); } return id; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/CommonIndexModel.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.data.DataReader; import org.locationtech.geowave.core.store.data.DataWriter; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; /** * This interface describes the common fields for all of the data within the index. It is up to data * adapters to map (encode) the native fields to these common fields for persistence. */ public interface CommonIndexModel extends DataReader, DataWriter, Persistable { NumericDimensionField[] getDimensions(); String getId(); default boolean useInSecondaryIndex() { return false; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/CompositeConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import org.locationtech.geowave.core.index.IndexConstraints; import org.locationtech.geowave.core.store.query.filter.FilterList; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class CompositeConstraints implements FilterableConstraints { private final List constraints = new LinkedList<>(); private boolean intersect = false; public CompositeConstraints() {} public CompositeConstraints(final List constraints) { super(); this.constraints.addAll(constraints); } public CompositeConstraints( final List constraints, final boolean intersect) { super(); this.constraints.addAll(constraints); this.intersect = intersect; } public List getConstraints() { return constraints; } @Override public int getDimensionCount() { return constraints == null ? 
0 : constraints.size(); } @Override public boolean isEmpty() { return (constraints == null) || constraints.isEmpty(); } @Override public QueryFilter getFilter() { final List filters = new ArrayList<>(); for (final IndexConstraints constraint : constraints) { if (constraint instanceof FilterableConstraints) { final QueryFilter filter = ((FilterableConstraints) constraint).getFilter(); if (filter != null) { filters.add(filter); } } } if (filters.isEmpty()) { return null; } if (filters.size() == 1) { return filters.get(0); } return new FilterList(intersect, filters); } @Override public String getFieldName() { return constraints.get(0).getFieldName(); } @Override public FilterableConstraints intersect(final FilterableConstraints constraints) { final CompositeConstraints cc = new CompositeConstraints(this.constraints, true); cc.constraints.add(constraints); return cc; } @Override public FilterableConstraints union(final FilterableConstraints constraints) { final CompositeConstraints cc = new CompositeConstraints(this.constraints); cc.constraints.add(constraints); return cc; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/CoreRegisteredIndexFieldMappers.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; /** * Registered core adapter to index field mappers. */ public class CoreRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI { @Override public RegisteredFieldMapper[] getRegisteredFieldMappers() { return new RegisteredFieldMapper[] { new RegisteredFieldMapper(NoOpIndexFieldMapper::new, (short) 202)}; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/CustomAttributeIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.AttributeIndex; /** * An implementation of {@link CustomIndex} that supports attribute indices. This can be used to * create attribute indices on non-numeric fields. * * @param The entry type (such as SimpleFeature, GridCoverage, or whatever type the adapter * uses) * @param The custom constraints type can be any arbitrary type, although should be persistable * so that it can work outside of just client code (such as server-side filtering, * map-reduce, or spark) */ public class CustomAttributeIndex extends CustomIndex implements AttributeIndex { private String attributeName; public CustomAttributeIndex() { super(); } public CustomAttributeIndex( final CustomIndexStrategy indexStrategy, final String id, final String attributeName) { super(indexStrategy, id); this.attributeName = attributeName; } @Override public String getAttributeName() { return attributeName; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + super.hashCode(); result = (prime * result) + attributeName.hashCode(); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { 
return false; } final CustomAttributeIndex other = (CustomAttributeIndex) obj; return super.equals(obj) && attributeName.equals(other.attributeName); } @Override public byte[] toBinary() { final byte[] baseBinary = super.toBinary(); final byte[] attributeNameBytes = StringUtils.stringToBinary(attributeName); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(baseBinary.length) + VarintUtils.unsignedIntByteLength(attributeNameBytes.length) + baseBinary.length + attributeNameBytes.length); VarintUtils.writeUnsignedInt(attributeNameBytes.length, buf); buf.put(attributeNameBytes); VarintUtils.writeUnsignedInt(baseBinary.length, buf); buf.put(baseBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] attributeNameBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); attributeName = StringUtils.stringFromBinary(attributeNameBytes); final byte[] baseBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); super.fromBinary(baseBinary); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/CustomIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.index;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.CustomIndexStrategy;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;

/**
 * This is a basic wrapper around a custom index strategy.
 *
 * @param <E> The entry type (such as SimpleFeature, GridCoverage, or whatever type the adapter
 *          uses)
 * @param <C> The custom constraints type can be any arbitrary type, although should be persistable
 *          so that it can work outside of just client code (such as server-side filtering,
 *          map-reduce, or spark)
 */
public class CustomIndex<E, C extends Persistable> extends NullIndex implements
    CustomIndexStrategy<E, C> {
  /** Delegate that produces insertion ids, query ranges, and filters for this index. */
  private CustomIndexStrategy<E, C> indexStrategy;

  /** No-arg constructor required by the persistence framework; use {@code fromBinary} to init. */
  public CustomIndex() {
    super();
  }

  public CustomIndex(final CustomIndexStrategy<E, C> indexStrategy, final String id) {
    super(id);
    this.indexStrategy = indexStrategy;
  }

  public CustomIndexStrategy<E, C> getCustomIndexStrategy() {
    return indexStrategy;
  }

  @Override
  public InsertionIds getInsertionIds(final E entry) {
    return indexStrategy.getInsertionIds(entry);
  }

  @Override
  public QueryRanges getQueryRanges(final C constraints) {
    return indexStrategy.getQueryRanges(constraints);
  }

  @Override
  public PersistableBiPredicate<E, C> getFilter(final C constraints) {
    return indexStrategy.getFilter(constraints);
  }

  @Override
  public int hashCode() {
    // identity is solely the index name, matching IndexImpl's convention
    return getName().hashCode();
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    // only the name participates in equality, consistent with hashCode
    final IndexImpl other = (IndexImpl) obj;
    return getName().equals(other.getName());
  }

  @Override
  public byte[] toBinary() {
    final byte[] baseBinary = super.toBinary();
    final byte[] additionalBinary = PersistenceUtils.toBinary(indexStrategy);
    final ByteBuffer buf =
        ByteBuffer.allocate(
            VarintUtils.unsignedIntByteLength(baseBinary.length)
                + baseBinary.length
                + additionalBinary.length);
    // only the base length is prefixed; the strategy occupies the remainder of the buffer
    VarintUtils.writeUnsignedInt(baseBinary.length, buf);
    buf.put(baseBinary);
    buf.put(additionalBinary);
    return buf.array();
  }

  @SuppressWarnings("unchecked")
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final byte[] baseBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
    super.fromBinary(baseBinary);
    final byte[] additionalBinary = ByteArrayUtils.safeRead(buf, buf.remaining());
    indexStrategy = (CustomIndexStrategy<E, C>) PersistenceUtils.fromBinary(additionalBinary);
  }

  @Override
  public Class<C> getConstraintsClass() {
    return indexStrategy.getConstraintsClass();
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/CustomNameIndex.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; public class CustomNameIndex extends IndexImpl { private String name; public CustomNameIndex() { super(); } public CustomNameIndex( final NumericIndexStrategy indexStrategy, final CommonIndexModel indexModel, final String name) { super(indexStrategy, indexModel); this.name = name; } @Override public String getName() { return name; } @Override public byte[] toBinary() { final byte[] selfBinary = super.toBinary(); final byte[] idBinary = StringUtils.stringToBinary(name); final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(selfBinary.length) + idBinary.length + selfBinary.length); VarintUtils.writeUnsignedInt(selfBinary.length, buf); buf.put(selfBinary); buf.put(idBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int selfBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] selfBinary = ByteArrayUtils.safeRead(buf, selfBinaryLength); super.fromBinary(selfBinary); final byte[] nameBinary = new byte[buf.remaining()]; buf.get(nameBinary); name = StringUtils.stringFromBinary(nameBinary); } @Override public boolean equals(final Object obj) { if (!(obj instanceof CustomNameIndex)) { return false; } return super.equals(obj); } @Override public int hashCode() { return super.hashCode(); } } 
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/FilterableConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.index.IndexConstraints; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public interface FilterableConstraints extends IndexConstraints { public String getFieldName(); public QueryFilter getFilter(); public FilterableConstraints intersect(FilterableConstraints constaints); public FilterableConstraints union(FilterableConstraints constaints); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.store.api.Index; public interface IndexBuilder { public Index createIndex(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFieldMapperPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; /** * Registers index field mappers with the GeoWave persistable registry. */ public class IndexFieldMapperPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return IndexFieldMapperRegistry.instance().getPersistables(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFieldMapperRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Supplier; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor; import org.locationtech.geowave.core.store.api.IndexFieldMapper; import org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI.RegisteredFieldMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Uses SPI to find registered adapter to index field mappers. 
*/ public class IndexFieldMapperRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(IndexFieldMapperRegistry.class); private static IndexFieldMapperRegistry INSTANCE = null; private Map, List> indexFieldMappings = Maps.newHashMap(); private final int totalFieldMappings; private IndexFieldMapperRegistry() { final Iterator spiIter = new SPIServiceRegistry(IndexFieldMapperRegistry.class).load( IndexFieldMapperRegistrySPI.class); int mappingCount = 0; while (spiIter.hasNext()) { final IndexFieldMapperRegistrySPI providedFieldMappers = spiIter.next(); for (RegisteredFieldMapper registeredMapper : providedFieldMappers.getRegisteredFieldMappers()) { Class indexFieldType = registeredMapper.getConstructor().get().indexFieldType(); if (!indexFieldMappings.containsKey(indexFieldType)) { indexFieldMappings.put(indexFieldType, Lists.newArrayList()); } indexFieldMappings.get(indexFieldType).add(registeredMapper); mappingCount++; } } this.totalFieldMappings = mappingCount; } public static IndexFieldMapperRegistry instance() { if (INSTANCE == null) { INSTANCE = new IndexFieldMapperRegistry(); } return INSTANCE; } @SuppressWarnings("unchecked") public PersistableIdAndConstructor[] getPersistables() { final Collection> registeredFieldMappers = indexFieldMappings.values(); final PersistableIdAndConstructor[] persistables = new PersistableIdAndConstructor[totalFieldMappings]; int persistableIndex = 0; for (final List mappers : registeredFieldMappers) { for (final RegisteredFieldMapper mapper : mappers) { persistables[persistableIndex++] = new PersistableIdAndConstructor( mapper.getPersistableId(), (Supplier) (Supplier) mapper.getConstructor()); } } return persistables; }; /** * Returns all field mappers that are available for the given index field class. 
* * @param indexFieldClass the index field class * @return a list of available mappers */ public List> getAvailableMappers(final Class indexFieldClass) { List registeredMappers = indexFieldMappings.get(indexFieldClass); List> fieldMappers = Lists.newArrayListWithCapacity( registeredMappers != null ? registeredMappers.size() + 1 : 1); if (registeredMappers != null) { registeredMappers.forEach(mapper -> fieldMappers.add(mapper.getConstructor().get())); } fieldMappers.add(new NoOpIndexFieldMapper<>(indexFieldClass)); return fieldMappers; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFieldMapperRegistrySPI.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.util.function.Supplier; import org.locationtech.geowave.core.store.api.IndexFieldMapper; /** * A base interface for registering new index field mappers with GeoWave via SPI. */ public interface IndexFieldMapperRegistrySPI { /** * @return a list of index field mappers to register */ RegisteredFieldMapper[] getRegisteredFieldMappers(); /** * A registered field mapper contains the constructor for the field mapper and a persistable ID. */ public static class RegisteredFieldMapper { private final Supplier> constructor; private final short persistableId; public RegisteredFieldMapper( final Supplier> constructor, final short persistableId) { this.constructor = constructor; this.persistableId = persistableId; } @SuppressWarnings("unchecked") public Supplier> getConstructor() { return (Supplier>) constructor; } public short getPersistableId() { return persistableId; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.util.function.Predicate; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Index; /** * A persistable predicate for filtering indices. */ public interface IndexFilter extends Predicate, Persistable { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy; import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic; import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic; import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic; import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic; import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic; import com.google.common.collect.Lists; /** * This class fully describes everything necessary to index data within GeoWave. The key components * are the indexing strategy and the common index model. 
*/ public class IndexImpl implements Index, DefaultStatisticsProvider { protected NumericIndexStrategy indexStrategy; protected CommonIndexModel indexModel; public IndexImpl() {} public IndexImpl(final NumericIndexStrategy indexStrategy, final CommonIndexModel indexModel) { this.indexStrategy = indexStrategy; this.indexModel = indexModel; } @Override public NumericIndexStrategy getIndexStrategy() { return indexStrategy; } @Override public CommonIndexModel getIndexModel() { return indexModel; } @Override public String getName() { return indexStrategy.getId() + "_" + indexModel.getId(); } @Override public int hashCode() { return getName().hashCode(); } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final IndexImpl other = (IndexImpl) obj; return getName().equals(other.getName()); } @Override public byte[] toBinary() { final byte[] indexStrategyBinary = PersistenceUtils.toBinary(indexStrategy); final byte[] indexModelBinary = PersistenceUtils.toBinary(indexModel); final ByteBuffer buf = ByteBuffer.allocate( indexStrategyBinary.length + indexModelBinary.length + VarintUtils.unsignedIntByteLength(indexStrategyBinary.length)); VarintUtils.writeUnsignedInt(indexStrategyBinary.length, buf); buf.put(indexStrategyBinary); buf.put(indexModelBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int indexStrategyLength = VarintUtils.readUnsignedInt(buf); final byte[] indexStrategyBinary = ByteArrayUtils.safeRead(buf, indexStrategyLength); indexStrategy = (NumericIndexStrategy) PersistenceUtils.fromBinary(indexStrategyBinary); final byte[] indexModelBinary = new byte[buf.remaining()]; buf.get(indexModelBinary); indexModel = (CommonIndexModel) PersistenceUtils.fromBinary(indexModelBinary); } @Override public List>> getDefaultStatistics() { List>> statistics = 
Lists.newArrayListWithCapacity(6); IndexMetaDataSetStatistic metadata = new IndexMetaDataSetStatistic(getName(), indexStrategy.createMetaData()); metadata.setBinningStrategy(new DataTypeBinningStrategy()); metadata.setInternal(); statistics.add(metadata); DuplicateEntryCountStatistic duplicateCounts = new DuplicateEntryCountStatistic(getName()); duplicateCounts.setBinningStrategy(new DataTypeBinningStrategy()); duplicateCounts.setInternal(); statistics.add(duplicateCounts); PartitionsStatistic partitions = new PartitionsStatistic(getName()); partitions.setBinningStrategy(new DataTypeBinningStrategy()); partitions.setInternal(); statistics.add(partitions); DifferingVisibilityCountStatistic differingFieldVisibility = new DifferingVisibilityCountStatistic(getName()); differingFieldVisibility.setBinningStrategy(new DataTypeBinningStrategy()); differingFieldVisibility.setInternal(); statistics.add(differingFieldVisibility); FieldVisibilityCountStatistic fieldVisibilityCount = new FieldVisibilityCountStatistic(getName()); fieldVisibilityCount.setBinningStrategy(new DataTypeBinningStrategy()); fieldVisibilityCount.setInternal(); statistics.add(fieldVisibilityCount); RowRangeHistogramStatistic rowRangeHistogram = new RowRangeHistogramStatistic(getName()); rowRangeHistogram.setBinningStrategy( new CompositeBinningStrategy( new DataTypeBinningStrategy(), new PartitionBinningStrategy())); rowRangeHistogram.setInternal(); statistics.add(rowRangeHistogram); return statistics; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexPluginOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.index;

import org.locationtech.geowave.core.cli.api.DefaultPluginOptions;
import org.locationtech.geowave.core.cli.api.PluginOptions;
import org.locationtech.geowave.core.index.CompoundIndexStrategy;
import org.locationtech.geowave.core.index.simple.HashKeyIndexStrategy;
import org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.operations.remote.options.BasicIndexOptions;
import org.locationtech.geowave.core.store.spi.DimensionalityTypeOptions;
import org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi;
import org.locationtech.geowave.core.store.spi.DimensionalityTypeRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.ParametersDelegate;

/**
 * This class is responsible for loading index SPI plugins and populating parameters delegate with
 * relevant options for that index.
 */
public class IndexPluginOptions extends DefaultPluginOptions implements PluginOptions {
  public static final String INDEX_PROPERTY_NAMESPACE = "index";
  public static final String DEFAULT_PROPERTY_NAMESPACE = "indexdefault";
  private static final Logger LOGGER = LoggerFactory.getLogger(IndexPluginOptions.class);
  // the plugin "type" qualifier selected via selectPlugin
  private String indexType;
  // optional explicit name; when set, the created index is wrapped in a CustomNameIndex
  private String indexName = null;

  @ParametersDelegate
  private BasicIndexOptions basicIndexOptions = new BasicIndexOptions();

  // This is the plugin loaded from SPI based on "type"
  private DimensionalityTypeProviderSpi indexPlugin = null;

  // These are the options loaded from indexPlugin based on "type"
  @ParametersDelegate
  private DimensionalityTypeOptions indexOptions = null;

  /** Constructor */
  public IndexPluginOptions() {}

  public void setBasicIndexOptions(final BasicIndexOptions basicIndexOptions) {
    this.basicIndexOptions = basicIndexOptions;
  }

  /**
   * Resolves the dimensionality provider for the given qualifier and loads its options.
   *
   * @param qualifier the index type to select, or null to clear the selection
   * @throws ParameterException if no provider is registered for the qualifier
   */
  @Override
  public void selectPlugin(final String qualifier) {
    // Load the Index options.
    indexType = qualifier;
    if (qualifier != null) {
      indexPlugin = DimensionalityTypeRegistry.getSelectedDimensionalityProvider(qualifier);
      if (indexPlugin == null) {
        throw new ParameterException("Unknown index type specified");
      }
      indexOptions = indexPlugin.createOptions();
    } else {
      // clearing the qualifier clears both the plugin and its options
      indexPlugin = null;
      indexOptions = null;
    }
  }

  public DimensionalityTypeOptions getDimensionalityOptions() {
    return indexOptions;
  }

  public void setDimensionalityTypeOptions(final DimensionalityTypeOptions indexOptions) {
    this.indexOptions = indexOptions;
  }

  @Override
  public String getType() {
    return indexType;
  }

  public int getNumPartitions() {
    return basicIndexOptions.getNumPartitions();
  }

  public void setName(final String indexName) {
    this.indexName = indexName;
  }

  public String getName() {
    return indexName;
  }

  public PartitionStrategy getPartitionStrategy() {
    return basicIndexOptions.getPartitionStrategy();
  }

  public BasicIndexOptions getBasicIndexOptions() {
    return basicIndexOptions;
  }

  public DimensionalityTypeProviderSpi getIndexPlugin() {
    return indexPlugin;
  }

  /**
   * Creates the index from the selected plugin and applies partitioning/naming options.
   *
   * @param dataStore the data store the index will be added to
   * @return the (possibly wrapped) index
   */
  public Index createIndex(final DataStore dataStore) {
    final Index index = indexPlugin.createIndex(dataStore, indexOptions);
    return wrapIndexWithOptions(index, this);
  }

  // Wraps the plugin-created index with partitioning (round-robin or hash) and/or an explicit
  // name, as dictated by the supplied options. Each wrap produces a CustomNameIndex.
  static Index wrapIndexWithOptions(final Index index, final IndexPluginOptions options) {
    Index retVal = index;
    if ((options.basicIndexOptions.getNumPartitions() > 1)
        && options.basicIndexOptions.getPartitionStrategy().equals(PartitionStrategy.ROUND_ROBIN)) {
      retVal =
          new CustomNameIndex(
              new CompoundIndexStrategy(
                  new RoundRobinKeyIndexStrategy(options.basicIndexOptions.getNumPartitions()),
                  index.getIndexStrategy()),
              index.getIndexModel(),
              index.getName()
                  + "_"
                  + PartitionStrategy.ROUND_ROBIN.name()
                  + "_"
                  + options.basicIndexOptions.getNumPartitions());
    } else if (options.basicIndexOptions.getNumPartitions() > 1) {
      // default to round robin partitioning (none is not valid if there
      // are more than 1 partition)
      if (options.basicIndexOptions.getPartitionStrategy().equals(PartitionStrategy.NONE)) {
        LOGGER.warn(
            "Partition strategy is necessary when using more than 1 partition, defaulting to 'hash' partitioning.");
      }
      retVal =
          new CustomNameIndex(
              new CompoundIndexStrategy(
                  new HashKeyIndexStrategy(options.basicIndexOptions.getNumPartitions()),
                  index.getIndexStrategy()),
              index.getIndexModel(),
              index.getName()
                  + "_"
                  + PartitionStrategy.HASH.name()
                  + "_"
                  + options.basicIndexOptions.getNumPartitions());
    }
    if ((options.getName() != null) && (options.getName().length() > 0)) {
      // explicit name takes precedence over any derived (partition-suffixed) name
      retVal =
          new CustomNameIndex(retVal.getIndexStrategy(), retVal.getIndexModel(), options.getName());
    }
    return retVal;
  }

  /** @return the config-property namespace for an index with the given name */
  public static String getIndexNamespace(final String name) {
    return String.format("%s.%s", INDEX_PROPERTY_NAMESPACE, name);
  }

  /** How insertion keys are distributed across partitions. */
  public static enum PartitionStrategy {
    NONE, HASH, ROUND_ROBIN;
    // converter that will be used later
    public static PartitionStrategy fromString(final String code) {
      for (final PartitionStrategy output : PartitionStrategy.values()) {
        if (output.toString().equalsIgnoreCase(code)) {
          return output;
        }
      }
      return null;
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/IndexStore.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.Index; public interface IndexStore { public void addIndex(Index index); public Index getIndex(String indexName); public boolean indexExists(String indexName); public CloseableIterator getIndices(); public void removeIndex(String indexName); public void removeAll(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/NoOpIndexFieldMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.api.IndexFieldMapper; /** * A basic index field mapper that maps an adapter field to an index field of the same class. No * transformations are done on the data. * * @param the index and adapter field type */ public class NoOpIndexFieldMapper extends IndexFieldMapper { private Class indexFieldClass = null; public NoOpIndexFieldMapper() {} public NoOpIndexFieldMapper(final Class indexFieldClass) { this.indexFieldClass = indexFieldClass; } @Override protected void initFromOptions( List> inputFieldDescriptors, IndexFieldOptions options) {} @Override public I toIndex(List nativeFieldValues) { return nativeFieldValues.get(0); } @Override public void toAdapter(final I indexFieldValue, final RowBuilder rowBuilder) { rowBuilder.setField(adapterFields[0], indexFieldValue); } @Override public Class indexFieldType() { return indexFieldClass; } @Override public Class adapterFieldType() { return indexFieldClass; } @Override public short adapterFieldCount() { return 1; } private byte[] classBytes = null; @Override protected int byteLength() { classBytes = StringUtils.stringToBinary(indexFieldClass.getName()); return super.byteLength() + VarintUtils.unsignedShortByteLength((short) classBytes.length) + classBytes.length; } @Override protected void 
writeBytes(final ByteBuffer buffer) { VarintUtils.writeUnsignedShort((short) classBytes.length, buffer); buffer.put(classBytes); super.writeBytes(buffer); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override protected void readBytes(final ByteBuffer buffer) { classBytes = new byte[VarintUtils.readUnsignedShort(buffer)]; buffer.get(classBytes); try { indexFieldClass = (Class) Class.forName(StringUtils.stringFromBinary(classBytes)); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to find class for no-op index field mapper."); } super.readBytes(buffer); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/NullIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index; import org.locationtech.geowave.core.index.NullNumericIndexStrategy; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; /** * This can be used as a pass-through for an index. In other words, it represents an index with no * dimensions. It will create a GeoWave-compliant table named with the provided ID and primarily * useful to access the data by row ID. Because it has no dimensions, range scans will result in * full table scans. */ public class NullIndex extends IndexImpl { public NullIndex() { super(); } public NullIndex(final String id) { super(new NullNumericIndexStrategy(id), new BasicIndexModel(new NumericDimensionField[] {})); } @Override public String getName() { return indexStrategy.getId(); } @Override public byte[] toBinary() { return StringUtils.stringToBinary(indexStrategy.getId()); } @Override public void fromBinary(final byte[] bytes) { indexModel = new BasicIndexModel(new NumericDimensionField[] {}); indexStrategy = new NullNumericIndexStrategy(StringUtils.stringFromBinary(bytes)); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/NumericAttributeIndexProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.core.store.index;

import java.util.Set;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleByteIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleDoubleIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleFloatIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleIntegerIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleLongIndexStrategy;
import org.locationtech.geowave.core.index.simple.SimpleShortIndexStrategy;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.AttributeIndex;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.dimension.BasicNumericDimensionField;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import com.beust.jcommander.ParameterException;
import com.google.common.collect.Sets;

/**
 * Provides attribute indices for numeric fields.
 */
public class NumericAttributeIndexProvider implements AttributeIndexProviderSpi {

  /** Boxed numeric types this provider can build attribute indices for; never reassigned. */
  private static final Set<Class<?>> SUPPORTED_CLASSES =
      Sets.newHashSet(
          Byte.class,
          Short.class,
          Integer.class,
          Long.class,
          Float.class,
          Double.class);

  @Override
  public boolean supportsDescriptor(final FieldDescriptor<?> fieldDescriptor) {
    return SUPPORTED_CLASSES.contains(fieldDescriptor.bindingClass());
  }

  /**
   * Builds a single-dimension attribute index whose strategy matches the field's boxed numeric
   * type.
   *
   * @param indexName the name of the index to create
   * @param adapter the adapter that owns the field
   * @param fieldDescriptor descriptor of the numeric field to index
   * @return the attribute index for the field
   * @throws ParameterException if the field's binding class is not a supported numeric type
   */
  @Override
  public AttributeIndex buildIndex(
      final String indexName,
      final DataTypeAdapter<?> adapter,
      final FieldDescriptor<?> fieldDescriptor) {
    final Class<?> bindingClass = fieldDescriptor.bindingClass();
    final String fieldName = fieldDescriptor.fieldName();
    final NumericIndexStrategy indexStrategy;
    final CommonIndexModel indexModel;
    // Each branch pairs the type-specific strategy with a one-field index model.
    if (Byte.class.isAssignableFrom(bindingClass)) {
      indexStrategy = new SimpleByteIndexStrategy();
      indexModel =
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new BasicNumericDimensionField<>(fieldName, Byte.class)});
    } else if (Short.class.isAssignableFrom(bindingClass)) {
      indexStrategy = new SimpleShortIndexStrategy();
      indexModel =
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new BasicNumericDimensionField<>(fieldName, Short.class)});
    } else if (Integer.class.isAssignableFrom(bindingClass)) {
      indexStrategy = new SimpleIntegerIndexStrategy();
      indexModel =
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new BasicNumericDimensionField<>(fieldName, Integer.class)});
    } else if (Long.class.isAssignableFrom(bindingClass)) {
      indexStrategy = new SimpleLongIndexStrategy();
      indexModel =
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new BasicNumericDimensionField<>(fieldName, Long.class)});
    } else if (Float.class.isAssignableFrom(bindingClass)) {
      indexStrategy = new SimpleFloatIndexStrategy();
      indexModel =
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new BasicNumericDimensionField<>(fieldName, Float.class)});
    } else if (Double.class.isAssignableFrom(bindingClass)) {
      indexStrategy = new SimpleDoubleIndexStrategy();
      indexModel =
          new BasicIndexModel(
              new NumericDimensionField[] {
                  new BasicNumericDimensionField<>(fieldName, Double.class)});
    } else {
      throw new ParameterException(
          "Unsupported numeric attribute index class: " + bindingClass.getName());
    }
    return new AttributeIndexImpl(indexStrategy, indexModel, indexName, fieldName);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/TextAttributeIndexProvider.java
================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.core.store.index;

import java.nio.ByteBuffer;
import java.util.EnumSet;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.text.CaseSensitivity;
import org.locationtech.geowave.core.index.text.TextIndexEntryConverter;
import org.locationtech.geowave.core.index.text.TextIndexStrategy;
import org.locationtech.geowave.core.index.text.TextSearchType;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.AttributeIndex;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * Provides attribute indices for string fields.
 */
public class TextAttributeIndexProvider implements AttributeIndexProviderSpi {

  @Override
  public boolean supportsDescriptor(final FieldDescriptor<?> fieldDescriptor) {
    return String.class.isAssignableFrom(fieldDescriptor.bindingClass());
  }

  /**
   * Builds a text index over the given string field supporting prefix, suffix, and exact-match
   * searches in both case-sensitive and case-insensitive modes.
   */
  @Override
  public AttributeIndex buildIndex(
      final String indexName,
      final DataTypeAdapter<?> adapter,
      final FieldDescriptor<?> fieldDescriptor) {
    return new CustomAttributeIndex<>(
        new TextIndexStrategy<>(
            EnumSet.of(
                TextSearchType.BEGINS_WITH,
                TextSearchType.ENDS_WITH,
                TextSearchType.EXACT_MATCH),
            EnumSet.of(CaseSensitivity.CASE_SENSITIVE, CaseSensitivity.CASE_INSENSITIVE),
            new AdapterFieldTextIndexEntryConverter<>(adapter, fieldDescriptor.fieldName())),
        indexName,
        fieldDescriptor.fieldName());
  }

  /**
   * A converter that pulls the string value to be indexed from a specific field of the entry using
   * the data adapter that the entry belongs to.
   *
   * @param <T> the type of each entry and the adapter
   */
  public static class AdapterFieldTextIndexEntryConverter<T> implements
      TextIndexEntryConverter<T> {
    private DataTypeAdapter<T> adapter;
    private String fieldName;

    public AdapterFieldTextIndexEntryConverter() {}

    public AdapterFieldTextIndexEntryConverter(
        final DataTypeAdapter<T> adapter,
        final String fieldName) {
      this.adapter = adapter;
      this.fieldName = fieldName;
    }

    public String getFieldName() {
      return fieldName;
    }

    public DataTypeAdapter<T> getAdapter() {
      return adapter;
    }

    @Override
    public String apply(final T t) {
      // Pull the indexed string straight from the adapter-managed field.
      return (String) adapter.getFieldValue(t, fieldName);
    }

    @Override
    public byte[] toBinary() {
      // Serialize as: varint(adapter length), adapter bytes, varint(name length), name bytes.
      final byte[] adapterBytes = PersistenceUtils.toBinary(adapter);
      final byte[] fieldNameBytes = StringUtils.stringToBinary(fieldName);
      final ByteBuffer buffer =
          ByteBuffer.allocate(
              VarintUtils.unsignedIntByteLength(adapterBytes.length)
                  + VarintUtils.unsignedIntByteLength(fieldNameBytes.length)
                  + adapterBytes.length
                  + fieldNameBytes.length);
      VarintUtils.writeUnsignedInt(adapterBytes.length, buffer);
      buffer.put(adapterBytes);
      VarintUtils.writeUnsignedInt(fieldNameBytes.length, buffer);
      buffer.put(fieldNameBytes);
      return buffer.array();
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public void fromBinary(final byte[] bytes) {
      // Mirror of toBinary; safeRead guards against truncated/corrupt length prefixes.
      final ByteBuffer buffer = ByteBuffer.wrap(bytes);
      final byte[] adapterBytes =
          ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));
      adapter = (DataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes);
      final byte[] fieldNameBytes =
          ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));
      fieldName = StringUtils.stringFromBinary(fieldNameBytes);
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/writer/IndependentAdapterIndexWriter.java
================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index.writer; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Function; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.store.adapter.IndexDependentDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.WriteResults; import org.locationtech.geowave.core.store.api.Writer; import com.google.common.collect.Maps; public class IndependentAdapterIndexWriter implements Writer { final IndexDependentDataAdapter adapter; final Index index; final VisibilityHandler visibilityHandler; final Writer writer; public IndependentAdapterIndexWriter( final IndexDependentDataAdapter adapter, final Index index, final VisibilityHandler visibilityHandler, final Writer writer) { super(); this.writer = writer; this.index = index; this.visibilityHandler = visibilityHandler; this.adapter = adapter; } @Override public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) { return internalWrite(entry, (e -> writer.write(e, visibilityHandler))); } private WriteResults internalWrite( final T entry, final Function internalWriter) { final Iterator indexedEntries = adapter.convertToIndex(index, entry); final Map> insertionIdsPerIndex = new HashMap<>(); while (indexedEntries.hasNext()) { final WriteResults ids = 
internalWriter.apply(indexedEntries.next()); for (final String indexName : ids.getWrittenIndexNames()) { List partitionInsertionIds = insertionIdsPerIndex.get(indexName); if (partitionInsertionIds == null) { partitionInsertionIds = new ArrayList<>(); insertionIdsPerIndex.put(indexName, partitionInsertionIds); } partitionInsertionIds.addAll(ids.getInsertionIdsWritten(indexName).getPartitionKeys()); } } return new WriteResults(Maps.transformValues(insertionIdsPerIndex, v -> new InsertionIds(v))); } @Override public void close() { writer.close(); } @Override public WriteResults write(final T entry) { return internalWrite(entry, (e -> writer.write(e, visibilityHandler))); } @Override public Index[] getIndices() { return writer.getIndices(); } @Override public void flush() { writer.flush(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/index/writer/IndexCompositeWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.index.writer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.WriteResults; import org.locationtech.geowave.core.store.api.Writer; import com.google.common.collect.Maps; public class IndexCompositeWriter implements Writer { final Writer[] writers; public IndexCompositeWriter(final Writer[] writers) { super(); this.writers = writers; } @Override public void close() { for (final Writer indexWriter : writers) { indexWriter.close(); } } @Override public WriteResults write(final T entry) { return internalWrite(entry, (w -> w.write(entry))); } @Override public WriteResults write(final T entry, final VisibilityHandler visibilityHandler) { return internalWrite(entry, (w -> w.write(entry, visibilityHandler))); } protected WriteResults internalWrite( final T entry, final Function, WriteResults> internalWriter) { final Map> insertionIdsPerIndex = new HashMap<>(); for (final Writer indexWriter : writers) { final WriteResults ids = internalWriter.apply(indexWriter); for (final String indexName : ids.getWrittenIndexNames()) { List partitionInsertionIds = insertionIdsPerIndex.get(indexName); if (partitionInsertionIds == null) { partitionInsertionIds = new ArrayList<>(); 
insertionIdsPerIndex.put(indexName, partitionInsertionIds); } partitionInsertionIds.addAll(ids.getInsertionIdsWritten(indexName).getPartitionKeys()); } } return new WriteResults(Maps.transformValues(insertionIdsPerIndex, v -> new InsertionIds(v))); } @Override public Index[] getIndices() { final List ids = new ArrayList<>(); for (final Writer indexWriter : writers) { ids.addAll(Arrays.asList(indexWriter.getIndices())); } return ids.toArray(new Index[ids.size()]); } @Override public void flush() { for (final Writer indexWriter : writers) { indexWriter.flush(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/AbstractLocalFileDriver.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.core.store.ingest;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class can be sub-classed to handle recursing over a local directory structure and passing
 * along the plugin specific handling of any supported file for a discovered plugin.
 *
 * NOTE(review): the class's generic type arguments appear to have been lost in extraction; P is
 * presumably the plugin type (a LocalPluginBase) and R the run-data type — confirm against the
 * repository.
 *
 * @param <P> The type of the plugin this driver supports.
 * @param <R> The type for intermediate data that can be used throughout the life of the process and
 *        is passed along for each call to process a file.
 */
public abstract class AbstractLocalFileDriver

{
  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLocalFileDriver.class);
  // Command-line options controlling which local files are visited (extensions, etc.).
  protected LocalInputCommandLineOptions localInput;
  // Properties loaded from the optional config file in processInput.
  protected Properties configProperties;

  /**
   * Checks that every given index is supported by the adapter provider, logging a warning for each
   * incompatible one.
   *
   * @return true only if all indices are compatible with the provider
   */
  public static boolean checkIndexesAgainstProvider(
      final String providerName,
      final DataAdapterProvider adapterProvider,
      final List indices) {
    boolean valid = true;
    for (final Index index : indices) {
      if (!isCompatible(adapterProvider, index)) {
        // HP Fortify "Log Forging" false positive
        // What Fortify considers "user input" comes only
        // from users with OS-level access anyway
        LOGGER.warn(
            "Local file ingest plugin for ingest type '"
                + providerName
                + "' is not supported by index '"
                + index.getName()
                + "'");
        valid = false;
      }
    }
    return valid;
  }

  /**
   * Determine whether an index is compatible with the visitor
   *
   * @param index an index that an ingest type supports
   * @return whether the adapter is compatible with the common index model
   */
  protected static boolean isCompatible(
      final DataAdapterProvider adapterProvider,
      final Index index) {
    final String[] supportedTypes = adapterProvider.getSupportedIndexTypes();
    if ((supportedTypes == null) || (supportedTypes.length == 0)) {
      return false;
    }
    // Every dimension of the index model must be named among the provider's supported types.
    final NumericDimensionField[] requiredDimensions = index.getIndexModel().getDimensions();
    for (final NumericDimensionField requiredDimension : requiredDimensions) {
      boolean fieldFound = false;
      for (final String supportedType : supportedTypes) {
        if (requiredDimension.getFieldName().equals(supportedType)) {
          fieldFound = true;
          break;
        }
      }
      if (!fieldFound) {
        return false;
      }
    }
    return true;
  }

  public AbstractLocalFileDriver() {}

  public AbstractLocalFileDriver(final LocalInputCommandLineOptions input) {
    localInput = input;
  }

  /**
   * Resolves the input path (local file/directory or ingest URL), initializes each plugin with the
   * base URL, and walks the file tree handing each supported file to the plugins via
   * {@link #processFile}.
   *
   * @throws IllegalArgumentException if the input path does not exist locally
   */
  protected void processInput(
      final String inputPath,
      final File configFile,
      final Map localPlugins,
      final R runData) throws IOException {
    if (inputPath == null) {
      LOGGER.error("Unable to ingest data, base directory or file input not specified");
      return;
    }
    if ((configFile != null) && configFile.exists()) {
      configProperties = ConfigOptions.loadProperties(configFile);
    }
    // A non-null path here means the input was a remote/ingest URL handled by IngestUtils.
    Path path = IngestUtils.handleIngestUrl(inputPath, configProperties);
    if (path == null) {
      final File f = new File(inputPath);
      if (!f.exists()) {
        LOGGER.error("Input file '" + f.getAbsolutePath() + "' does not exist");
        throw new IllegalArgumentException(inputPath + " does not exist");
      }
      path = Paths.get(inputPath);
    }
    for (final LocalPluginBase localPlugin : localPlugins.values()) {
      localPlugin.init(path.toUri().toURL());
    }
    Files.walkFileTree(
        path,
        new LocalPluginFileVisitor<>(localPlugins, this, runData, getExtensions()));
  }

  protected String[] getExtensions() {
    // NOTE(review): throws NPE if the no-arg constructor was used and localInput was never set;
    // presumably subclasses using that constructor override this method — confirm.
    return localInput.getExtensions();
  }

  /** Plugin-specific handling of a single discovered file; invoked by the file visitor. */
  protected abstract void processFile(final URL file, String typeName, P plugin, R runData)
      throws IOException;
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/AbstractLocalFileIngestDriver.java
================================================
/** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.core.store.ingest;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FilenameUtils;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.VisibilityHandler;
import org.locationtech.geowave.core.store.api.Writer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This extends the local file driver to directly ingest data into GeoWave utilizing the
 * LocalFileIngestPlugin's that are discovered by the system.
 */
abstract public class AbstractLocalFileIngestDriver extends
    AbstractLocalFileDriver, LocalIngestRunData> {
  // Capacity of the producer/consumer queue used by the multi-threaded path.
  private static final int INGEST_BATCH_SIZE = 50000;
  private static final Logger LOGGER =
      LoggerFactory.getLogger(AbstractLocalFileIngestDriver.class);
  protected ExecutorService ingestExecutor;

  public AbstractLocalFileIngestDriver() {
    super();
  }

  public AbstractLocalFileIngestDriver(final LocalInputCommandLineOptions inputOptions) {
    super(inputOptions);
  }

  /**
   * Collects the supported local-file ingest plugins and their adapters, then walks the input and
   * ingests every supported file into the data store.
   *
   * @return true on success, false if an I/O error occurred while reading input files
   */
  public boolean runOperation(final String inputPath, final File configFile) {
    // first collect the local file ingest plugins
    final Map> localFileIngestPlugins = new HashMap<>();
    final List> adapters = new ArrayList<>();
    for (final Entry> pluginEntry : getIngestPlugins().entrySet()) {
      if (!isSupported(pluginEntry.getKey(), pluginEntry.getValue())) {
        continue;
      }
      localFileIngestPlugins.put(pluginEntry.getKey(), pluginEntry.getValue());
      adapters.addAll(Arrays.asList(pluginEntry.getValue().getDataAdapters()));
    }
    final DataStore dataStore = getDataStore();
    try (LocalIngestRunData runData =
        new LocalIngestRunData(adapters, dataStore, getVisibilityHandler())) {
      startExecutor();
      processInput(inputPath, configFile, localFileIngestPlugins, runData);
      // We place this here and not just in finally because of the way
      // that try-with-resources works.
      // We want to wait for our ingesting threads to finish before we
      // kill our index writers, which
      // are cached in LocalIngestRunData. If we don't, then the
      // index writers will be
      // closed before they are finished processing the file entries.
      // NOTE(review): the finally-block call below is then a no-op because shutdownExecutor
      // nulls out ingestExecutor; it only matters on the exception path.
      shutdownExecutor();
    } catch (final IOException e) {
      LOGGER.error("Unexpected I/O exception when reading input files", e);
      return false;
    } finally {
      shutdownExecutor();
    }
    return true;
  }

  /**
   * Create a basic thread pool to ingest file data. We limit it to the amount of threads specified
   * on the command line.
   */
  public void startExecutor() {
    if (getNumThreads() > 1) {
      ingestExecutor = Executors.newFixedThreadPool(getNumThreads());
    }
  }

  /** This function will wait for executing tasks to complete for up to 10 seconds. */
  public void shutdownExecutor() {
    if (ingestExecutor != null) {
      try {
        ingestExecutor.shutdown();
        while (!ingestExecutor.awaitTermination(10, TimeUnit.SECONDS)) {
          LOGGER.debug("Waiting for ingest executor to terminate");
        }
      } catch (final InterruptedException e) {
        // NOTE(review): the interrupt status is swallowed here; consider
        // Thread.currentThread().interrupt() — confirm intended behavior.
        LOGGER.error("Failed to terminate executor service");
      } finally {
        ingestExecutor = null;
      }
    }
  }

  /**
   * Ingests a single file via the given plugin, choosing the single- or multi-threaded path based
   * on the configured thread count.
   */
  @Override
  public void processFile(
      final URL file,
      final String typeName,
      final LocalFileIngestPlugin plugin,
      final LocalIngestRunData ingestRunData) throws IOException {
    LOGGER.info(
        String.format(
            "Beginning ingest for file: [%s]",
            // file.getName()));
            FilenameUtils.getName(file.getPath())));

    // This loads up the primary indexes that are specified on the command
    // line.
    // Usually spatial or spatial-temporal
    final Map specifiedPrimaryIndexes = getIndices();

    // This gets the list of required indexes from the Plugin.
    // If for some reason a GeoWaveData specifies an index that isn't
    // originally
    // in the specifiedPrimaryIndexes list, then this array is used to
    // determine
    // if the Plugin supports it. If it does, then we allow the creation of
    // the
    // index.
    final Map requiredIndexMap = new HashMap<>();
    final Index[] requiredIndices = plugin.getRequiredIndices();
    if ((requiredIndices != null) && (requiredIndices.length > 0)) {
      for (final Index requiredIndex : requiredIndices) {
        requiredIndexMap.put(requiredIndex.getName(), requiredIndex);
      }
    }
    if (getNumThreads() == 1) {
      processFileSingleThreaded(
          file,
          typeName,
          plugin,
          ingestRunData,
          specifiedPrimaryIndexes,
          requiredIndexMap,
          getVisibilityHandler());
    } else {
      processFileMultiThreaded(
          file,
          typeName,
          plugin,
          ingestRunData,
          specifiedPrimaryIndexes,
          requiredIndexMap,
          getVisibilityHandler());
    }
    LOGGER.info(String.format("Finished ingest for file: [%s]", file.getFile()));
  }

  /**
   * Reads every GeoWaveData entry from the file on the calling thread and writes it directly,
   * timing the cumulative data-store write cost.
   */
  public void processFileSingleThreaded(
      final URL file,
      final String typeName,
      final LocalFileIngestPlugin plugin,
      final LocalIngestRunData ingestRunData,
      final Map specifiedPrimaryIndexes,
      final Map requiredIndexMap,
      final VisibilityHandler visibilityHandler) throws IOException {
    int count = 0;
    long dbWriteMs = 0L;
    // Writers are checked out per adapter type and returned in the finally block.
    final Map> indexWriters = new HashMap<>();

    // Read files until EOF from the command line.
    try (CloseableIterator geowaveDataIt =
        plugin.toGeoWaveData(file, specifiedPrimaryIndexes.keySet().toArray(new String[0]))) {
      while (geowaveDataIt.hasNext()) {
        final GeoWaveData geowaveData = (GeoWaveData) geowaveDataIt.next();
        try {
          final DataTypeAdapter adapter = ingestRunData.getDataAdapter(geowaveData);
          if (adapter == null) {
            LOGGER.warn(
                String.format(
                    "Adapter not found for [%s] file [%s]",
                    geowaveData.getValue(),
                    FilenameUtils.getName(file.getPath())));
            continue;
          }
          // Ingest the data!
          dbWriteMs +=
              ingestData(
                  geowaveData,
                  adapter,
                  ingestRunData,
                  specifiedPrimaryIndexes,
                  requiredIndexMap,
                  indexWriters,
                  visibilityHandler);
          count++;
        } catch (final Exception e) {
          throw new RuntimeException("Interrupted ingesting GeoWaveData", e);
        }
      }
      LOGGER.debug(
          String.format(
              "Finished ingest for file: [%s]; Ingested %d items in %d seconds",
              FilenameUtils.getName(file.getPath()),
              count,
              (int) dbWriteMs / 1000));
    } finally {
      // Clean up index writers
      for (final Entry> writerEntry : indexWriters.entrySet()) {
        try {
          ingestRunData.releaseIndexWriter(writerEntry.getKey(), writerEntry.getValue());
        } catch (final Exception e) {
          LOGGER.warn(
              String.format("Could not return index writer: [%s]", writerEntry.getKey()),
              e);
        }
      }
    }
  }

  /**
   * Writes one GeoWaveData entry, lazily checking out (and caching) a writer for its adapter and
   * resolving its target indices from the specified or plugin-required index maps.
   *
   * @return the elapsed data-store write time in milliseconds
   */
  private long ingestData(
      final GeoWaveData geowaveData,
      final DataTypeAdapter adapter,
      final LocalIngestRunData runData,
      final Map specifiedPrimaryIndexes,
      final Map requiredIndexMap,
      final Map> indexWriters,
      final VisibilityHandler visibilityHandler) throws Exception {
    try {
      final String adapterId = adapter.getTypeName();
      // Write the data to the data store.
      Writer writer = indexWriters.get(adapterId);
      if (writer == null) {
        final List indices = new ArrayList<>();
        for (final String indexName : geowaveData.getIndexNames()) {
          Index index = specifiedPrimaryIndexes.get(indexName);
          if (index == null) {
            // Fall back to the plugin-required indices before giving up on this name.
            index = requiredIndexMap.get(indexName);
            if (index == null) {
              LOGGER.warn(
                  String.format("Index '%s' not found for %s", indexName, geowaveData.getValue()));
              continue;
            }
          }
          indices.add(index);
        }
        runData.addAdapter(adapter);

        // If we have the index checked out already, use that.
        writer = runData.getIndexWriter(adapterId, indices);
        indexWriters.put(adapterId, writer);
      }

      // Time the DB write
      final long hack = System.currentTimeMillis();
      write(writer, geowaveData);
      final long durMs = System.currentTimeMillis() - hack;
      return durMs;
    } catch (final Exception e) {
      // This should really never happen, because we don't limit the
      // amount of items in the IndexWriter pool.
      LOGGER.error("Fatal error occured while trying write to an index writer.", e);
      throw new RuntimeException("Fatal error occured while trying write to an index writer.", e);
    }
  }

  /** Hook for subclasses to intercept the actual write (e.g. to transform or filter). */
  protected void write(final Writer writer, final GeoWaveData geowaveData) {
    writer.write(geowaveData.getValue());
  }

  /**
   * Producer side of the multi-threaded path: reads entries from the file and feeds them to a
   * bounded queue consumed by IngestTask workers submitted to the executor.
   */
  public void processFileMultiThreaded(
      final URL file,
      final String typeName,
      final LocalFileIngestPlugin plugin,
      final LocalIngestRunData ingestRunData,
      final Map specifiedPrimaryIndexes,
      final Map requiredIndexMap,
      final VisibilityHandler visibilityHandler) throws IOException {
    // Create our queue. We will post GeoWaveData items to these queue until
    // there are no more items, at which point we will tell the workers to
    // complete. Ingest batch size is the total max number of items to read
    // from the file at a time for the worker threads to execute.
    final BlockingQueue> queue = createBlockingQueue(INGEST_BATCH_SIZE);

    // Create our Jobs. We submit as many jobs as we have executors for.
    // These folks will read our blocking queue
    LOGGER.debug(
        String.format(
            "Creating [%d] threads to ingest file: [%s]",
            getNumThreads(),
            FilenameUtils.getName(file.getPath())));
    final List ingestTasks = new ArrayList<>();
    try {
      for (int i = 0; i < getNumThreads(); i++) {
        final String id = String.format("%s-%d", FilenameUtils.getName(file.getPath()), i);
        final IngestTask task =
            new IngestTask(
                id,
                ingestRunData,
                specifiedPrimaryIndexes,
                requiredIndexMap,
                queue,
                this);
        ingestTasks.add(task);
        ingestExecutor.submit(task);
      }

      // Read files until EOF from the command line.
      try (CloseableIterator geowaveDataIt =
          plugin.toGeoWaveData(file, specifiedPrimaryIndexes.keySet().toArray(new String[0]))) {
        while (geowaveDataIt.hasNext()) {
          final GeoWaveData geowaveData = (GeoWaveData) geowaveDataIt.next();
          try {
            while (!queue.offer(geowaveData, 100, TimeUnit.MILLISECONDS)) {
              // Determine if we have any workers left. The point
              // of this code is so we
              // aren't hanging after our workers exit (before the
              // file is done) due to
              // some un-handled exception.
              boolean workerAlive = false;
              for (final IngestTask task : ingestTasks) {
                if (!task.isFinished()) {
                  workerAlive = true;
                  break;
                }
              }

              // If the workers are still there, then just try to
              // offer again.
              // This will loop forever until there are no workers
              // left.
              if (workerAlive) {
                LOGGER.debug("Worker threads are overwhelmed, waiting 1 second");
              } else {
                final String message = "Datastore error, all workers have terminated! Aborting...";
                LOGGER.error(message);
                throw new RuntimeException(message);
              }
            }
          } catch (final InterruptedException e) {
            // I can't see how this will ever happen, except maybe
            // someone kills the process?
            throw new RuntimeException("Interrupted placing GeoWaveData on queue");
          }
        }
      }
    } finally {
      // Terminate our ingest tasks.
      for (final IngestTask task : ingestTasks) {
        task.terminate();
      }
    }
  }

  abstract protected int getNumThreads();

  abstract protected VisibilityHandler getVisibilityHandler();

  abstract protected Map> getIngestPlugins();

  abstract protected DataStore getDataStore();

  abstract protected Map getIndices() throws IOException;

  abstract protected boolean isSupported(
      final String providerName,
      final DataAdapterProvider adapterProvider);

  private static BlockingQueue> createBlockingQueue(final int batchSize) {
    return new LinkedBlockingQueue<>(batchSize);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/BaseDataStoreIngestDriver.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.ingest;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.IngestOptions;
import org.locationtech.geowave.core.store.api.VisibilityHandler;
import org.locationtech.geowave.core.store.api.IngestOptions.IngestCallback;
import org.locationtech.geowave.core.store.api.WriteResults;
import org.locationtech.geowave.core.store.api.Writer;

/**
 * A concrete local-file ingest driver configured entirely from a DataStore, IngestOptions, and a
 * set of target indices.
 */
public class BaseDataStoreIngestDriver extends AbstractLocalFileIngestDriver {
  private final DataStore store;
  private final IngestOptions ingestOptions;
  private final Index[] indices;

  public BaseDataStoreIngestDriver(
      final DataStore store,
      final IngestOptions ingestOptions,
      final Index... indices) {
    super();
    this.store = store;
    this.indices = indices;
    this.ingestOptions = ingestOptions;
    configProperties = ingestOptions.getProperties();
  }

  @Override
  protected int getNumThreads() {
    return ingestOptions.getThreads();
  }

  @Override
  protected VisibilityHandler getVisibilityHandler() {
    return ingestOptions.getVisibilityHandler();
  }

  @Override
  protected Map> getIngestPlugins() {
    // An explicitly-provided format takes precedence over discovered plugins.
    if (ingestOptions.getFormat() != null) {
      return Collections.singletonMap("provided", ingestOptions.getFormat());
    }
    return IngestUtils.getDefaultLocalIngestPlugins();
  }

  @Override
  protected DataStore getDataStore() {
    return store;
  }

  public boolean runIngest(final String inputPath) {
    return super.runOperation(inputPath, null);
  }

  @Override
  protected Map getIndices() throws IOException {
    final Map indexMap = new HashMap<>(indices.length);
    for (final Index i : indices) {
      indexMap.put(i.getName(), i);
    }
    return indexMap;
  }

  @Override
  protected String[] getExtensions() {
    return ingestOptions.getFileExtensions();
  }

  @Override
  protected boolean isSupported(
      final String providerName,
      final DataAdapterProvider adapterProvider) {
    return true;
  }

  /** Applies the optional transform, filter, and callback from the ingest options. */
  @Override
  protected void write(final Writer writer, final GeoWaveData geowaveData) {
    Object obj = geowaveData.getValue();
    if (ingestOptions.getTransform() != null) {
      obj = ((Function) ingestOptions.getTransform()).apply(obj);
    }
    // NOTE(review): the filter is tested against the original (untransformed) value while the
    // transformed value is what gets written — confirm this ordering is intended.
    if ((ingestOptions.getFilter() != null)
        && !((Predicate) ingestOptions.getFilter()).test(geowaveData.getValue())) {
      return;
    }
    final WriteResults results = writer.write(obj);
    if (ingestOptions.getCallback() != null) {
      ((IngestCallback) ingestOptions.getCallback()).dataWritten(results, obj);
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/DataAdapterProvider.java
================================================
/** * Copyright (c)
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * This interface is applicable for plugins that need to provide writable data adapters for ingest. * * @param the java type for the data being ingested */ public interface DataAdapterProvider { /** * Get all writable adapters used by this plugin * * @return An array of adapters that may be used by this plugin */ public DataTypeAdapter[] getDataAdapters(); /** * return a set of index types that can be indexed by this data adapter provider, used for * compatibility checking with an index provider * * @return the named dimensions that are indexable by this adapter provider */ public String[] getSupportedIndexTypes(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/GeoWaveData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * This models any information that is necessary to ingest an entry into GeoWave: the adapter and * index you wish to use as well as the actual data * * @param The java type for the actual data being ingested */ public class GeoWaveData { protected String typeName; private final String[] indexNames; private final T data; private transient DataTypeAdapter adapter; public GeoWaveData(final String typeName, final String[] indexNames, final T data) { this.typeName = typeName; this.indexNames = indexNames; this.data = data; } public GeoWaveData(final DataTypeAdapter adapter, final String[] indexNames, final T data) { this.adapter = adapter; this.indexNames = indexNames; this.data = data; } public String[] getIndexNames() { return indexNames; } public T getValue() { return data; } public DataTypeAdapter getAdapter() { return adapter; } public DataTypeAdapter getAdapter(final TransientAdapterStore adapterCache) { if (adapter != null) { return adapter; } return (DataTypeAdapter) adapterCache.getAdapter(typeName); } public String getTypeName() { return typeName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IndexProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import org.locationtech.geowave.core.store.api.Index; public interface IndexProvider { /** * Get an array of indices that are required by this ingest implementation. This should be a * subset of supported indices. All of these indices will automatically be persisted with * GeoWave's metadata store (and in the job configuration if run as a job), whereas indices that * are just "supported" will not automatically be persisted (only if they are the primary index). * This is primarily useful if there is a supplemental index required by the ingest process that * is not the primary index. * * @return the array of indices that are supported by this ingest implementation */ public Index[] getRequiredIndices(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestFormatOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; /** * This interface is strictly for implementation purposes, and doesn't actually provide any * interface. */ public interface IngestFormatOptions { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestOptionsBuilderImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.ingest;

import java.util.Properties;
import java.util.function.Function;
import java.util.function.Predicate;
import org.apache.commons.lang3.ArrayUtils;
import org.locationtech.geowave.core.store.api.IngestOptions;
import org.locationtech.geowave.core.store.api.IngestOptions.Builder;
import org.locationtech.geowave.core.store.api.IngestOptions.IngestCallback;
import org.locationtech.geowave.core.store.api.VisibilityHandler;

/**
 * Default mutable builder for {@link IngestOptions}. Each setter returns {@code this} for
 * chaining; {@link #build()} snapshots the current state into an immutable options object.
 *
 * @param <T> the java type for the data being ingested
 */
// NOTE(review): the generic parameter <T> was lost in extraction; restored throughout so the
// filter/transform/callback fields are type-safe. Imports were also regrouped alphabetically.
public class IngestOptionsBuilderImpl<T> implements Builder<T> {
  private LocalFileIngestPlugin<T> format = null;
  private int threads = 1;
  private VisibilityHandler visibilityHandler = null;
  private String[] fileExtensions = new String[0];
  private Predicate<T> filter = null;
  private Function<T, T> transform = null;
  private IngestCallback<T> callback = null;
  private Properties properties = null;

  @Override
  public Builder<T> format(final LocalFileIngestPlugin<T> format) {
    this.format = format;
    return this;
  }

  @Override
  public Builder<T> threads(final int threads) {
    this.threads = threads;
    return this;
  }

  @Override
  public Builder<T> visibility(final VisibilityHandler visibilityHandler) {
    this.visibilityHandler = visibilityHandler;
    return this;
  }

  @Override
  public Builder<T> extensions(final String[] fileExtensions) {
    this.fileExtensions = fileExtensions;
    return this;
  }

  @Override
  public Builder<T> addExtension(final String fileExtension) {
    // Append to the existing array rather than replacing it.
    fileExtensions = ArrayUtils.add(fileExtensions, fileExtension);
    return this;
  }

  @Override
  public Builder<T> filter(final Predicate<T> filter) {
    this.filter = filter;
    return this;
  }

  @Override
  public Builder<T> transform(final Function<T, T> transform) {
    this.transform = transform;
    return this;
  }

  @Override
  public Builder<T> callback(final IngestCallback<T> callback) {
    this.callback = callback;
    return this;
  }

  @Override
  public Builder<T> properties(final Properties properties) {
    this.properties = properties;
    return this;
  }

  /** @return an immutable {@link IngestOptions} reflecting the builder's current state */
  @Override
  public IngestOptions<T> build() {
    return new IngestOptions<>(
        format,
        threads,
        visibilityHandler,
        fileExtensions,
        filter,
        transform,
        callback,
        properties);
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestPluginBase.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.net.URL; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * An interface required for ingest plugins to implement a conversion from an expected input format * to GeoWave data which can in turn be ingested into the system. * * @param The type for the input data * @param The type that represents each data entry being ingested */ public interface IngestPluginBase extends DataAdapterProvider { /** * Get all writable adapters used by this plugin for the given URL * * @param url the URL of the data to ingest * @return An array of adapters that may be used by this plugin */ default DataTypeAdapter[] getDataAdapters(final URL url) { return getDataAdapters(); } /** * Convert from an expected input format to a data format that can be directly ingested into * GeoWave * * @param input The expected input. * @param indexNames The set of index IDs specified via a commandline argument (this is typically * either the default spatial index or default spatial-temporal index) * @return The objects that can be directly ingested into GeoWave */ CloseableIterator> toGeoWaveData(I input, String[] indexNames); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestTask.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An IngestTask is a thread which listens to items from a blocking queue, and writes those items to * IndexWriter objects obtained from LocalIngestRunData (where they are constructed but also cached * from the DataStore object). Read items until isTerminated == true. 
*/ public class IngestTask implements Runnable { private static final Logger LOGGER = LoggerFactory.getLogger(IngestTask.class); private final String id; private final BlockingQueue> readQueue; private final LocalIngestRunData runData; private final Map specifiedPrimaryIndexes; private final Map requiredIndexMap; private volatile boolean isTerminated = false; private volatile boolean isFinished = false; private final Map indexWriters; private final Map adapterMappings; private final AbstractLocalFileIngestDriver localFileIngestDriver; public IngestTask( final String id, final LocalIngestRunData runData, final Map specifiedPrimaryIndexes, final Map requiredIndexMap, final BlockingQueue> queue, final AbstractLocalFileIngestDriver localFileIngestDriver) { this.id = id; this.runData = runData; this.specifiedPrimaryIndexes = specifiedPrimaryIndexes; this.requiredIndexMap = requiredIndexMap; this.localFileIngestDriver = localFileIngestDriver; readQueue = queue; indexWriters = new HashMap<>(); adapterMappings = new HashMap<>(); } /** This function is called by the thread placing items on the blocking queue. */ public void terminate() { isTerminated = true; } /** * An identifier, usually (filename)-(counter) * * @return the ID of the task */ public String getId() { return id; } /** * Whether this worker has terminated. * * @return {@code true} if the task is finished */ public boolean isFinished() { return isFinished; } /** * This function will continue to read from the BlockingQueue until isTerminated is true and the * queue is empty. */ @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void run() { int count = 0; long dbWriteMs = 0L; try { LOGGER.debug(String.format("Worker executing for plugin [%s]", getId())); while (true) { final GeoWaveData geowaveData = readQueue.poll(100, TimeUnit.MILLISECONDS); if (geowaveData == null) { if (isTerminated && (readQueue.size() == 0)) { // Done! break; } // Didn't receive an item. Make sure we haven't been // terminated. 
LOGGER.debug(String.format("Worker waiting for item [%s]", getId())); continue; } final DataTypeAdapter adapter = runData.getDataAdapter(geowaveData); if (adapter == null) { LOGGER.warn( String.format( "Adapter not found for [%s] worker [%s]", geowaveData.getValue(), getId())); continue; } // Ingest the data! dbWriteMs += ingestData(geowaveData, adapter); count++; } } catch (final Exception e) { // This should really never happen, because we don't limit the // amount of items in the IndexWriter pool. LOGGER.error("Fatal error occured while trying to get an index writer.", e); throw new RuntimeException("Fatal error occured while trying to get an index writer.", e); } finally { // Clean up index writers for (final Entry writerEntry : indexWriters.entrySet()) { try { runData.releaseIndexWriter(writerEntry.getKey(), writerEntry.getValue()); } catch (final Exception e) { LOGGER.warn( String.format("Could not return index writer: [%s]", writerEntry.getKey()), e); } } LOGGER.debug( String.format( "Worker exited for plugin [%s]; Ingested %d items in %d seconds", getId(), count, (int) dbWriteMs / 1000)); isFinished = true; } } private long ingestData(final GeoWaveData geowaveData, final DataTypeAdapter adapter) throws Exception { final String typeName = adapter.getTypeName(); // Write the data to the data store. Writer writer = indexWriters.get(typeName); if (writer == null) { final List indices = new ArrayList<>(); for (final String indexName : geowaveData.getIndexNames()) { Index index = specifiedPrimaryIndexes.get(indexName); if (index == null) { index = requiredIndexMap.get(indexName); if (index == null) { LOGGER.warn( String.format( "Index '%s' not found for %s; worker [%s]", indexName, geowaveData.getValue(), getId())); continue; } } indices.add(index); } runData.addAdapter(adapter); // If we have the index checked out already, use that. 
writer = runData.getIndexWriter(typeName, indices); indexWriters.put(typeName, writer); } // Time the DB write final long hack = System.currentTimeMillis(); localFileIngestDriver.write(writer, geowaveData); final long durMs = System.currentTimeMillis() - hack; return durMs; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestUrlHandlerSpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.io.IOException; import java.nio.file.Path; import java.util.Properties; /** * This SPI interface is used to circumvent the need of core store to require HDFS or S3 libraries. * However, if libraries are on the classpath, it will handle URLs from hdfs and S3 appropriately. * * */ public interface IngestUrlHandlerSpi { public Path handlePath(String path, Properties configProperties) throws IOException; } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/IngestUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.io.IOException; import java.nio.file.Path; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; public class IngestUtils { private static final Logger LOGGER = LoggerFactory.getLogger(IngestUtils.class); private static List urlHandlerList = null; private static Map> localIngestPlugins = null; public static boolean checkIndexesAgainstProvider( final String providerName, final DataAdapterProvider adapterProvider, final List indices) { boolean valid = true; for (final Index index : indices) { if (!isCompatible(adapterProvider, index)) { // HP Fortify "Log Forging" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway LOGGER.warn( "Local file ingest plugin for ingest type '" + providerName + "' is not supported by index '" + index.getName() + "'"); valid = false; } } return valid; } /** * Determine whether an index is compatible with the visitor * * @param index an index that an ingest type supports * @return whether the adapter is compatible with the common index model */ public static boolean isCompatible( final DataAdapterProvider adapterProvider, final Index index) { final String[] supportedTypes = adapterProvider.getSupportedIndexTypes(); if ((supportedTypes == null) || (supportedTypes.length == 0)) { return false; } final NumericDimensionField[] requiredDimensions = 
index.getIndexModel().getDimensions(); for (final NumericDimensionField requiredDimension : requiredDimensions) { boolean fieldFound = false; for (final String supportedType : supportedTypes) { if (requiredDimension.getFieldName().equals(supportedType)) { fieldFound = true; break; } } if (!fieldFound) { return false; } } return true; } public static synchronized Path handleIngestUrl( final String ingestUrl, final Properties configProperties) throws IOException { if (urlHandlerList == null) { final Iterator handlers = new SPIServiceRegistry(IngestUrlHandlerSpi.class).load(IngestUrlHandlerSpi.class); urlHandlerList = Lists.newArrayList(handlers); } for (final IngestUrlHandlerSpi h : urlHandlerList) { final Path path = h.handlePath(ingestUrl, configProperties); if (path != null) { return path; } } return null; } public static synchronized Map> getDefaultLocalIngestPlugins() { if (localIngestPlugins == null) { final Iterator registries = new SPIServiceRegistry(LocalFileIngestPluginRegistrySpi.class).load( LocalFileIngestPluginRegistrySpi.class); localIngestPlugins = new HashMap<>(); while (registries.hasNext()) { localIngestPlugins.putAll(registries.next().getDefaultLocalIngestPlugins()); } } return localIngestPlugins; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalFileIngestPlugin.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.net.URL; /** * This is the primary plugin for directly ingesting data to GeoWave from local files. It will write * any GeoWaveData that is emitted for any supported file. * * @param The type of data to write to GeoWave */ public interface LocalFileIngestPlugin extends LocalPluginBase, IngestPluginBase, IndexProvider { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalFileIngestPluginRegistrySpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.util.Map; public interface LocalFileIngestPluginRegistrySpi { Map> getDefaultLocalIngestPlugins(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalIngestRunData.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.io.Closeable; import java.io.IOException; import java.util.List; import org.apache.commons.pool2.BaseKeyedPooledObjectFactory; import org.apache.commons.pool2.KeyedObjectPool; import org.apache.commons.pool2.PooledObject; import org.apache.commons.pool2.impl.DefaultPooledObject; import org.apache.commons.pool2.impl.GenericKeyedObjectPool; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryAdapterStore; import com.clearspring.analytics.util.Lists; /** * This class maintains a pool of index writers keyed by the primary index. In addition, it contains * a static method to help create the blocking queue needed by threads to execute ingest of * individual GeoWaveData items. */ public class LocalIngestRunData implements Closeable { private static class TypeNameKeyWithIndices { private final String typeName; private final Index[] indices; public TypeNameKeyWithIndices(final String typeName, final Index[] indices) { super(); this.typeName = typeName; this.indices = indices; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((typeName == null) ? 
0 : typeName.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final TypeNameKeyWithIndices other = (TypeNameKeyWithIndices) obj; if (typeName == null) { if (other.typeName != null) { return false; } } else if (!typeName.equals(other.typeName)) { return false; } return true; } } private final KeyedObjectPool indexWriterPool; private final TransientAdapterStore adapterStore; private final DataStore dataStore; private final VisibilityHandler visibilityHandler; public LocalIngestRunData( final List> adapters, final DataStore dataStore, final VisibilityHandler visibilityHandler) { this.dataStore = dataStore; this.visibilityHandler = visibilityHandler; indexWriterPool = new GenericKeyedObjectPool<>(new IndexWriterFactory()); adapterStore = new MemoryAdapterStore(adapters.toArray(new DataTypeAdapter[0])); } public DataTypeAdapter getDataAdapter(final GeoWaveData data) { return data.getAdapter(adapterStore); } public void addAdapter(final DataTypeAdapter adapter) { adapterStore.addAdapter(adapter); } /** * Return an index writer from the pool. The pool will create a new one if it doesn't exist. The * pool will not be cleaned up until the end. * * @param typeName the type being written * @param indices the indices to write to * @return the index writer * @throws Exception */ public Writer getIndexWriter(final String typeName, final List indices) throws Exception { return indexWriterPool.borrowObject( new TypeNameKeyWithIndices(typeName, indices.toArray(new Index[0]))); } /** * Return an index writer to the pool. 
* * @param typeName the type for the writer * @param writer the writer to return * @throws Exception */ public void releaseIndexWriter(final String typeName, final Writer writer) throws Exception { indexWriterPool.returnObject(new TypeNameKeyWithIndices(typeName, new Index[0]), writer); } @Override public void close() throws IOException { indexWriterPool.close(); } /** * A factory implementing the default Apache Commons Pool interface to return new instances of an * index writer for a given primary index. */ public class IndexWriterFactory extends BaseKeyedPooledObjectFactory { @Override public synchronized Writer create(final TypeNameKeyWithIndices adapterWithIndices) throws Exception { dataStore.addType( adapterStore.getAdapter(adapterWithIndices.typeName), visibilityHandler, Lists.newArrayList(), adapterWithIndices.indices); return dataStore.createWriter(adapterWithIndices.typeName, visibilityHandler); } @Override public void destroyObject(final TypeNameKeyWithIndices key, final PooledObject p) throws Exception { super.destroyObject(key, p); p.getObject().close(); } @Override public PooledObject wrap(final Writer writer) { return new DefaultPooledObject<>(writer); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalInputCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.ingest;

import java.io.Serializable;
import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.Parameter;

/**
 * This class encapsulates all of the options and parsed values specific to directing the ingestion
 * framework to a local file system. The user must set an input file or directory and can set a list
 * of extensions to narrow the ingestion to. The process will recurse a directory and filter by the
 * extensions if provided.
 */
// NOTE(review): the type argument on IStringConverter was lost in extraction; restored as
// IStringConverter<String[]> to match the converted field type.
public class LocalInputCommandLineOptions implements Serializable {
  private static final long serialVersionUID = 1L;

  @Parameter(
      names = {"-x", "--extension"},
      description = "individual or comma-delimited set of file extensions to accept (optional)",
      converter = SplitConverter.class)
  private String[] extensions;

  @Parameter(
      names = {"-f", "--formats"},
      description = "Explicitly set the ingest formats by name (or multiple comma-delimited formats), if not set all available ingest formats will be used")
  private String formats;

  public String[] getExtensions() {
    return extensions;
  }

  public String getFormats() {
    return formats;
  }

  /** Splits a comma-delimited argument value into an array of extensions. */
  public static class SplitConverter implements IStringConverter<String[]> {
    @Override
    public String[] convert(final String value) {
      return value.trim().split(",");
    }
  }

  public void setExtensions(final String[] extensions) {
    this.extensions = extensions;
  }

  public void setFormats(final String formats) {
    this.formats = formats;
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalPluginBase.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.ingest;

import java.net.URL;

/**
 * This is a base interface for any plugin that reads files from a local file system. The plugin
 * gets an init call at the start of ingestion with the base directory, and can filter files based
 * on extension or act on a file-by-file basis for anything more complex.
 */
public interface LocalPluginBase {
  /**
   * Gets a list of file extensions that this plugin supports. If not provided, this plugin will
   * accept all file extensions.
   *
   * @return The array of file extensions supported ('.' is unnecessary)
   */
  public String[] getFileExtensionFilters();

  /**
   * Initialize the plugin and give it the base directory that is provided by the user.
   *
   * @param url The base directory provided as a command-line argument (if the argument is a file,
   *        the base directory given will be its parent directory).
   */
  public void init(URL url);

  /**
   * This method will be called for every file that matches the given extensions. It is an
   * opportunity for the plugin to perform arbitrarily complex acceptance filtering on a per file
   * basis, but it is important to understand performance implications if the acceptance test is too
   * intensive and the directory of files to recurse is large.
   *
   * @param file The file to determine if this plugin supports for ingestion
   * @return Whether the file is supported or not
   */
  public boolean supportsFile(URL file);
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/ingest/LocalPluginFileVisitor.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.ingest; import java.io.IOException; import java.net.URL; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is used by any local file driver to recurse a directory of files. It will provide the * plugin with any supported file with the appropriate extension within a directory structure. * * @param

the type of the plugin * @param the type for intermediate data that can be used throughout the life of the file * recursion */ public class LocalPluginFileVisitor

implements FileVisitor { private static final Logger LOGGER = LoggerFactory.getLogger(LocalPluginFileVisitor.class); public static class PluginVisitor

{ private final Pattern pattern; private final String typeName; private final P localPluginBase; public PluginVisitor( final P localPluginBase, final String typeName, final String[] userExtensions) { final String[] combinedExtensions = ArrayUtils.addAll(localPluginBase.getFileExtensionFilters(), userExtensions); if ((combinedExtensions != null) && (combinedExtensions.length > 0)) { final String[] lowerCaseExtensions = new String[combinedExtensions.length]; for (int i = 0; i < combinedExtensions.length; i++) { lowerCaseExtensions[i] = combinedExtensions[i].toLowerCase(Locale.ENGLISH); } final String extStr = String.format("([^\\s]+(\\.(?i)(%s))$)", StringUtils.join(lowerCaseExtensions, "|")); pattern = Pattern.compile(extStr); } else { pattern = null; } this.localPluginBase = localPluginBase; this.typeName = typeName; } public P getLocalPluginBase() { return localPluginBase; } public Pattern getPattern() { return pattern; } public String getTypeName() { return typeName; } public boolean supportsFile(final URL file) { if ((pattern != null) && !pattern.matcher(file.getFile().toLowerCase(Locale.ENGLISH)).matches()) { return false; } else if (!localPluginBase.supportsFile(file)) { return false; } return true; } } private final AbstractLocalFileDriver driver; private final List> pluginVisitors; private final R runData; public LocalPluginFileVisitor( final Map localPlugins, final AbstractLocalFileDriver driver, final R runData, final String[] userExtensions) { pluginVisitors = new ArrayList<>(localPlugins.size()); for (final Entry localPluginBase : localPlugins.entrySet()) { pluginVisitors.add( new PluginVisitor<>( localPluginBase.getValue(), localPluginBase.getKey(), userExtensions)); } this.driver = driver; this.runData = runData; } @Override public FileVisitResult postVisitDirectory(final Path path, final IOException e) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult preVisitDirectory(final Path path, final BasicFileAttributes 
bfa) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(final Path path, final BasicFileAttributes bfa) throws IOException { final URL file = path.toUri().toURL(); for (final PluginVisitor

visitor : pluginVisitors) { if (visitor.supportsFile(file)) { driver.processFile(file, visitor.typeName, visitor.localPluginBase, runData); } } return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFileFailed(final Path path, final IOException bfa) throws IOException { LOGGER.error("Cannot visit path: " + path); return FileVisitResult.CONTINUE; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryAdapterIndexMappingStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.memory;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;

/**
 * Simple in-memory implementation of {@link AdapterIndexMappingStore}, keyed first by internal
 * adapter id and then by index name.
 */
public class MemoryAdapterIndexMappingStore implements AdapterIndexMappingStore {
  // internal adapter id -> (index name -> mapping); both map levels are synchronized wrappers
  private final Map<Short, Map<String, AdapterToIndexMapping>> indexMappings;

  public MemoryAdapterIndexMappingStore() {
    indexMappings = Collections.synchronizedMap(new HashMap<>());
  }

  /**
   * @return all mappings for the given adapter, or {@code null} if the adapter has none
   */
  @Override
  public AdapterToIndexMapping[] getIndicesForAdapter(final short internalAdapterId) {
    final Map<String, AdapterToIndexMapping> adapterMappings =
        indexMappings.get(internalAdapterId);
    if (adapterMappings != null) {
      final Collection<AdapterToIndexMapping> mappings = adapterMappings.values();
      return mappings.toArray(new AdapterToIndexMapping[mappings.size()]);
    }
    return null;
  }

  /**
   * @return the mapping for the given adapter/index pair, or {@code null} if none exists
   */
  @Override
  public AdapterToIndexMapping getMapping(final short adapterId, final String indexName) {
    final Map<String, AdapterToIndexMapping> adapterMappings = indexMappings.get(adapterId);
    return (adapterMappings == null) ? null : adapterMappings.get(indexName);
  }

  @Override
  public void addAdapterIndexMapping(final AdapterToIndexMapping mapping) {
    // lazily create the per-adapter map on first use
    indexMappings.computeIfAbsent(
        mapping.getAdapterId(),
        k -> Collections.synchronizedMap(new HashMap<>())).put(
            mapping.getIndexName(),
            mapping);
  }

  @Override
  public void remove(final short adapterId) {
    indexMappings.remove(adapterId);
  }

  /**
   * @return {@code true} only if a mapping was actually removed
   */
  @Override
  public boolean remove(final short adapterId, final String indexName) {
    final Map<String, AdapterToIndexMapping> adapterMappings = indexMappings.get(adapterId);
    return (adapterMappings != null) && (adapterMappings.remove(indexName) != null);
  }

  @Override
  public void removeAll() {
    indexMappings.clear();
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryAdapterStore.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import java.io.IOException; import java.io.Serializable; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.TransientAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * This is a simple HashMap based in-memory implementation of the AdapterStore and can be useful if * it is undesirable to persist and query objects within another storage mechanism such as an * Accumulo table. */ public class MemoryAdapterStore implements TransientAdapterStore, Serializable { /** */ private static final long serialVersionUID = 1L; private Map> adapterMap; public MemoryAdapterStore() { adapterMap = Collections.synchronizedMap(new HashMap>()); } public MemoryAdapterStore(final DataTypeAdapter[] adapters) { adapterMap = Collections.synchronizedMap(new HashMap>()); for (final DataTypeAdapter adapter : adapters) { adapterMap.put(adapter.getTypeName(), adapter); } } @Override public void addAdapter(final DataTypeAdapter adapter) { adapterMap.put(adapter.getTypeName(), adapter); } @Override public DataTypeAdapter getAdapter(final String typeName) { return adapterMap.get(typeName); } @Override public boolean adapterExists(final String typeName) { return adapterMap.containsKey(typeName); } @Override public DataTypeAdapter[] getAdapters() { return adapterMap.values().toArray(new DataTypeAdapter[adapterMap.size()]); } @Override public void removeAll() { adapterMap.clear(); } private void writeObject(final 
java.io.ObjectOutputStream out) throws IOException { final int count = adapterMap.size(); out.writeInt(count); for (final Map.Entry> entry : adapterMap.entrySet()) { out.writeUTF(entry.getKey()); final byte[] val = PersistenceUtils.toBinary(entry.getValue()); out.writeObject(val); } } private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { final int count = in.readInt(); adapterMap = Collections.synchronizedMap(new HashMap>()); for (int i = 0; i < count; i++) { final String id = in.readUTF(); final byte[] data = (byte[]) in.readObject(); adapterMap.put(id, (DataTypeAdapter) PersistenceUtils.fromBinary(data)); } } @Override public void removeAdapter(final String typeName) { adapterMap.remove(typeName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryDataStoreOperations.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.memory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.lang3.ArrayUtils;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.SinglePartitionQueryRanges;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.BaseDataStoreOptions;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.data.DeferredReadCommonIndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;
import org.locationtech.geowave.core.store.data.PersistentDataset;
import org.locationtech.geowave.core.store.data.UnreadFieldDataList;
import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveMetadata;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;
import org.locationtech.geowave.core.store.flatten.FlattenedUnreadData;
import org.locationtech.geowave.core.store.metadata.AbstractGeoWavePersistence;
import org.locationtech.geowave.core.store.metadata.MetadataIterators;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.MetadataDeleter;
import org.locationtech.geowave.core.store.operations.MetadataQuery;
import org.locationtech.geowave.core.store.operations.MetadataReader;
import org.locationtech.geowave.core.store.operations.MetadataType;
import org.locationtech.geowave.core.store.operations.MetadataWriter;
import org.locationtech.geowave.core.store.operations.ReaderParams;
import org.locationtech.geowave.core.store.operations.RowDeleter;
import org.locationtech.geowave.core.store.operations.RowReader;
import org.locationtech.geowave.core.store.operations.RowWriter;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.primitives.UnsignedBytes;

/**
 * In-memory implementation of {@link DataStoreOperations}. Rows are kept in sorted sets keyed by
 * index name and metadata in sorted sets keyed by metadata type, mimicking the key ordering of a
 * real GeoWave backend without any external storage.
 */
public class MemoryDataStoreOperations implements DataStoreOperations {
  private static final Logger LOGGER = LoggerFactory.getLogger(MemoryDataStoreOperations.class);
  // index name -> sorted rows for that index
  private final Map<String, SortedSet<MemoryStoreEntry>> storeData =
      Collections.synchronizedMap(new HashMap<>());
  // metadata type -> sorted metadata entries
  private final Map<MetadataType, SortedSet<MemoryMetadataEntry>> metadataStore =
      Collections.synchronizedMap(new HashMap<>());
  private final DataStoreOptions options;

  public MemoryDataStoreOperations() {
    this(new BaseDataStoreOptions() {
      @Override
      public boolean isServerSideLibraryEnabled() {
        // memory datastore doesn't have a serverside option
        return false;
      }
    });
  }

  public MemoryDataStoreOperations(final DataStoreOptions options) {
    this.options = options;
  }

  @Override
  public boolean indexExists(final String indexName) throws IOException {
    if (AbstractGeoWavePersistence.METADATA_TABLE.equals(indexName)) {
      return !metadataStore.isEmpty();
    }
    return storeData.containsKey(indexName);
  }

  @Override
  public void deleteAll() throws Exception {
    storeData.clear();
    metadataStore.clear();
  }

  @Override
  public boolean deleteAll(
      final String tableName,
      final String typeName,
      final Short internalAdapterId,
      final String... additionalAuthorizations) {
    // targeted table deletion is not supported by the memory store
    return false;
  }

  @Override
  public RowWriter createWriter(final Index index, final InternalDataAdapter<?> adapter) {
    return new MyIndexWriter(index.getName());
  }

  @Override
  public RowDeleter createRowDeleter(
      final String indexName,
      final PersistentAdapterStore adapterStore,
      final InternalAdapterStore internalAdapterStore,
      final String... authorizations) {
    return new MyIndexDeleter(indexName, authorizations);
  }

  /** Gets (lazily creating) the sorted row set backing the given index. */
  protected SortedSet<MemoryStoreEntry> getRowsForIndex(final String id) {
    SortedSet<MemoryStoreEntry> set = storeData.get(id);
    if (set == null) {
      set = Collections.synchronizedSortedSet(new TreeSet<>());
      storeData.put(id, set);
    }
    return set;
  }

  @Override
  public <T> RowReader<T> createReader(final ReaderParams<T> readerParams) {
    SortedSet<MemoryStoreEntry> internalData = storeData.get(readerParams.getIndex().getName());
    if (internalData == null) {
      // nothing has been written to this index yet; read from an empty set rather than NPE
      internalData = new TreeSet<>();
    }
    int counter = 0;
    List<MemoryStoreEntry> retVal = new ArrayList<>();
    final Collection<SinglePartitionQueryRanges> partitionRanges =
        readerParams.getQueryRanges().getPartitionQueryRanges();
    if ((partitionRanges == null) || partitionRanges.isEmpty()) {
      retVal.addAll(internalData);
      // remove unauthorized
      final Iterator<MemoryStoreEntry> it = retVal.iterator();
      while (it.hasNext()) {
        if (!isAuthorized(it.next(), readerParams.getAdditionalAuthorizations())) {
          it.remove();
        }
      }
      if ((readerParams.getLimit() != null)
          && (readerParams.getLimit() > 0)
          && (retVal.size() > readerParams.getLimit())) {
        retVal = retVal.subList(0, readerParams.getLimit());
      }
    } else {
      for (final SinglePartitionQueryRanges p : partitionRanges) {
        for (final ByteArrayRange r : p.getSortKeyRanges()) {
          final SortedSet<MemoryStoreEntry> set;
          if (r.isSingleValue()) {
            set =
                Sets.newTreeSet(
                    internalData.subSet(
                        new MemoryStoreEntry(p.getPartitionKey(), r.getStart()),
                        new MemoryStoreEntry(
                            p.getPartitionKey(),
                            ByteArrayUtils.getNextPrefix(r.getStart()))));
          } else {
            set =
                Sets.newTreeSet(
                    internalData.tailSet(
                        new MemoryStoreEntry(p.getPartitionKey(), r.getStart())).headSet(
                            new MemoryStoreEntry(p.getPartitionKey(), r.getEndAsNextPrefix())));
          }
          // remove unauthorized rows and rows belonging to other adapters
          final Iterator<MemoryStoreEntry> it = set.iterator();
          while (it.hasNext()) {
            final MemoryStoreEntry entry = it.next();
            if (!isAuthorized(entry, readerParams.getAdditionalAuthorizations())) {
              it.remove();
            } else if (!ArrayUtils.contains(
                readerParams.getAdapterIds(),
                entry.row.getAdapterId())) {
              it.remove();
            }
          }
          // NOTE(review): these breaks only exit the sort-key-range loop; the partition loop
          // continues, matching the original control flow — confirm whether the limit is meant to
          // apply across partitions
          if ((readerParams.getLimit() != null)
              && (readerParams.getLimit() > 0)
              && ((counter + set.size()) > readerParams.getLimit())) {
            final List<MemoryStoreEntry> subset = new ArrayList<>(set);
            retVal.addAll(subset.subList(0, readerParams.getLimit() - counter));
            break;
          } else {
            retVal.addAll(set);
            counter += set.size();
            if ((readerParams.getLimit() != null)
                && (readerParams.getLimit() > 0)
                && (counter >= readerParams.getLimit())) {
              break;
            }
          }
        }
      }
    }
    return new MyIndexReader<>(
        Iterators.filter(retVal.iterator(), new Predicate<MemoryStoreEntry>() {
          @Override
          public boolean apply(final MemoryStoreEntry input) {
            // only apply the filter when server-side filtering would normally have applied it
            if ((readerParams.getFilter() != null) && options.isServerSideLibraryEnabled()) {
              final PersistentDataset<Object> commonData = new MultiFieldPersistentDataset<>();
              final List<FlattenedUnreadData> unreadData = new ArrayList<>();
              final List<String> commonIndexFieldNames =
                  DataStoreUtils.getUniqueDimensionFields(readerParams.getIndex().getIndexModel());
              for (final GeoWaveValue v : input.getRow().getFieldValues()) {
                unreadData.add(
                    DataStoreUtils.aggregateFieldData(
                        input.getRow(),
                        v,
                        commonData,
                        readerParams.getIndex().getIndexModel(),
                        commonIndexFieldNames));
              }
              return readerParams.getFilter().accept(
                  readerParams.getIndex().getIndexModel(),
                  new DeferredReadCommonIndexedPersistenceEncoding(
                      input.getRow().getAdapterId(),
                      input.getRow().getDataId(),
                      input.getRow().getPartitionKey(),
                      input.getRow().getSortKey(),
                      input.getRow().getNumberOfDuplicates(),
                      commonData,
                      unreadData.isEmpty() ? null : new UnreadFieldDataList(unreadData)));
            }
            return true;
          }
        }),
        readerParams.getRowTransformer());
  }

  /** A row is authorized only if every field value's visibility passes the authorizations. */
  private boolean isAuthorized(final MemoryStoreEntry row, final String... authorizations) {
    for (final GeoWaveValue value : row.getRow().getFieldValues()) {
      if (!MemoryStoreUtils.isAuthorized(value.getVisibility(), authorizations)) {
        return false;
      }
    }
    return true;
  }

  private static class MyIndexReader<T> implements RowReader<T> {
    private final Iterator<T> it;

    public MyIndexReader(
        final Iterator<MemoryStoreEntry> it,
        final GeoWaveRowIteratorTransformer<T> rowTransformer) {
      super();
      this.it = rowTransformer.apply(Iterators.transform(it, e -> e.row));
    }

    @Override
    public void close() {}

    @Override
    public boolean hasNext() {
      return it.hasNext();
    }

    @Override
    public T next() {
      return it.next();
    }
  }

  private class MyIndexWriter implements RowWriter {
    final String indexName;

    public MyIndexWriter(final String indexName) {
      super();
      this.indexName = indexName;
    }

    @Override
    public void close() throws IOException {}

    @Override
    public void flush() {
      try {
        close();
      } catch (final IOException e) {
        LOGGER.error("Error closing index writer", e);
      }
    }

    @Override
    public void write(final GeoWaveRow[] rows) {
      for (final GeoWaveRow r : rows) {
        write(r);
      }
    }

    @Override
    public void write(final GeoWaveRow row) {
      SortedSet<MemoryStoreEntry> rowTreeSet = storeData.get(indexName);
      if (rowTreeSet == null) {
        rowTreeSet = new TreeSet<>();
        storeData.put(indexName, rowTreeSet);
      }
      // last write wins: drop any existing entry with the same key before adding
      final MemoryStoreEntry entry = new MemoryStoreEntry(row);
      rowTreeSet.remove(entry);
      if (!rowTreeSet.add(entry)) {
        LOGGER.warn("Unable to add new entry");
      }
    }
  }

  private class MyIndexDeleter implements RowDeleter {
    private final String indexName;
    private final String[] authorizations;

    public MyIndexDeleter(final String indexName, final String... authorizations) {
      this.indexName = indexName;
      this.authorizations = authorizations;
    }

    @Override
    public void close() {}

    @Override
    public void delete(final GeoWaveRow row) {
      final MemoryStoreEntry entry = new MemoryStoreEntry(row);
      if (isAuthorized(entry, authorizations)) {
        final SortedSet<MemoryStoreEntry> rowTreeSet = storeData.get(indexName);
        if (rowTreeSet != null) {
          if (!rowTreeSet.remove(entry)) {
            LOGGER.warn("Unable to remove entry");
          }
        }
      }
    }

    @Override
    public void flush() {
      // Do nothing, delete is done immediately.
    }
  }

  /**
   * Sorted-set element wrapping a {@link GeoWaveRow}; ordering is by composite insertion id, then
   * data id, then adapter id, all compared lexicographically as unsigned bytes.
   */
  public static class MemoryStoreEntry implements Comparable<MemoryStoreEntry> {
    private final GeoWaveRow row;

    /** Builds a probe entry used only as a comparison bound for range scans. */
    public MemoryStoreEntry(final byte[] comparisonPartitionKey, final byte[] comparisonSortKey) {
      row =
          new GeoWaveRowImpl(
              new GeoWaveKeyImpl(
                  new byte[] {0},
                  (short) 0,
                  comparisonPartitionKey,
                  comparisonSortKey,
                  0),
              null);
    }

    public MemoryStoreEntry(final GeoWaveRow row) {
      this.row = row;
    }

    public GeoWaveRow getRow() {
      return row;
    }

    public byte[] getCompositeInsertionId() {
      return ((GeoWaveKeyImpl) ((GeoWaveRowImpl) row).getKey()).getCompositeInsertionId();
    }

    @Override
    public int compareTo(final MemoryStoreEntry other) {
      final int indexIdCompare =
          UnsignedBytes.lexicographicalComparator().compare(
              getCompositeInsertionId(),
              other.getCompositeInsertionId());
      if (indexIdCompare != 0) {
        return indexIdCompare;
      }
      final int dataIdCompare =
          UnsignedBytes.lexicographicalComparator().compare(
              row.getDataId(),
              other.getRow().getDataId());
      if (dataIdCompare != 0) {
        return dataIdCompare;
      }
      final int adapterIdCompare =
          UnsignedBytes.lexicographicalComparator().compare(
              ByteArrayUtils.shortToByteArray(row.getAdapterId()),
              ByteArrayUtils.shortToByteArray(other.getRow().getAdapterId()));
      if (adapterIdCompare != 0) {
        return adapterIdCompare;
      }
      return 0;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((row == null) ? 0 : row.hashCode());
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final MemoryStoreEntry other = (MemoryStoreEntry) obj;
      if (row == null) {
        if (other.row != null) {
          return false;
        }
      }
      // equality is deliberately consistent with compareTo
      return compareTo(other) == 0;
    }
  }

  @Override
  public MetadataWriter createMetadataWriter(final MetadataType metadataType) {
    return new MyMetadataWriter(metadataType);
  }

  @Override
  public MetadataReader createMetadataReader(final MetadataType metadataType) {
    return new MyMetadataReader(metadataType);
  }

  @Override
  public MetadataDeleter createMetadataDeleter(final MetadataType metadataType) {
    return new MyMetadataDeleter(metadataType);
  }

  private class MyMetadataReader implements MetadataReader {
    protected final MetadataType type;

    public MyMetadataReader(final MetadataType type) {
      super();
      this.type = type;
    }

    @Override
    public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {
      final SortedSet<MemoryMetadataEntry> typeStore = metadataStore.get(type);
      if (typeStore == null) {
        return new CloseableIterator.Empty<>();
      }
      // high-valued bytes used as exclusive upper bounds for visibility/value/uuid components;
      // "this should be sufficient" per the original implementation
      final byte[] maxBound =
          new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
      final byte[] maxUuidBound =
          new byte[] {
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF};
      final SortedSet<MemoryMetadataEntry> set;
      if (query.hasPrimaryIdRanges()) {
        set =
            new TreeSet<>(
                Arrays.stream(query.getPrimaryIdRanges()).flatMap(
                    r -> typeStore.subSet(
                        new MemoryMetadataEntry(
                            new GeoWaveMetadata(r.getStart(), query.getSecondaryId(), null, null),
                            null),
                        new MemoryMetadataEntry(
                            new GeoWaveMetadata(
                                r.getEndAsNextPrefix(),
                                getNextPrefix(query.getSecondaryId()),
                                maxBound,
                                maxBound),
                            maxUuidBound)).stream()).collect(Collectors.toSet()));
      } else {
        set =
            typeStore.subSet(
                new MemoryMetadataEntry(
                    new GeoWaveMetadata(query.getPrimaryId(), query.getSecondaryId(), null, null),
                    null),
                new MemoryMetadataEntry(
                    new GeoWaveMetadata(
                        getNextPrefix(query.getPrimaryId()),
                        getNextPrefix(query.getSecondaryId()),
                        maxBound,
                        maxBound),
                    maxUuidBound));
      }
      Iterator<MemoryMetadataEntry> it = set.iterator();
      if ((query.getAuthorizations() != null) && (query.getAuthorizations().length > 0)) {
        it =
            Iterators.filter(
                it,
                input -> MemoryStoreUtils.isAuthorized(
                    input.getMetadata().getVisibility(),
                    query.getAuthorizations()));
      }
      final Iterator<GeoWaveMetadata> itTransformed =
          Iterators.transform(
              it,
              input -> new GeoWaveMetadataWithUUID(
                  input.metadata.getPrimaryId(),
                  input.metadata.getSecondaryId(),
                  input.metadata.getVisibility(),
                  input.metadata.getValue(),
                  input.uuidBytes));
      // convert to and from array just to avoid concurrent modification issues on the iterator
      // that is linked back to the metadataStore sortedSet (basically clone the iterator, so for
      // example deletes can occur while iterating through this query result)
      final CloseableIterator<GeoWaveMetadata> converted =
          new MemoryMetadataFilteringIterator(
              new CloseableIterator.Wrapper<>(
                  Iterators.forArray(Iterators.toArray(itTransformed, GeoWaveMetadata.class))),
              query);
      if (type.isStatValues()) {
        return MetadataIterators.clientVisibilityFilter(converted, query.getAuthorizations());
      }
      return converted;
    }
  }

  private static final byte[] AMPERSAND = StringUtils.stringToBinary("&");

  /** Joins two visibility expressions with '&'; returns the non-empty one when only one is set. */
  private static byte[] combineVisibilities(final byte[] vis1, final byte[] vis2) {
    if ((vis1 == null) || (vis1.length == 0)) {
      return vis2;
    }
    if ((vis2 == null) || (vis2.length == 0)) {
      return vis1;
    }
    return ArrayUtils.addAll(ArrayUtils.addAll(vis1, AMPERSAND), vis2);
  }

  private static byte[] getNextPrefix(final byte[] bytes) {
    if (bytes == null) {
      // this is simply for memory data store test purposes and is just an attempt to go to the
      // end of the memory datastore table
      return new byte[] {
          (byte) 0xFF,
          (byte) 0xFF,
          (byte) 0xFF,
          (byte) 0xFF,
          (byte) 0xFF,
          (byte) 0xFF,
          (byte) 0xFF,};
    }
    return new ByteArray(bytes).getNextPrefix();
  }

  private class MyMetadataWriter implements MetadataWriter {
    private final MetadataType type;

    public MyMetadataWriter(final MetadataType type) {
      super();
      this.type = type;
    }

    @Override
    public void close() throws IOException {}

    @Override
    public void flush() {
      try {
        close();
      } catch (final IOException e) {
        LOGGER.error("Error closing metadata writer", e);
      }
    }

    @Override
    public void write(final GeoWaveMetadata metadata) {
      SortedSet<MemoryMetadataEntry> typeStore = metadataStore.get(type);
      if (typeStore == null) {
        typeStore = new TreeSet<>();
        metadataStore.put(type, typeStore);
      }
      // last write wins: drop any identical entry before adding
      final MemoryMetadataEntry entry = new MemoryMetadataEntry(metadata);
      typeStore.remove(entry);
      if (!typeStore.add(entry)) {
        LOGGER.warn("Unable to add new metadata");
      }
    }
  }

  private class MyMetadataDeleter extends MyMetadataReader implements MetadataDeleter {
    public MyMetadataDeleter(final MetadataType type) {
      super(type);
    }

    @Override
    public void close() throws Exception {}

    @Override
    public boolean delete(final MetadataQuery query) {
      // materialize matches first so removal doesn't disturb the live query iterator
      final List<GeoWaveMetadata> toRemove = new ArrayList<>();
      try (CloseableIterator<GeoWaveMetadata> it = query(query)) {
        while (it.hasNext()) {
          toRemove.add(it.next());
        }
      }
      for (final GeoWaveMetadata r : toRemove) {
        metadataStore.get(type).remove(
            new MemoryMetadataEntry(r, ((GeoWaveMetadataWithUUID) r).uuidBytes));
      }
      return true;
    }

    @Override
    public void flush() {}
  }

  /**
   * Sorted-set element wrapping {@link GeoWaveMetadata}; a random UUID is appended to the sort key
   * purely so duplicate metadata values can coexist in the tree.
   */
  public static class MemoryMetadataEntry implements Comparable<MemoryMetadataEntry> {
    private final GeoWaveMetadata metadata;
    // this is just to allow storing duplicates in the treemap
    private final byte[] uuidBytes;

    public MemoryMetadataEntry(final GeoWaveMetadata metadata) {
      this(metadata, UUID.randomUUID().toString().getBytes(StringUtils.getGeoWaveCharset()));
    }

    public MemoryMetadataEntry(final GeoWaveMetadata metadata, final byte[] uuidBytes) {
      this.metadata = metadata;
      this.uuidBytes = uuidBytes;
    }

    public GeoWaveMetadata getMetadata() {
      return metadata;
    }

    @Override
    public int compareTo(final MemoryMetadataEntry other) {
      final Comparator<byte[]> lexyWithNullHandling =
          Ordering.from(UnsignedBytes.lexicographicalComparator()).nullsFirst();
      final int primaryIdCompare =
          lexyWithNullHandling.compare(metadata.getPrimaryId(), other.metadata.getPrimaryId());
      if (primaryIdCompare != 0) {
        return primaryIdCompare;
      }
      final int secondaryIdCompare =
          lexyWithNullHandling.compare(metadata.getSecondaryId(), other.metadata.getSecondaryId());
      if (secondaryIdCompare != 0) {
        return secondaryIdCompare;
      }
      final int visibilityCompare =
          lexyWithNullHandling.compare(metadata.getVisibility(), other.metadata.getVisibility());
      if (visibilityCompare != 0) {
        return visibilityCompare;
      }
      // this is just to allow storing duplicates in the treemap
      return lexyWithNullHandling.compare(uuidBytes, other.uuidBytes);
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((metadata == null) ? 0 : metadata.hashCode());
      result = (prime * result) + Arrays.hashCode(uuidBytes);
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final MemoryMetadataEntry other = (MemoryMetadataEntry) obj;
      if (metadata == null) {
        if (other.metadata != null) {
          return false;
        }
      }
      return compareTo(other) == 0;
    }
  }

  @Override
  public boolean metadataExists(final MetadataType type) throws IOException {
    return metadataStore.containsKey(type);
  }

  /** Metadata tagged with the tree-breaking UUID so deletes can target the exact entry. */
  public static class GeoWaveMetadataWithUUID extends GeoWaveMetadata {
    byte[] uuidBytes;

    public GeoWaveMetadataWithUUID(
        final byte[] primaryId,
        final byte[] secondaryId,
        final byte[] visibility,
        final byte[] value,
        final byte[] uuidBytes) {
      super(primaryId, secondaryId, visibility, value);
      this.uuidBytes = uuidBytes;
    }

    @Override
    public boolean equals(final Object o) {
      if (this == o) {
        return true;
      }
      if ((o == null) || (getClass() != o.getClass())) {
        return false;
      }
      if (!super.equals(o)) {
        return false;
      }
      final GeoWaveMetadataWithUUID that = (GeoWaveMetadataWithUUID) o;
      return Arrays.equals(uuidBytes, that.uuidBytes);
    }

    @Override
    public int hashCode() {
      int result = super.hashCode();
      result = (31 * result) + Arrays.hashCode(uuidBytes);
      return result;
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryFactoryHelper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.operations.DataStoreOperations; public class MemoryFactoryHelper implements StoreFactoryHelper { // this operations cache is essential to re-using the same objects in memory private static final Map OPERATIONS_CACHE = new HashMap<>(); /** * Return the default options instance. This is actually a method that should be implemented by * the individual factories, but is placed here since it's the same. * * @return the default options instance */ @Override public StoreFactoryOptions createOptionsInstance() { return new MemoryRequiredOptions(); } @Override public DataStoreOperations createOperations(final StoreFactoryOptions options) { synchronized (OPERATIONS_CACHE) { DataStoreOperations operations = OPERATIONS_CACHE.get(options.getGeoWaveNamespace()); if (operations == null) { operations = new MemoryDataStoreOperations(options.getStoreOptions()); OPERATIONS_CACHE.put(options.getGeoWaveNamespace(), operations); } return operations; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryIndexStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.IndexStore; /** * This is a simple HashMap based in-memory implementation of the IndexStore and can be useful if it * is undesirable to persist and query objects within another storage mechanism such as an accumulo * table. */ public class MemoryIndexStore implements IndexStore { private final Map indexMap = Collections.synchronizedMap(new HashMap()); public MemoryIndexStore() {} public MemoryIndexStore(final Index[] initialIndices) { for (final Index index : initialIndices) { addIndex(index); } } @Override public void addIndex(final Index index) { indexMap.put(index.getName(), index); } @Override public Index getIndex(final String indexName) { return indexMap.get(indexName); } @Override public boolean indexExists(final String indexName) { return indexMap.containsKey(indexName); } @Override public CloseableIterator getIndices() { return new CloseableIterator.Wrapper<>(new ArrayList<>(indexMap.values()).iterator()); } @Override public void removeAll() { indexMap.clear(); } @Override public void removeIndex(final String indexName) { if (indexExists(indexName)) { indexMap.remove(indexName); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryMetadataFilteringIterator.java ================================================ /** 
* Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.memory;

import java.util.Arrays;
import java.util.NoSuchElementException;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.entities.GeoWaveMetadata;
import org.locationtech.geowave.core.store.operations.MetadataQuery;

/**
 * Filters memory metadata based on the given metadata query. Supports exact primary-id matching as
 * well as prefix matching (when the query's {@code isPrefix()} flag is set), optionally combined
 * with an exact secondary-id match.
 */
public class MemoryMetadataFilteringIterator implements CloseableIterator<GeoWaveMetadata> {
  private final CloseableIterator<GeoWaveMetadata> source;
  private final MetadataQuery query;
  // computed once up front so the per-entry filter avoids a repeated null check
  private final boolean hasSecondaryId;
  // look-ahead element; null means "not yet computed or exhausted"
  private GeoWaveMetadata next = null;

  /**
   * @param source the unfiltered metadata iterator (closed when this iterator is closed)
   * @param query the filter criteria (primary id, secondary id, prefix flag)
   */
  public MemoryMetadataFilteringIterator(
      final CloseableIterator<GeoWaveMetadata> source,
      final MetadataQuery query) {
    this.source = source;
    this.query = query;
    this.hasSecondaryId = query.getSecondaryId() != null;
  }

  /** Returns true when the query has no secondary id or the entry's secondary id matches. */
  private boolean secondaryIdMatches(final GeoWaveMetadata metadata) {
    return !hasSecondaryId || Arrays.equals(metadata.getSecondaryId(), query.getSecondaryId());
  }

  /** Exact-match filter: primary id (if queried) must be byte-equal. */
  private boolean passesExactFilter(final GeoWaveMetadata metadata) {
    return (!query.hasPrimaryId() || Arrays.equals(metadata.getPrimaryId(), query.getPrimaryId()))
        && secondaryIdMatches(metadata);
  }

  /** Prefix filter: primary id (if queried) must start with the queried bytes. */
  private boolean passesPrefixFilter(final GeoWaveMetadata metadata) {
    return (!query.hasPrimaryId()
        || ByteArrayUtils.startsWith(metadata.getPrimaryId(), query.getPrimaryId()))
        && secondaryIdMatches(metadata);
  }

  /** Advances {@code next} to the first remaining source entry that passes the filter. */
  private void computeNext() {
    while ((next == null) && source.hasNext()) {
      final GeoWaveMetadata possibleNext = source.next();
      if (query.isPrefix()) {
        if (passesPrefixFilter(possibleNext)) {
          next = possibleNext;
        }
      } else if (passesExactFilter(possibleNext)) {
        next = possibleNext;
      }
    }
  }

  @Override
  public boolean hasNext() {
    if (next == null) {
      computeNext();
    }
    return next != null;
  }

  @Override
  public GeoWaveMetadata next() {
    if (next == null) {
      computeNext();
    }
    if (next == null) {
      // previously returned null here; the Iterator contract requires an exception
      // when the iteration is exhausted
      throw new NoSuchElementException();
    }
    final GeoWaveMetadata retVal = next;
    next = null;
    return retVal;
  }

  @Override
  public void close() {
    source.close();
  }
}


================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryPersistentAdapterStore.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import java.io.IOException; import java.io.Serializable; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; /** * This is a simple HashMap based in-memory implementation of the PersistentAdapterStore and can be * useful if it is undesirable to persist and query objects within another storage mechanism such as * an Accumulo table. */ public class MemoryPersistentAdapterStore implements PersistentAdapterStore, Serializable { /** */ private static final long serialVersionUID = 1L; private Map> adapterMap; public MemoryPersistentAdapterStore() { adapterMap = Collections.synchronizedMap(new HashMap>()); } public MemoryPersistentAdapterStore(final InternalDataAdapter[] adapters) { adapterMap = Collections.synchronizedMap(new HashMap>()); for (final InternalDataAdapter adapter : adapters) { adapterMap.put(adapter.getAdapterId(), adapter); } } @Override public void addAdapter(final InternalDataAdapter InternalDataadapter) { adapterMap.put(InternalDataadapter.getAdapterId(), InternalDataadapter); } @Override public InternalDataAdapter getAdapter(final Short internalAdapterId) { return adapterMap.get(internalAdapterId); } @Override public boolean adapterExists(final Short internalAdapterId) { return adapterMap.containsKey(internalAdapterId); } @Override public InternalDataAdapter[] getAdapters() { return adapterMap.values().toArray(new 
InternalDataAdapter[adapterMap.size()]); } @Override public void removeAll() { adapterMap.clear(); } private void writeObject(final java.io.ObjectOutputStream out) throws IOException { final int count = adapterMap.size(); out.writeInt(count); for (final Map.Entry> entry : adapterMap.entrySet()) { out.writeObject(entry.getKey()); final byte[] val = PersistenceUtils.toBinary(entry.getValue()); out.writeObject(val); } } private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { final int count = in.readInt(); adapterMap = Collections.synchronizedMap(new HashMap>()); for (int i = 0; i < count; i++) { final Short id = (Short) in.readObject(); final byte[] data = (byte[]) in.readObject(); adapterMap.put(id, (InternalDataAdapter) PersistenceUtils.fromBinary(data)); } } @Override public void removeAdapter(final Short adapterId) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryRequiredOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import org.locationtech.geowave.core.store.BaseDataStoreOptions; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.StoreFactoryOptions; /** No additional options for memory. */ public class MemoryRequiredOptions extends StoreFactoryOptions { private final DataStoreOptions options = new BaseDataStoreOptions() { @Override public boolean isServerSideLibraryEnabled() { // memory datastore doesn't have a serverside option return false; } }; @Override public StoreFactoryFamilySpi getStoreFactory() { return new MemoryStoreFactoryFamily(); } @Override public DataStoreOptions getStoreOptions() { return options; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryStoreFactoryFamily.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import org.locationtech.geowave.core.store.BaseDataStoreFamily; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; public class MemoryStoreFactoryFamily extends BaseDataStoreFamily implements StoreFactoryFamilySpi { private static final String TYPE = "memory"; private static final String DESCRIPTION = "A GeoWave store that is in memory typically only used for test purposes"; public MemoryStoreFactoryFamily() { super(TYPE, DESCRIPTION, new MemoryFactoryHelper()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/memory/MemoryStoreUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MemoryStoreUtils { private static final Logger LOGGER = LoggerFactory.getLogger(MemoryStoreUtils.class); protected static boolean isAuthorized(final byte[] visibility, final String[] authorizations) { if ((visibility == null) || (visibility.length == 0)) { return true; } VisibilityExpression expr; try { expr = new VisibilityExpressionParser().parse(visibility); } catch (final IOException e) { LOGGER.error("invalid visibility", e); return false; } return expr.ok(authorizations); } private abstract static class VisibilityExpression { public abstract boolean ok(String[] auths); public VisibilityExpression and() { final AndExpression exp = new AndExpression(); exp.add(this); return exp; } public VisibilityExpression or() { final OrExpression exp = new OrExpression(); exp.add(this); return exp; } public abstract List children(); public abstract VisibilityExpression add(VisibilityExpression expression); } public static enum NodeType { TERM, OR, AND, } private static class VisibilityExpressionParser { private int index = 0; private int parens = 0; public VisibilityExpressionParser() {} VisibilityExpression parse(final byte[] expression) throws IOException { if (expression.length > 0) { final VisibilityExpression expr = parse_(expression); if (expr == null) { badArgumentException("operator or missing parens", expression, index - 1); 
} if (parens != 0) { badArgumentException("parenthesis mis-match", expression, index - 1); } return expr; } return null; } VisibilityExpression processTerm( final int start, final int end, final VisibilityExpression expr, final byte[] expression) throws UnsupportedEncodingException { if (start != end) { if (expr != null) { badArgumentException("expression needs | or &", expression, start); } return new ChildExpression(new String(Arrays.copyOfRange(expression, start, end), "UTF-8")); } if (expr == null) { badArgumentException("empty term", Arrays.copyOfRange(expression, start, end), start); } return expr; } VisibilityExpression parse_(final byte[] expression) throws IOException { VisibilityExpression result = null; VisibilityExpression expr = null; int termStart = index; while (index < expression.length) { switch (expression[index++]) { case '&': { expr = processTerm(termStart, index - 1, expr, expression); if (result != null) { if (!(result instanceof AndExpression)) { badArgumentException("cannot mix & and |", expression, index - 1); } } else { result = new AndExpression(); } result.add(expr); expr = null; termStart = index; break; } case '|': { expr = processTerm(termStart, index - 1, expr, expression); if (result != null) { if (!(result instanceof OrExpression)) { badArgumentException("cannot mix | and &", expression, index - 1); } } else { result = new OrExpression(); } result.add(expr); expr = null; termStart = index; break; } case '(': { parens++; if ((termStart != (index - 1)) || (expr != null)) { badArgumentException("expression needs & or |", expression, index - 1); } expr = parse_(expression); termStart = index; break; } case ')': { parens--; final VisibilityExpression child = processTerm(termStart, index - 1, expr, expression); if ((child == null) && (result == null)) { badArgumentException("empty expression not allowed", expression, index); } if (result == null) { return child; } result.add(child); return result; } } } final VisibilityExpression child = 
processTerm(termStart, index, expr, expression); if (result != null) { result.add(child); } else { result = child; } if (!(result instanceof ChildExpression)) { if (result.children().size() < 2) { badArgumentException("missing term", expression, index); } } return result; } } public abstract static class CompositeExpression extends VisibilityExpression { protected final List expressions = new ArrayList<>(); @Override public VisibilityExpression add(final VisibilityExpression expression) { if (expression.getClass().equals(this.getClass())) { for (final VisibilityExpression child : expression.children()) { add(child); } } else { expressions.add(expression); } return this; } } public static class ChildExpression extends VisibilityExpression { private final String value; public ChildExpression(final String value) { super(); this.value = value; } @Override public boolean ok(final String[] auths) { if (auths != null) { for (final String auth : auths) { if (value.equals(auth)) { return true; } } } return false; } @Override public List children() { return Collections.emptyList(); } @Override public VisibilityExpression add(final VisibilityExpression expression) { return this; } } public static class AndExpression extends CompositeExpression { @Override public List children() { return expressions; } @Override public boolean ok(final String[] auth) { for (final VisibilityExpression expression : expressions) { if (!expression.ok(auth)) { return false; } } return true; } public VisibilityExpression and(final VisibilityExpression expression) { return this; } } public static class OrExpression extends CompositeExpression { @Override public boolean ok(final String[] auths) { for (final VisibilityExpression expression : expressions) { if (expression.ok(auths)) { return true; } } return false; } @Override public List children() { return expressions; } public VisibilityExpression or(final VisibilityExpression expression) { return this; } } private static final void 
badArgumentException( final String msg, final byte[] expression, final int place) { throw new IllegalArgumentException( msg + " for " + Arrays.toString(expression) + " at " + place); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AbstractGeoWavePersistence.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import java.io.IOException; import java.nio.charset.Charset; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataDeleter; import org.locationtech.geowave.core.store.operations.MetadataQuery; import org.locationtech.geowave.core.store.operations.MetadataReader; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.core.store.operations.MetadataWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; /** * This abstract class does most of the work for storing persistable objects in Geowave datastores * and can be easily extended for any object that needs to be persisted. * *

There is an LRU cache associated with it so staying in sync with external updates is not * practical - it assumes the objects are not updated often or at all. The objects are stored in * their own table. * * @param The type of persistable object that this stores */ public abstract class AbstractGeoWavePersistence { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWavePersistence.class); // TODO: should we concern ourselves with multiple distributed processes // updating and looking up objects simultaneously that would require some // locking/synchronization mechanism, and even possibly update // notifications? protected static final int MAX_ENTRIES = 1000; public static final String METADATA_TABLE = "GEOWAVE_METADATA"; protected final DataStoreOperations operations; protected final DataStoreOptions options; protected final MetadataType type; @SuppressWarnings("rawtypes") protected Cache cache; public AbstractGeoWavePersistence( final DataStoreOperations operations, final DataStoreOptions options, final MetadataType type) { this.operations = operations; this.options = options; this.type = type; buildCache(); } protected void buildCache() { final Caffeine cacheBuilder = Caffeine.newBuilder().maximumSize(MAX_ENTRIES); this.cache = cacheBuilder.build(); } protected MetadataType getType() { return type; } protected ByteArray getSecondaryId(final T persistedObject) { // this is the default implementation, if the persistence store requires // secondary indices, it needs to override this method return null; } protected abstract ByteArray getPrimaryId(final T persistedObject); public void removeAll() { deleteObject(null, null); cache.invalidateAll(); } protected ByteArray getCombinedId(final ByteArray primaryId, final ByteArray secondaryId) { // the secondaryId is optional so check for null or empty if ((secondaryId != null) && (secondaryId.getBytes() != null) && (secondaryId.getBytes().length > 0)) { return new ByteArray(primaryId.getString() + "_" + 
secondaryId.getString()); } return primaryId; } @SuppressWarnings("unchecked") protected void addObjectToCache( final ByteArray primaryId, final ByteArray secondaryId, final T object, final String... authorizations) { final ByteArray combinedId = getCombinedId(primaryId, secondaryId); cache.put(combinedId, object); } protected Object getObjectFromCache( final ByteArray primaryId, final ByteArray secondaryId, final String... authorizations) { final ByteArray combinedId = getCombinedId(primaryId, secondaryId); return cache.getIfPresent(combinedId); } protected boolean deleteObjectFromCache(final ByteArray primaryId, final ByteArray secondaryId) { final ByteArray combinedId = getCombinedId(primaryId, secondaryId); if (combinedId != null) { final boolean present = cache.getIfPresent(combinedId) != null; if (present) { cache.invalidate(combinedId); } return present; } return false; } public void remove(final ByteArray adapterId) { deleteObject(adapterId, null); } protected boolean deleteObject( final ByteArray primaryId, final ByteArray secondaryId, final String... authorizations) { if (deleteObjects(primaryId, secondaryId, authorizations)) { deleteObjectFromCache(primaryId, secondaryId); return true; } return false; } protected void addObject(final T object) { addObject(getPrimaryId(object), getSecondaryId(object), object); } protected byte[] getVisibility(final T entry) { return null; } protected byte[] toBytes(final String s) { if (s == null) { return null; } return s.getBytes(Charset.forName("UTF-8")); } protected void addObject(final ByteArray id, final ByteArray secondaryId, final T object) { addObjectToCache(id, secondaryId, object); try (final MetadataWriter writer = operations.createMetadataWriter(getType())) { if (writer != null) { final GeoWaveMetadata metadata = new GeoWaveMetadata( id.getBytes(), secondaryId != null ? 
secondaryId.getBytes() : null, getVisibility(object), getValue(object)); writer.write(metadata); } } catch (final Exception e) { LOGGER.warn("Unable to close metadata writer", e); e.printStackTrace(); } } protected byte[] getValue(final T object) { final byte[] value = PersistenceUtils.toBinary(object); if ((object != null) && ((value == null) || (value.length == 0))) { throw new UnsupportedOperationException( "Object of class " + object.getClass().getName() + " was not found in the persistable registry and cannot be persisted!"); } return value; } protected CloseableIterator getAllObjectsWithSecondaryId( final ByteArray secondaryId, final String... authorizations) { return internalGetObjects(new MetadataQuery(secondaryId.getBytes(), authorizations)); } protected T getObject( final ByteArray primaryId, final ByteArray secondaryId, final String... authorizations) { return internalGetObject(primaryId, secondaryId, true, authorizations); } @SuppressWarnings("unchecked") protected T internalGetObject( final ByteArray primaryId, final ByteArray secondaryId, final boolean warnIfNotExists, final String... authorizations) { final Object cacheResult = getObjectFromCache(primaryId, secondaryId, authorizations); if (cacheResult != null) { return (T) cacheResult; } try { if (!operations.metadataExists(getType())) { if (warnIfNotExists) { LOGGER.warn( "Object '" + getCombinedId(primaryId, secondaryId).getString() + "' not found. '" + METADATA_TABLE + "' table does not exist"); } return null; } } catch (final IOException e1) { if (warnIfNotExists) { LOGGER.error("Unable to check for existence of metadata to get object", e1); } return null; } final MetadataReader reader = operations.createMetadataReader(getType()); try (final CloseableIterator it = reader.query( new MetadataQuery( primaryId.getBytes(), secondaryId == null ? 
null : secondaryId.getBytes(), authorizations))) { if (!it.hasNext()) { if (warnIfNotExists) { LOGGER.warn( "Object '" + getCombinedId(primaryId, secondaryId).getString() + "' not found"); } return null; } final GeoWaveMetadata entry = it.next(); return entryToValue(entry, authorizations); } } protected boolean objectExists( final ByteArray primaryId, final ByteArray secondaryId, final String... authorizations) { return internalGetObject(primaryId, secondaryId, false, authorizations) != null; } protected CloseableIterator getObjects(final String... authorizations) { return internalGetObjects(new MetadataQuery(null, authorizations)); } protected CloseableIterator internalGetObjects(final MetadataQuery query) { try { if (!operations.metadataExists(getType())) { return new CloseableIterator.Empty<>(); } } catch (final IOException e1) { LOGGER.error("Unable to check for existence of metadata to get objects", e1); return new CloseableIterator.Empty<>(); } final MetadataReader reader = operations.createMetadataReader(getType()); final CloseableIterator it = reader.query(query); return new NativeIteratorWrapper(it, query.getAuthorizations()); } @SuppressWarnings("unchecked") protected T fromValue(final GeoWaveMetadata entry) { return (T) PersistenceUtils.fromBinary(entry.getValue()); } protected T entryToValue(final GeoWaveMetadata entry, final String... authorizations) { final T result = fromValue(entry); if (result != null) { addObjectToCache( new ByteArray(entry.getPrimaryId()), (entry.getSecondaryId() == null) || (entry.getSecondaryId().length == 0) ? null : new ByteArray(entry.getSecondaryId()), result, authorizations); } return result; } public boolean deleteObjects(final ByteArray secondaryId, final String... authorizations) { return deleteObjects(null, secondaryId, authorizations); } public boolean deleteObjects( final ByteArray primaryId, final ByteArray secondaryId, final String... 
authorizations) { return deleteObjects(primaryId, secondaryId, operations, getType(), this, authorizations); } protected static boolean deleteObjects( final ByteArray primaryId, final ByteArray secondaryId, final DataStoreOperations operations, final MetadataType type, final AbstractGeoWavePersistence cacheDeleter, final String... authorizations) { return deleteObjects( primaryId, secondaryId, false, operations, type, cacheDeleter, authorizations); } protected static boolean deleteObjects( final ByteArray primaryId, final ByteArray secondaryId, final boolean primaryIdPrefix, final DataStoreOperations operations, final MetadataType type, final AbstractGeoWavePersistence cacheDeleter, final String... authorizations) { try { if (!operations.metadataExists(type)) { return false; } } catch (final IOException e1) { LOGGER.error("Unable to check for existence of metadata to delete objects", e1); return false; } try (final MetadataDeleter deleter = operations.createMetadataDeleter(type)) { if ((primaryId == null) && (secondaryId == null)) { // this is trying to delete everything, let's clear the cache (although there's an // off-chance authorizations might not force the entire stats to be cleared, the cache is // merely a performance optimization) if (cacheDeleter != null) { cacheDeleter.cache.invalidateAll(); } return deleter.delete(new MetadataQuery((byte[]) null, (byte[]) null, authorizations)); } boolean retVal = false; final MetadataReader reader = operations.createMetadataReader(type); try (final CloseableIterator it = reader.query( new MetadataQuery( primaryId != null ? primaryId.getBytes() : null, secondaryId != null ? 
secondaryId.getBytes() : null, primaryIdPrefix, authorizations))) { while (it.hasNext()) { retVal = true; final GeoWaveMetadata entry = it.next(); if (cacheDeleter != null) { cacheDeleter.deleteObjectFromCache( new ByteArray(entry.getPrimaryId()), new ByteArray(entry.getSecondaryId())); } deleter.delete( new MetadataQuery(entry.getPrimaryId(), entry.getSecondaryId(), authorizations)); } } return retVal; } catch (final Exception e) { LOGGER.error("Unable to delete objects", e); } return false; } private class NativeIteratorWrapper implements CloseableIterator { private final CloseableIterator it; private final String[] authorizations; private NativeIteratorWrapper( final CloseableIterator it, final String[] authorizations) { this.it = it; this.authorizations = authorizations; } @Override public boolean hasNext() { return it.hasNext(); } @Override public T next() { return entryToValue(it.next(), authorizations); } @Override public void remove() { it.remove(); } @Override public void close() { it.close(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterIndexMappingStoreFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; public class AdapterIndexMappingStoreFactory extends BaseStoreFactory { public AdapterIndexMappingStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public AdapterIndexMappingStore createStore(final StoreFactoryOptions options) { return new AdapterIndexMappingStoreImpl( helper.createOperations(options), options.getStoreOptions()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterIndexMappingStoreImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.metadata;

import java.util.Set;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.MetadataQuery;
import org.locationtech.geowave.core.store.operations.MetadataType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Sets;
import com.google.common.collect.Lists;

/**
 * This class will persist Adapter Index Mappings within a table for GeoWave metadata. The mappings
 * will be persisted in an "INDEX_MAPPINGS" metadata table.
 *
 * <p>There is an LRU cache associated with it so staying in sync with external updates is not
 * practical - it assumes the objects are not updated often or at all. The objects are stored in
 * their own table.
 *
 * <p>Objects are maintained with regard to visibility. The assumption is that a mapping between an
 * adapter and indexing is consistent across all visibility constraints.
 */
public class AdapterIndexMappingStoreImpl extends AbstractGeoWavePersistence<AdapterToIndexMapping>
    implements
    AdapterIndexMappingStore {
  private static final Logger LOGGER = LoggerFactory.getLogger(AdapterIndexMappingStoreImpl.class);

  public AdapterIndexMappingStoreImpl(
      final DataStoreOperations operations,
      final DataStoreOptions options) {
    super(operations, options, MetadataType.INDEX_MAPPINGS);
  }

  /** Returns true when a mapping with the same adapter id and index name is already persisted. */
  public boolean mappingExists(final AdapterToIndexMapping persistedObject) {
    return objectExists(getPrimaryId(persistedObject), getSecondaryId(persistedObject));
  }

  @Override
  protected ByteArray getPrimaryId(final AdapterToIndexMapping persistedObject) {
    // primary key is the adapter's internal short id
    return new ByteArray(ByteArrayUtils.shortToByteArray(persistedObject.getAdapterId()));
  }

  @Override
  protected ByteArray getSecondaryId(final AdapterToIndexMapping persistedObject) {
    // secondary key is the index name, allowing multiple indices per adapter
    return new ByteArray(StringUtils.stringToBinary(persistedObject.getIndexName()));
  }

  @Override
  public AdapterToIndexMapping[] getIndicesForAdapter(final short adapterId) {
    final Set<AdapterToIndexMapping> indexMappings = Sets.newHashSet();
    try (CloseableIterator<AdapterToIndexMapping> iter =
        super.internalGetObjects(
            new MetadataQuery(ByteArrayUtils.shortToByteArray(adapterId), null, false))) {
      while (iter.hasNext()) {
        indexMappings.add(iter.next());
      }
    }
    return indexMappings.toArray(new AdapterToIndexMapping[indexMappings.size()]);
  }

  @Override
  public AdapterToIndexMapping getMapping(final short adapterId, final String indexName) {
    if (indexName.equals(DataIndexUtils.DATA_ID_INDEX.getName())) {
      // the data-id index has no field mappings; synthesize an empty mapping
      return new AdapterToIndexMapping(adapterId, indexName, Lists.newArrayList());
    }
    final ByteArray primaryId = new ByteArray(ByteArrayUtils.shortToByteArray(adapterId));
    final ByteArray secondaryId = new ByteArray(StringUtils.stringToBinary(indexName));
    return super.getObject(primaryId, secondaryId);
  }

  @Override
  public void addAdapterIndexMapping(final AdapterToIndexMapping mapping) {
    final ByteArray primaryId = getPrimaryId(mapping);
    final ByteArray secondaryId = getSecondaryId(mapping);
    if (objectExists(primaryId, secondaryId)) {
      LOGGER.info("Adapter to index mapping already existed, skipping add.");
    } else {
      addObject(mapping);
    }
  }

  @Override
  public void remove(final short internalAdapterId) {
    super.remove(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)));
  }

  @Override
  public boolean remove(final short internalAdapterId, final String indexName) {
    final ByteArray adapterId = new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId));
    final ByteArray secondaryId = new ByteArray(StringUtils.stringToBinary(indexName));
    if (!objectExists(adapterId, secondaryId)) {
      return false;
    }
    return super.deleteObject(adapterId, secondaryId);
  }
}


================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterStoreFactory.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; public class AdapterStoreFactory extends BaseStoreFactory { public AdapterStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public PersistentAdapterStore createStore(final StoreFactoryOptions options) { return new AdapterStoreImpl(helper.createOperations(options), options.getStoreOptions()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/AdapterStoreImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.metadata;

import java.util.HashMap;
import java.util.Map;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.entities.GeoWaveMetadata;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.MetadataType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class will persist Data Adapters within an Accumulo table for GeoWave metadata. The adapters
 * will be persisted in an "ADAPTER" column family.
 *
 * <p> There is an LRU cache associated with it so staying in sync with external updates is not
 * practical - it assumes the objects are not updated often or at all. The objects are stored in
 * their own table.
 */
public class AdapterStoreImpl extends AbstractGeoWavePersistence<InternalDataAdapter<?>> implements
    PersistentAdapterStore {
  private static final Logger LOGGER = LoggerFactory.getLogger(AdapterStoreImpl.class);

  public AdapterStoreImpl(final DataStoreOperations operations, final DataStoreOptions options) {
    super(operations, options, MetadataType.ADAPTER);
  }

  /** Persist the given adapter. */
  @Override
  public void addAdapter(final InternalDataAdapter<?> adapter) {
    addObject(adapter);
  }

  /**
   * Look up an adapter by its internal short ID.
   *
   * @return the adapter, or {@code null} when the ID is {@code null} or unknown
   */
  @Override
  public InternalDataAdapter<?> getAdapter(final Short internalAdapterId) {
    if (internalAdapterId == null) {
      LOGGER.warn("Cannot get adapter for null internal ID");
      return null;
    }
    return getObject(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)), null);
  }

  /**
   * Deserialize an adapter from a metadata entry. Entries may contain either an already-internal
   * adapter or a raw one; raw adapters are wrapped using the internal ID encoded in the entry's
   * primary ID.
   */
  @Override
  protected InternalDataAdapter<?> fromValue(final GeoWaveMetadata entry) {
    final DataTypeAdapter<?> decoded =
        (DataTypeAdapter<?>) PersistenceUtils.fromBinary(entry.getValue());
    if (decoded instanceof InternalDataAdapter) {
      return (InternalDataAdapter<?>) decoded;
    }
    return decoded.asInternalAdapter(ByteArrayUtils.byteArrayToShort(entry.getPrimaryId()));
  }

  // serialized form is the standard persistable binary encoding
  @Override
  protected byte[] getValue(final InternalDataAdapter<?> object) {
    return PersistenceUtils.toBinary(object);
  }

  @Override
  public boolean adapterExists(final Short internalAdapterId) {
    if (internalAdapterId == null) {
      LOGGER.warn("Cannot check existence of adapter for null internal ID");
      return false;
    }
    return objectExists(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)), null);
  }

  // adapters are keyed by their internal short ID
  @Override
  protected ByteArray getPrimaryId(final InternalDataAdapter<?> persistedObject) {
    return new ByteArray(ByteArrayUtils.shortToByteArray(persistedObject.getAdapterId()));
  }

  /** @return every persisted adapter, de-duplicated by internal adapter ID */
  @Override
  public InternalDataAdapter<?>[] getAdapters() {
    // use a map with the adapter ID as key to ensure only one adapter per unique ID
    final Map<Short, InternalDataAdapter<?>> uniqueAdapters = new HashMap<>();
    try (CloseableIterator<InternalDataAdapter<?>> adapterIter = getObjects()) {
      while (adapterIter.hasNext()) {
        final InternalDataAdapter<?> adapter = adapterIter.next();
        uniqueAdapters.put(adapter.getAdapterId(), adapter);
      }
    }
    return uniqueAdapters.values().toArray(new InternalDataAdapter[0]);
  }

  /** Remove the adapter with the given internal ID; no-op (with warning) for {@code null}. */
  @Override
  public void removeAdapter(final Short internalAdapterId) {
    if (internalAdapterId == null) {
      LOGGER.warn("Cannot remove adapter for null internal ID");
      return;
    }
    remove(new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)));
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/DataStatisticsStoreFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; public class DataStatisticsStoreFactory extends BaseStoreFactory { public DataStatisticsStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public DataStatisticsStore createStore(final StoreFactoryOptions options) { return new DataStatisticsStoreImpl(helper.createOperations(options), options.getStoreOptions()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/DataStatisticsStoreImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.metadata;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nullable;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.DataTypeStatistic;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.IndexStatistic;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.MetadataDeleter;
import org.locationtech.geowave.core.store.operations.MetadataQuery;
import org.locationtech.geowave.core.store.operations.MetadataType;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import org.locationtech.geowave.core.store.statistics.StatisticType;
import org.locationtech.geowave.core.store.statistics.StatisticUpdateCallback;
import org.locationtech.geowave.core.store.statistics.StatisticValueReader;
import org.locationtech.geowave.core.store.statistics.StatisticValueWriter;
import org.locationtech.geowave.core.store.statistics.StatisticsValueIterator;
import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;
import org.locationtech.geowave.core.store.statistics.field.FieldStatisticId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;

/**
 * Metadata-backed statistics store. Statistic definitions are persisted under
 * {@link MetadataType#STATISTICS} (primary ID = the statistic's unique ID, secondary ID = its
 * group ID), while statistic values live under {@link MetadataType#STATISTIC_VALUES}.
 *
 * <p>NOTE(review): generic type parameters in this file appear to have been lost in extraction
 * (e.g. {@code Statistic>} was presumably {@code Statistic<StatisticValue<?>>}); confirm against
 * the upstream source before compiling.
 */
public class DataStatisticsStoreImpl extends AbstractGeoWavePersistence>> implements
    DataStatisticsStore {
  private static final Logger LOGGER = LoggerFactory.getLogger(DataStatisticsStoreImpl.class);
  // this is fairly arbitrary at the moment because it is the only custom
  // server op added
  public static final int STATS_COMBINER_PRIORITY = 10;
  public static final String STATISTICS_COMBINER_NAME = "STATS_COMBINER";

  public DataStatisticsStoreImpl(
      final DataStoreOperations operations,
      final DataStoreOptions options) {
    super(operations, options, MetadataType.STATISTICS);
  }

  // a statistic's unique ID serves as its metadata primary key
  @Override
  protected ByteArray getPrimaryId(final Statistic> persistedObject) {
    return persistedObject.getId().getUniqueId();
  }

  // the group ID (index/data-type/field grouping) serves as the secondary key
  @Override
  protected ByteArray getSecondaryId(final Statistic> persistedObject) {
    return persistedObject.getId().getGroupId();
  }

  @Override
  public boolean exists(final Statistic> statistic) {
    return objectExists(getPrimaryId(statistic), getSecondaryId(statistic));
  }

  @Override
  public void addStatistic(final Statistic> statistic) {
    this.addObject(statistic);
  }

  /** Removes the statistic definition and all of its persisted values. */
  @Override
  public boolean removeStatistic(final Statistic> statistic) {
    // Delete the statistic values
    removeStatisticValues(statistic);
    return deleteObject(getPrimaryId(statistic), getSecondaryId(statistic));
  }

  /** Removes each statistic (and its values); returns true if any definition was deleted. */
  @Override
  public boolean removeStatistics(
      final Iterator>> statistics) {
    boolean deleted = false;
    while (statistics.hasNext()) {
      final Statistic> statistic = statistics.next();
      removeStatisticValues(statistic);
      // delete first so the || cannot short-circuit past the delete
      deleted = deleteObject(getPrimaryId(statistic), getSecondaryId(statistic)) || deleted;
    }
    return deleted;
  }

  /** Removes every statistic definition and value grouped under the given index. */
  @Override
  public boolean removeStatistics(final Index index) {
    boolean removed = deleteObjects(IndexStatistic.generateGroupId(index.getName()));
    removed =
        deleteObjects(
            null,
            IndexStatistic.generateGroupId(index.getName()),
            operations,
            MetadataType.STATISTIC_VALUES,
            this) || removed;
    return removed;
  }

  /**
   * Removes all data-type and field statistics (definitions and values) for the given type, and
   * additionally purges the type's bins from any binned statistics on the supplied indices.
   */
  @Override
  public boolean removeStatistics(final DataTypeAdapter type, final Index... adapterIndices) {
    boolean removed = deleteObjects(DataTypeStatistic.generateGroupId(type.getTypeName()));
    removed =
        deleteObjects(
            null,
            DataTypeStatistic.generateGroupId(type.getTypeName()),
            operations,
            MetadataType.STATISTIC_VALUES,
            this) || removed;
    removed = deleteObjects(FieldStatistic.generateGroupId(type.getTypeName())) || removed;
    removed =
        deleteObjects(
            null,
            FieldStatistic.generateGroupId(type.getTypeName()),
            operations,
            MetadataType.STATISTIC_VALUES,
            this) || removed;
    // index statistics are shared across types; only the type-specific bins are purged
    for (final Index index : adapterIndices) {
      try (CloseableIterator>> statsIter = getIndexStatistics(index, null, null)) {
        while (statsIter.hasNext()) {
          final IndexStatistic next = (IndexStatistic) statsIter.next();
          removeTypeSpecificStatisticValues(next, type.getTypeName());
        }
      }
    }
    return removed;
  }

  /**
   * Removes the values binned to the given type name from an index statistic. Only applies when
   * the statistic bins by data type (directly or via a composite strategy); otherwise false.
   */
  @SuppressWarnings("unchecked")
  @Override
  public boolean removeTypeSpecificStatisticValues(
      final IndexStatistic indexStatistic,
      final String typeName) {
    if (indexStatistic.getBinningStrategy() == null) {
      return false;
    }
    final ByteArray adapterBin = DataTypeBinningStrategy.getBin(typeName);
    boolean removed = false;
    if (indexStatistic.getBinningStrategy() instanceof DataTypeBinningStrategy) {
      // direct data-type binning: the bin IS the type, delete it outright
      removed = removeStatisticValue(indexStatistic, adapterBin);
    } else if ((indexStatistic.getBinningStrategy() instanceof CompositeBinningStrategy)
        && ((CompositeBinningStrategy) indexStatistic.getBinningStrategy()).usesStrategy(
            DataTypeBinningStrategy.class)) {
      final CompositeBinningStrategy binningStrategy =
          (CompositeBinningStrategy) indexStatistic.getBinningStrategy();
      // TODO: The current metadata deleter only deletes exact values. One future optimization
      // could be to allow it to delete with a primary Id prefix. If the strategy index is 0,
      // a prefix delete could be used.
      final List binsToRemove = Lists.newLinkedList();
      // scan all values, collecting composite bins whose data-type component matches
      try (CloseableIterator> valueIter =
          getStatisticValues((Statistic>) indexStatistic)) {
        while (valueIter.hasNext()) {
          final ByteArray bin = valueIter.next().getBin();
          if (binningStrategy.binMatches(DataTypeBinningStrategy.class, bin, adapterBin)) {
            binsToRemove.add(bin);
          }
        }
      }
      for (final ByteArray bin : binsToRemove) {
        removed = removeStatisticValue(indexStatistic, bin) || removed;
      }
    }
    return removed;
  }

  /**
   * Returns an iterator over the statistic with the given primary/secondary ID, served from the
   * LRU cache when present, otherwise from a metadata query.
   */
  @SuppressWarnings("unchecked")
  protected CloseableIterator>> getCachedObject(
      final ByteArray primaryId,
      final ByteArray secondaryId) {
    final Object cacheResult = getObjectFromCache(primaryId, secondaryId);
    // if there's an exact match in the cache return a singleton
    if (cacheResult != null) {
      return new CloseableIterator.Wrapper<>(
          Iterators.singletonIterator((Statistic>) cacheResult));
    }
    return internalGetObjects(new MetadataQuery(primaryId.getBytes(), secondaryId.getBytes()));
  }

  /**
   * Common lookup for index/data-type statistics by group, optionally narrowed by statistic type
   * and tag. Type+tag together form a unique ID, enabling a cached exact lookup.
   */
  protected CloseableIterator>> getBasicStatisticsInternal(
      final ByteArray secondaryId,
      final @Nullable StatisticType> statisticType,
      final @Nullable String tag) {
    if (statisticType == null) {
      final CloseableIterator>> stats = getAllObjectsWithSecondaryId(secondaryId);
      if (tag == null) {
        return stats;
      }
      // no type given: scan the group and filter on tag client-side
      return new TagFilter(stats, tag);
    } else if (tag == null) {
      // type prefix query across all tags
      return internalGetObjects(
          new MetadataQuery(statisticType.getBytes(), secondaryId.getBytes(), true));
    }
    return getCachedObject(StatisticId.generateUniqueId(statisticType, tag), secondaryId);
  }

  /**
   * Lookup for field statistics; narrows by statistic type, field name and tag where supplied,
   * falling back to a client-side {@link FieldStatisticFilter} when no type is given.
   */
  protected CloseableIterator>> getFieldStatisticsInternal(
      final ByteArray secondaryId,
      final @Nullable StatisticType> statisticType,
      final @Nullable String fieldName,
      final @Nullable String tag) {
    if (statisticType != null) {
      if (fieldName != null) {
        final ByteArray primaryId =
            FieldStatisticId.generateUniqueId(statisticType, fieldName, tag);
        if (tag != null) {
          // fully qualified: exact (cacheable) lookup
          return getCachedObject(primaryId, secondaryId);
        } else {
          // type+field prefix query across tags
          return internalGetObjects(
              new MetadataQuery(primaryId.getBytes(), secondaryId.getBytes(), true));
        }
      } else {
        if (tag != null) {
          return new TagFilter(
              internalGetObjects(
                  new MetadataQuery(statisticType.getBytes(), secondaryId.getBytes(), true)),
              tag);
        } else {
          return internalGetObjects(
              new MetadataQuery(statisticType.getBytes(), secondaryId.getBytes(), true));
        }
      }
    }
    return new FieldStatisticFilter(getAllObjectsWithSecondaryId(secondaryId), fieldName, tag);
  }

  /** All statistics in the store, optionally restricted to one statistic type (prefix query). */
  protected CloseableIterator>> getAllStatisticsInternal(
      final @Nullable StatisticType> statisticType) {
    return internalGetObjects(
        new MetadataQuery(statisticType == null ? null : statisticType.getBytes(), null, true));
  }

  @SuppressWarnings("unchecked")
  @Override
  public CloseableIterator>> getIndexStatistics(
      final Index index,
      final @Nullable StatisticType> statisticType,
      final @Nullable String tag) {
    return (CloseableIterator>>) getBasicStatisticsInternal(
        IndexStatistic.generateGroupId(index.getName()),
        statisticType,
        tag);
  }

  @SuppressWarnings("unchecked")
  @Override
  public CloseableIterator>> getDataTypeStatistics(
      final DataTypeAdapter type,
      final @Nullable StatisticType> statisticType,
      final @Nullable String tag) {
    return (CloseableIterator>>) getBasicStatisticsInternal(
        DataTypeStatistic.generateGroupId(type.getTypeName()),
        statisticType,
        tag);
  }

  @SuppressWarnings("unchecked")
  @Override
  public CloseableIterator>> getFieldStatistics(
      final DataTypeAdapter type,
      final @Nullable StatisticType> statisticType,
      final @Nullable String fieldName,
      final @Nullable String tag) {
    return (CloseableIterator>>) getFieldStatisticsInternal(
        FieldStatistic.generateGroupId(type.getTypeName()),
        statisticType,
        fieldName,
        tag);
  }

  @Override
  public CloseableIterator>> getAllStatistics(
      final @Nullable StatisticType> statisticType) {
    return getAllStatisticsInternal(statisticType);
  }

  /** @return the statistic with the given ID, or {@code null} when absent */
  @SuppressWarnings("unchecked")
  @Override
  public , R> Statistic getStatisticById(
      final StatisticId statisticId) {
    try (CloseableIterator>> iterator =
        getCachedObject(statisticId.getUniqueId(), statisticId.getGroupId())) {
      if (iterator.hasNext()) {
        return (Statistic) iterator.next();
      }
    }
    return null;
  }

  /** Replaces the (unbinned) value of a statistic: existing value is removed, then rewritten. */
  @Override
  public , R> void setStatisticValue(
      final Statistic statistic,
      final V value) {
    if (statistic.getBinningStrategy() != null) {
      throw new UnsupportedOperationException(
          "The given statistic uses a binning strategy, but no bin was specified.");
    }
    removeStatisticValue(statistic);
    incorporateStatisticValue(statistic, value);
  }

  /** Replaces the value of a binned statistic for one specific bin. */
  @Override
  public , R> void setStatisticValue(
      final Statistic statistic,
      final V value,
      final ByteArray bin) {
    if (statistic.getBinningStrategy() == null) {
      throw new UnsupportedOperationException(
          "The given statistic does not use a binning strategy, but a bin was specified.");
    }
    removeStatisticValue(statistic, bin);
    incorporateStatisticValue(statistic, value, bin);
  }

  /** Appends (merges) a value into an unbinned statistic; write errors are logged, not thrown. */
  @Override
  public , R> void incorporateStatisticValue(
      final Statistic statistic,
      final V value) {
    if (statistic.getBinningStrategy() != null) {
      throw new UnsupportedOperationException(
          "The given statistic uses a binning strategy, but no bin was specified.");
    }
    try (StatisticValueWriter writer = createStatisticValueWriter(statistic)) {
      writer.writeStatisticValue(null, null, value);
    } catch (final Exception e) {
      LOGGER.error("Unable to write statistic value", e);
    }
  }

  /** Appends (merges) a value into a binned statistic under the given bin. */
  @Override
  public , R> void incorporateStatisticValue(
      final Statistic statistic,
      final V value,
      final ByteArray bin) {
    if (statistic.getBinningStrategy() == null) {
      throw new UnsupportedOperationException(
          "The given statistic does not use a binning strategy, but a bin was specified.");
    }
    try (StatisticValueWriter writer = createStatisticValueWriter(statistic)) {
      writer.writeStatisticValue(bin.getBytes(), null, value);
    } catch (final Exception e) {
      LOGGER.error("Unable to write statistic value", e);
    }
  }

  @Override
  public , R> StatisticValueWriter createStatisticValueWriter(
      final Statistic statistic) {
    return new StatisticValueWriter<>(
        operations.createMetadataWriter(MetadataType.STATISTIC_VALUES),
        statistic);
  }

  /**
   * Builds a value reader for one statistic. When {@code bin} is null and {@code exact} is false,
   * an empty-bin value ID is used as a prefix so every bin of the statistic is scanned; the query
   * is a prefix query whenever {@code exact} is false.
   */
  private , R> StatisticValueReader createStatisticValueReader(
      final Statistic statistic,
      final ByteArray bin,
      final boolean exact,
      final String... authorizations) {
    final byte[] primaryId;
    if ((bin == null) && !exact) {
      primaryId = StatisticValue.getValueId(statistic.getId(), new byte[0]);
    } else {
      primaryId = StatisticValue.getValueId(statistic.getId(), bin);
    }
    final MetadataQuery query =
        new MetadataQuery(
            primaryId,
            statistic.getId().getGroupId().getBytes(),
            !exact,
            authorizations);
    return new StatisticValueReader<>(
        operations.createMetadataReader(MetadataType.STATISTIC_VALUES).query(query),
        statistic);
  }

  /** Builds a value reader over explicit bin ranges, translating each bin into a value-ID range. */
  private , R> StatisticValueReader createStatisticValueReader(
      final Statistic statistic,
      final ByteArrayRange[] binRanges,
      final String... authorizations) {
    final MetadataQuery query =
        new MetadataQuery(
            Arrays.stream(binRanges).map(
                range -> new ByteArrayRange(
                    range.getStart() == null ? null
                        : StatisticValue.getValueId(statistic.getId(), range.getStart()),
                    range.getEnd() == null ? null
                        : StatisticValue.getValueId(statistic.getId(), range.getEnd()),
                    range.isSingleValue())).toArray(ByteArrayRange[]::new),
            statistic.getId().getGroupId().getBytes(),
            authorizations);
    return new StatisticValueReader<>(
        operations.createMetadataReader(MetadataType.STATISTIC_VALUES).query(query),
        statistic);
  }

  /** Deletes the single (unbinned) value of a statistic; delete errors are logged, not thrown. */
  @Override
  public boolean removeStatisticValue(final Statistic> statistic) {
    if (statistic.getBinningStrategy() != null) {
      throw new UnsupportedOperationException(
          "The given statistic uses a binning strategy, but no bin was specified.");
    }
    boolean deleted = false;
    try (
        MetadataDeleter deleter =
            operations.createMetadataDeleter(MetadataType.STATISTIC_VALUES)) {
      deleted =
          deleter.delete(
              new MetadataQuery(
                  statistic.getId().getUniqueId().getBytes(),
                  statistic.getId().getGroupId().getBytes()));
    } catch (final Exception e) {
      LOGGER.error("Unable to remove value for statistic", e);
    }
    return deleted;
  }

  /** Deletes the value of a binned statistic for one specific bin. */
  @Override
  public boolean removeStatisticValue(
      final Statistic> statistic,
      final ByteArray bin) {
    if (statistic.getBinningStrategy() == null) {
      throw new UnsupportedOperationException(
          "The given statistic does not use a binning strategy, but a bin was specified.");
    }
    boolean deleted = false;
    try (
        MetadataDeleter deleter =
            operations.createMetadataDeleter(MetadataType.STATISTIC_VALUES)) {
      deleted =
          deleter.delete(
              new MetadataQuery(
                  StatisticValue.getValueId(statistic.getId(), bin),
                  statistic.getId().getGroupId().getBytes()));
    } catch (final Exception e) {
      LOGGER.error("Unable to remove value for statistic", e);
    }
    return deleted;
  }

  /** Deletes every persisted value of a statistic (all bins when it is binned). */
  @SuppressWarnings("unchecked")
  @Override
  public boolean removeStatisticValues(final Statistic> statistic) {
    if (statistic.getBinningStrategy() == null) {
      return removeStatisticValue(statistic);
    }
    // TODO: The performance of this operation could be improved if primary ID prefix queries were
    // allowed during delete.
    boolean deleted = false;
    final List binsToRemove = Lists.newLinkedList();
    try (CloseableIterator> valueIter = getStatisticValues((Statistic>) statistic)) {
      while (valueIter.hasNext()) {
        final ByteArray bin = valueIter.next().getBin();
        binsToRemove.add(bin);
      }
    }
    for (final ByteArray bin : binsToRemove) {
      // NOTE(review): `deleted || removeStatisticValue(...)` short-circuits once any delete
      // succeeds, so later bins would NOT be removed; other loops in this class use the
      // `removeStatisticValue(...) || deleted` ordering instead — confirm intent upstream.
      deleted = deleted || removeStatisticValue(statistic, bin);
    }
    return deleted;
  }

  @Override
  public CloseableIterator> getStatisticValues(
      final Iterator>> statistics,
      final ByteArrayConstraints binConstraints,
      final String... authorizations) {
    return new StatisticsValueIterator(this, statistics, binConstraints, authorizations);
  }

  /** @return the single value of an unbinned statistic, or {@code null} when none is stored */
  @Override
  public , R> V getStatisticValue(
      final Statistic statistic,
      final String... authorizations) {
    if (statistic.getBinningStrategy() != null) {
      throw new UnsupportedOperationException(
          "The given statistic uses a binning strategy, but no bin was specified.");
    }
    try (StatisticValueReader reader =
        createStatisticValueReader(statistic, null, true, authorizations)) {
      if (reader.hasNext()) {
        return reader.next();
      }
    }
    return null;
  }

  /** @return the value stored for the exact bin, or {@code null} when none is stored */
  @Override
  public , R> V getStatisticValue(
      final Statistic statistic,
      final ByteArray bin,
      final String... authorizations) {
    if (statistic.getBinningStrategy() == null) {
      throw new UnsupportedOperationException(
          "The given statistic does not use a binning strategy, but a bin was specified.");
    }
    // allow for bin prefix scans
    try (StatisticValueReader reader =
        createStatisticValueReader(statistic, bin, true, authorizations)) {
      if (reader.hasNext()) {
        return reader.next();
      }
    }
    return null;
  }

  /** Iterates every value whose bin starts with the given prefix (prefix query). */
  @Override
  public , R> CloseableIterator getStatisticValues(
      final Statistic statistic,
      final ByteArray binPrefix,
      final String... authorizations) {
    if (statistic.getBinningStrategy() == null) {
      throw new UnsupportedOperationException(
          "The given statistic does not use a binning strategy, but a bin was specified.");
    }
    return createStatisticValueReader(statistic, binPrefix, false, authorizations);
  }

  /** Iterates every value whose bin falls within the supplied ranges. */
  @Override
  public , R> CloseableIterator getStatisticValues(
      final Statistic statistic,
      final ByteArrayRange[] binRanges,
      final String... authorizations) {
    return createStatisticValueReader(statistic, binRanges, authorizations);
  }

  /** Iterates all values of a statistic: every bin when binned, the single value otherwise. */
  @Override
  public , R> CloseableIterator getStatisticValues(
      final Statistic statistic,
      final String... authorizations) {
    if (statistic.getBinningStrategy() != null) {
      return createStatisticValueReader(statistic, null, false, authorizations);
    }
    return createStatisticValueReader(statistic, null, true, authorizations);
  }

  /**
   * Collects every statistic relevant to a write path (index statistics, plus data-type and field
   * statistics when {@code updateAdapterStats}) into a single update callback.
   */
  @Override
  public StatisticUpdateCallback createUpdateCallback(
      final Index index,
      final AdapterToIndexMapping indexMapping,
      final InternalDataAdapter adapter,
      final boolean updateAdapterStats) {
    final List>> statistics = Lists.newArrayList();
    if (index != null) {
      try (CloseableIterator>> indexStats = getIndexStatistics(index, null, null)) {
        while (indexStats.hasNext()) {
          statistics.add(indexStats.next());
        }
      }
    }
    if (updateAdapterStats) {
      try (CloseableIterator>> adapterStats = getDataTypeStatistics(adapter, null, null)) {
        while (adapterStats.hasNext()) {
          statistics.add(adapterStats.next());
        }
      }
      try (CloseableIterator>> fieldStats = getFieldStatistics(adapter, null, null, null)) {
        while (fieldStats.hasNext()) {
          statistics.add(fieldStats.next());
        }
      }
    }
    return new StatisticUpdateCallback<>(statistics, this, index, indexMapping, adapter);
  }

  /** Drops every statistic value, then every statistic definition. */
  @Override
  public void removeAll() {
    deleteObjects(null, null, operations, MetadataType.STATISTIC_VALUES, null);
    super.removeAll();
  }

  /**
   * Compacts each statistic's values by re-reading them (which merges on read) and writing the
   * merged result back via setStatisticValue. Always returns {@code true}.
   */
  @SuppressWarnings("unchecked")
  @Override
  public boolean mergeStats() {
    // materialize the definitions first so no metadata reader is open while rewriting values
    final List>> statistics = new ArrayList<>();
    try (CloseableIterator> it = getAllStatisticsInternal(null)) {
      while (it.hasNext()) {
        statistics.add((Statistic>) it.next());
      }
    }
    for (final Statistic> stat : statistics) {
      try (CloseableIterator> it = this.getStatisticValues(stat)) {
        if (stat.getBinningStrategy() != null) {
          while (it.hasNext()) {
            final StatisticValue value = it.next();
            this.setStatisticValue(stat, value, value.getBin());
          }
        } else if (it.hasNext()) {
          this.setStatisticValue(stat, it.next());
        }
      }
    }
    return true;
  }

  /**
   * Lazily filters a statistic iterator down to entries whose tag equals the given tag. A
   * buffered-lookahead iterator: {@code next} holds the next match until consumed.
   */
  protected static class TagFilter implements CloseableIterator>> {
    private final CloseableIterator>> source;
    private final String tag;
    // lookahead buffer; null means "not yet computed or exhausted"
    private Statistic> next = null;

    public TagFilter(
        final CloseableIterator>> source,
        final String tag) {
      this.source = source;
      this.tag = tag;
    }

    // advance source until a tag match is buffered or the source is exhausted
    private void computeNext() {
      while (source.hasNext()) {
        final Statistic> possibleNext = source.next();
        if (tag.equals(possibleNext.getTag())) {
          next = possibleNext;
          break;
        }
      }
    }

    @Override
    public boolean hasNext() {
      if (next == null) {
        computeNext();
      }
      return next != null;
    }

    @Override
    public Statistic> next() {
      if (next == null) {
        computeNext();
      }
      // NOTE(review): returns null (rather than throwing NoSuchElementException) when exhausted
      final Statistic> nextValue = next;
      next = null;
      return nextValue;
    }

    @Override
    public void close() {
      source.close();
    }
  }

  /**
   * Lazily filters a statistic iterator down to field statistics matching an optional field name
   * and optional tag (null means "any"). Same lookahead pattern as {@link TagFilter}.
   */
  protected static class FieldStatisticFilter implements
      CloseableIterator>> {
    private final CloseableIterator>> source;
    private final String fieldName;
    private final String tag;
    // lookahead buffer; null means "not yet computed or exhausted"
    private Statistic> next = null;

    public FieldStatisticFilter(
        final CloseableIterator>> source,
        final String fieldName,
        final String tag) {
      this.source = source;
      this.fieldName = fieldName;
      this.tag = tag;
    }

    // advance source until a matching field statistic is buffered or the source is exhausted
    private void computeNext() {
      while (source.hasNext()) {
        final Statistic> possibleNext = source.next();
        if (possibleNext instanceof FieldStatistic) {
          final FieldStatistic> statistic = (FieldStatistic>) possibleNext;
          if (((tag == null) || statistic.getTag().equals(tag))
              && ((fieldName == null) || statistic.getFieldName().equals(fieldName))) {
            next = possibleNext;
            break;
          }
        }
      }
    }

    @Override
    public boolean hasNext() {
      if (next == null) {
        computeNext();
      }
      return next != null;
    }

    @Override
    public Statistic> next() {
      if (next == null) {
        computeNext();
      }
      // NOTE(review): returns null (rather than throwing NoSuchElementException) when exhausted
      final Statistic> nextValue = next;
      next = null;
      return nextValue;
    }

    @Override
    public void close() {
      source.close();
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/IndexStoreFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.index.IndexStore; public class IndexStoreFactory extends BaseStoreFactory { public IndexStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public IndexStore createStore(final StoreFactoryOptions options) { return new IndexStoreImpl(helper.createOperations(options), options.getStoreOptions()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/IndexStoreImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.metadata;

import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.operations.MetadataType;

/**
 * This class will persist Index objects within an Accumulo table for GeoWave metadata. The indices
 * will be persisted in an "INDEX" column family.
 *
 * <p> There is an LRU cache associated with it so staying in sync with external updates is not
 * practical - it assumes the objects are not updated often or at all. The objects are stored in
 * their own table.
 */
public class IndexStoreImpl extends AbstractGeoWavePersistence<Index> implements IndexStore {

  public IndexStoreImpl(final DataStoreOperations operations, final DataStoreOptions options) {
    super(operations, options, MetadataType.INDEX);
  }

  /** Persist the given index. */
  @Override
  public void addIndex(final Index index) {
    addObject(index);
  }

  /** @return the persisted index with the given name, or {@code null} when absent */
  @Override
  public Index getIndex(final String indexName) {
    return getObject(new ByteArray(indexName), null);
  }

  // indices are keyed solely by name; no secondary ID is used
  @Override
  protected ByteArray getPrimaryId(final Index persistedObject) {
    return new ByteArray(persistedObject.getName());
  }

  @Override
  public boolean indexExists(final String indexName) {
    return objectExists(new ByteArray(indexName), null);
  }

  /** @return an iterator over every persisted index */
  @Override
  public CloseableIterator<Index> getIndices() {
    return getObjects();
  }

  /** Remove the index with the given name. */
  @Override
  public void removeIndex(final String indexName) {
    remove(new ByteArray(indexName));
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/InternalAdapterStoreFactory.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; public class InternalAdapterStoreFactory extends BaseStoreFactory { public InternalAdapterStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public InternalAdapterStore createStore(final StoreFactoryOptions options) { return new InternalAdapterStoreImpl(helper.createOperations(options)); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/InternalAdapterStoreImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import java.io.IOException; import org.apache.commons.lang.ArrayUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataQuery; import org.locationtech.geowave.core.store.operations.MetadataReader; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.core.store.operations.MetadataWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import com.google.common.collect.Iterators; import com.google.common.collect.Maps; /** * This class will persist Adapter Internal Adapter Mappings within an Accumulo table for GeoWave * metadata. The mappings will be persisted in an "AIM" column family. * *

There is an LRU cache associated with it so staying in sync with external updates is not * practical - it assumes the objects are not updated often or at all. The objects are stored in * their own table. * *

Objects are maintained with regard to visibility. The assumption is that a mapping between an * adapter and indexing is consistent across all visibility constraints. */ public class InternalAdapterStoreImpl implements InternalAdapterStore { private static final Logger LOGGER = LoggerFactory.getLogger(InternalAdapterStoreImpl.class); private static final Object MUTEX = new Object(); protected final BiMap cache = Maps.synchronizedBiMap(HashBiMap.create()); private static final byte[] INTERNAL_TO_EXTERNAL_ID = new byte[] {0}; private static final byte[] EXTERNAL_TO_INTERNAL_ID = new byte[] {1}; private static final ByteArray INTERNAL_TO_EXTERNAL_BYTEARRAYID = new ByteArray(INTERNAL_TO_EXTERNAL_ID); private static final ByteArray EXTERNAL_TO_INTERNAL_BYTEARRAYID = new ByteArray(EXTERNAL_TO_INTERNAL_ID); private final DataStoreOperations operations; public InternalAdapterStoreImpl(final DataStoreOperations operations) { this.operations = operations; } private MetadataReader getReader(final boolean warnIfNotExists) { try { if (!operations.metadataExists(MetadataType.INTERNAL_ADAPTER)) { return null; } } catch (final IOException e1) { if (warnIfNotExists) { LOGGER.error("Unable to check for existence of metadata to get object", e1); } return null; } return operations.createMetadataReader(MetadataType.INTERNAL_ADAPTER); } @Override public String getTypeName(final short adapterId) { return internalGetTypeName(adapterId, true); } private String internalGetTypeName(final short adapterId, final boolean warnIfNotExists) { String typeName = cache.inverse().get(adapterId); if (typeName != null) { return typeName; } final MetadataReader reader = getReader(true); if (reader == null) { if (warnIfNotExists) { LOGGER.warn( "Adapter ID '" + adapterId + "' not found. 
INTERNAL_ADAPTER '" + AbstractGeoWavePersistence.METADATA_TABLE + "' table does not exist"); } return null; } try (CloseableIterator it = reader.query( new MetadataQuery( ByteArrayUtils.shortToByteArray(adapterId), INTERNAL_TO_EXTERNAL_ID))) { if (!it.hasNext()) { if (warnIfNotExists) { LOGGER.warn("Internal Adapter ID '" + adapterId + "' not found"); } return null; } typeName = StringUtils.stringFromBinary(it.next().getValue()); cache.putIfAbsent(typeName, adapterId); return typeName; } } @Override public Short getAdapterId(final String typeName) { return internalGetAdapterId(typeName, true); } public Short internalGetAdapterId(final String typeName, final boolean warnIfNotExist) { final Short id = cache.get(typeName); if (id != null) { return id; } final MetadataReader reader = getReader(warnIfNotExist); if (reader == null) { if (warnIfNotExist) { LOGGER.warn( "Adapter '" + typeName + "' not found. INTERNAL_ADAPTER '" + AbstractGeoWavePersistence.METADATA_TABLE + "' table does not exist"); } return null; } try (CloseableIterator it = reader.query( new MetadataQuery(StringUtils.stringToBinary(typeName), EXTERNAL_TO_INTERNAL_ID))) { if (!it.hasNext()) { if (warnIfNotExist) { LOGGER.warn("Adapter '" + typeName + "' not found"); } return null; } final short adapterId = ByteArrayUtils.byteArrayToShort(it.next().getValue()); cache.putIfAbsent(typeName, adapterId); return adapterId; } } /** * This method has a chance of producing a conflicting adapter ID. Whenever possible, * {@link #getInitialAdapterId(String)} should be used. 
* * @param typeName the type name * @return a possibly conflicting adapter ID */ public static short getLazyInitialAdapterId(final String typeName) { return (short) (Math.abs((typeName.hashCode() % 127))); } @Override public short getInitialAdapterId(final String typeName) { // try to fit it into 1 byte first short adapterId = (short) (Math.abs((typeName.hashCode() % 127))); for (int i = 0; i < 127; i++) { final String adapterIdTypeName = internalGetTypeName(adapterId, false); if ((adapterIdTypeName == null) || typeName.equals(adapterIdTypeName)) { return adapterId; } adapterId++; if (adapterId > 127) { adapterId = 0; } } // try to fit into 2 bytes (only happens if there are more than 127 // adapters) adapterId = (short) (Math.abs((typeName.hashCode() % 16383))); for (int i = 0; i < 16256; i++) { final String adapterIdTypeName = internalGetTypeName(adapterId, false); if ((adapterIdTypeName == null) || typeName.equals(adapterIdTypeName)) { return adapterId; } adapterId++; if (adapterId > 16383) { adapterId = 128; // it already didn't fit in 1 byte } } // fall back to negative numbers (only happens if there are more than // 16,383 adapters) final int negativeRange = 0 - Short.MIN_VALUE; adapterId = (short) (Math.abs((typeName.hashCode() % negativeRange)) - Short.MIN_VALUE); for (int i = 0; i < negativeRange; i++) { final String adapterIdTypeName = internalGetTypeName(adapterId, false); if ((adapterIdTypeName == null) || typeName.equals(adapterIdTypeName)) { return adapterId; } adapterId++; if (adapterId > -1) { adapterId = Short.MIN_VALUE; } } return adapterId; } // ** this introduces a distributed race condition if multiple JVM processes // are excuting this method simultaneously // care should be taken to either explicitly call this from a single client // before running a distributed job, or use a distributed locking mechanism // so that internal Adapter Ids are consistent without any race conditions @Override public short addTypeName(final String typeName) { 
synchronized (MUTEX) { Short adapterId = internalGetAdapterId(typeName, false); if (adapterId != null) { return adapterId; } adapterId = getInitialAdapterId(typeName); try (final MetadataWriter writer = operations.createMetadataWriter(MetadataType.INTERNAL_ADAPTER)) { if (writer != null) { final byte[] adapterIdBytes = ByteArrayUtils.shortToByteArray(adapterId); writer.write( new GeoWaveMetadata( StringUtils.stringToBinary(typeName), EXTERNAL_TO_INTERNAL_ID, null, adapterIdBytes)); writer.write( new GeoWaveMetadata( adapterIdBytes, INTERNAL_TO_EXTERNAL_ID, null, StringUtils.stringToBinary(typeName))); } } catch (final Exception e) { LOGGER.warn("Unable to close metadata writer", e); } return adapterId; } } @Override public boolean remove(final String typeName) { final Short internalAdapterId = getAdapterId(typeName); return delete(typeName, internalAdapterId); } private boolean delete(final String typeName, final Short internalAdapterId) { boolean externalDeleted = false; if (typeName != null) { externalDeleted = AbstractGeoWavePersistence.deleteObjects( new ByteArray(typeName), EXTERNAL_TO_INTERNAL_BYTEARRAYID, operations, MetadataType.INTERNAL_ADAPTER, null); cache.remove(typeName); } boolean internalDeleted = false; if (internalAdapterId != null) { internalDeleted = AbstractGeoWavePersistence.deleteObjects( new ByteArray(ByteArrayUtils.shortToByteArray(internalAdapterId)), INTERNAL_TO_EXTERNAL_BYTEARRAYID, operations, MetadataType.INTERNAL_ADAPTER, null); } return internalDeleted && externalDeleted; } @Override public void removeAll() { AbstractGeoWavePersistence.deleteObjects( null, null, operations, MetadataType.INTERNAL_ADAPTER, null); cache.clear(); } @Override public boolean remove(final short adapterId) { final String typeName = getTypeName(adapterId); return delete(typeName, adapterId); } @Override public String[] getTypeNames() { final MetadataReader reader = getReader(false); if (reader == null) { return new String[0]; } final CloseableIterator results 
= reader.query(new MetadataQuery(INTERNAL_TO_EXTERNAL_ID)); try (CloseableIterator it = new CloseableIteratorWrapper<>( results, Iterators.transform( results, input -> StringUtils.stringFromBinary(input.getValue())))) { return Iterators.toArray(it, String.class); } } @Override public short[] getAdapterIds() { final MetadataReader reader = getReader(false); if (reader == null) { return new short[0]; } final CloseableIterator results = reader.query(new MetadataQuery(EXTERNAL_TO_INTERNAL_ID)); try (CloseableIterator it = new CloseableIteratorWrapper<>( results, Iterators.transform( results, input -> ByteArrayUtils.byteArrayToShort(input.getValue())))) { return ArrayUtils.toPrimitive(Iterators.toArray(it, Short.class)); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/MetadataIterators.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.data.visibility.VisibilityExpression; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.operations.MetadataQuery; import com.google.common.collect.Iterators; public class MetadataIterators { public static CloseableIterator clientVisibilityFilter( final CloseableIterator source, final String... authorizations) { if (authorizations != null) { final Set authorizationSet = new HashSet<>(Arrays.asList(authorizations)); return new CloseableIteratorWrapper<>( source, Iterators.filter(source, input -> isVisible(input, authorizationSet))); } return source; } public static CloseableIterator clientPrefixFilter( final CloseableIterator source, final MetadataQuery query) { if (query.hasPrimaryId()) { return new CloseableIteratorWrapper<>( source, Iterators.filter(source, input -> startsWith(input, query))); } return source; } public static CloseableIterator clientPrefixAndVisibilityFilter( final CloseableIterator source, final MetadataQuery query) { if (query.getAuthorizations() != null) { if (query.hasPrimaryId()) { final Set authorizationSet = new HashSet<>(Arrays.asList(query.getAuthorizations())); return new CloseableIteratorWrapper<>(source, Iterators.filter(source, input -> { return isVisible(input, authorizationSet) && startsWith(input, query); })); } else { return 
clientVisibilityFilter(source, query.getAuthorizations()); } } else if (query.hasPrimaryId()) { return clientPrefixFilter(source, query); } return source; } private static boolean isVisible( final GeoWaveMetadata metadata, final Set authorizationSet) { String visibility = ""; if (metadata.getVisibility() != null) { visibility = StringUtils.stringFromBinary(metadata.getVisibility()); } return VisibilityExpression.evaluate(visibility, authorizationSet); } private static boolean startsWith(final GeoWaveMetadata metadata, MetadataQuery query) { return ByteArrayUtils.startsWith(metadata.getPrimaryId(), query.getPrimaryId()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/PropertyStoreFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.PropertyStore; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; public class PropertyStoreFactory extends BaseStoreFactory { public PropertyStoreFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public PropertyStore createStore(final StoreFactoryOptions options) { return new PropertyStoreImpl(helper.createOperations(options), options.getStoreOptions()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/metadata/PropertyStoreImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.metadata; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.DataStoreProperty; import org.locationtech.geowave.core.store.PropertyStore; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataType; public class PropertyStoreImpl extends AbstractGeoWavePersistence implements PropertyStore { public PropertyStoreImpl(final DataStoreOperations operations, final DataStoreOptions options) { super(operations, options, MetadataType.STORE_PROPERTIES); } private ByteArray keyToPrimaryId(final String key) { return new ByteArray(StringUtils.stringToBinary(key)); } @Override public DataStoreProperty getProperty(final String propertyKey) { return internalGetObject(keyToPrimaryId(propertyKey), null, false); } @Override public void setProperty(final DataStoreProperty property) { final ByteArray primaryId = getPrimaryId(property); if (objectExists(primaryId, null)) { remove(primaryId); } addObject(property); } @Override protected ByteArray getPrimaryId(final DataStoreProperty persistedObject) { return keyToPrimaryId(persistedObject.getKey()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/BaseReaderParams.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the 
/* ================================================
 * FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/BaseReaderParams.java
 * ================================================ */
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;

/**
 * Immutable value object holding the common inputs of a backend read operation: the adapter
 * stores, an optional server-side aggregation, an optional field subset, and authorization
 * settings.
 *
 * @param <T> the row type produced by the read
 */
public abstract class BaseReaderParams<T> {
  private final PersistentAdapterStore adapterStore;
  private final AdapterIndexMappingStore mappingStore;
  private final InternalAdapterStore internalAdapterStore;
  // NOTE(review): generic parameters reconstructed as Pair<adapter, aggregation> — confirm
  private final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation;
  private final Pair<String[], InternalDataAdapter<?>> fieldSubsets;
  private final boolean isAuthorizationsLimiting;
  private final String[] additionalAuthorizations;

  public BaseReaderParams(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final boolean isAuthorizationsLimiting,
      final String[] additionalAuthorizations) {
    this.adapterStore = adapterStore;
    this.mappingStore = mappingStore;
    this.internalAdapterStore = internalAdapterStore;
    this.aggregation = aggregation;
    this.fieldSubsets = fieldSubsets;
    this.isAuthorizationsLimiting = isAuthorizationsLimiting;
    this.additionalAuthorizations = additionalAuthorizations;
  }

  public PersistentAdapterStore getAdapterStore() {
    return adapterStore;
  }

  public AdapterIndexMappingStore getAdapterIndexMappingStore() {
    return mappingStore;
  }

  public InternalAdapterStore getInternalAdapterStore() {
    return internalAdapterStore;
  }

  public Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> getAggregation() {
    return aggregation;
  }

  public Pair<String[], InternalDataAdapter<?>> getFieldSubsets() {
    return fieldSubsets;
  }

  /** @return {@code true} when a server-side aggregation is requested for this read */
  public boolean isAggregation() {
    return ((aggregation != null) && (aggregation.getRight() != null));
  }

  public boolean isAuthorizationsLimiting() {
    return isAuthorizationsLimiting;
  }

  public String[] getAdditionalAuthorizations() {
    return additionalAuthorizations;
  }
}

/* ================================================
 * FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/BaseReaderParamsBuilder.java
 * ================================================ */
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;

/**
 * Self-typed (CRTP) builder base for reader params, letting each chainable setter return the
 * concrete builder subtype.
 *
 * @param <T> the row type produced by the read
 * @param <R> the concrete builder type returned by the fluent setters
 */
public abstract class BaseReaderParamsBuilder<T, R extends BaseReaderParamsBuilder<T, R>> {
  protected final PersistentAdapterStore adapterStore;
  protected final AdapterIndexMappingStore mappingStore;
  protected final InternalAdapterStore internalAdapterStore;
  protected Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation = null;
  protected Pair<String[], InternalDataAdapter<?>> fieldSubsets = null;
  protected boolean isAuthorizationsLimiting = true;
  protected String[] additionalAuthorizations;

  public BaseReaderParamsBuilder(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore) {
    this.adapterStore = adapterStore;
    this.mappingStore = mappingStore;
    this.internalAdapterStore = internalAdapterStore;
  }

  /** @return {@code this}, typed as the concrete builder subtype */
  protected abstract R builder();

  public R aggregation(final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation) {
    this.aggregation = aggregation;
    return builder();
  }

  public R fieldSubsets(final Pair<String[], InternalDataAdapter<?>> fieldSubsets) {
    this.fieldSubsets = fieldSubsets;
    return builder();
  }

  public R additionalAuthorizations(final String... authorizations) {
    this.additionalAuthorizations = authorizations;
    return builder();
  }

  public R isAuthorizationsLimiting(final boolean isAuthorizationsLimiting) {
    this.isAuthorizationsLimiting = isAuthorizationsLimiting;
    return builder();
  }
}
/* ================================================
 * FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataIndexReaderParams.java
 * ================================================ */
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Aggregation;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;

/**
 * Parameters for reading from the data-ID index of a single adapter. The rows to read are
 * specified either as an explicit set of data IDs or as an inclusive [start, end] range —
 * never both (see the two constructors).
 */
public class DataIndexReaderParams extends BaseReaderParams<GeoWaveRow> {
  private byte[][] dataIds;
  private byte[] startInclusiveDataId;
  private byte[] endInclusiveDataId;
  private boolean reverse = false;
  private final short adapterId;

  /** Constructor for reading an explicit set of data IDs. */
  public DataIndexReaderParams(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final short adapterId,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final byte[][] dataIds,
      final boolean isAuthorizationsLimiting,
      final String[] additionalAuthorizations) {
    super(
        adapterStore,
        mappingStore,
        internalAdapterStore,
        aggregation,
        fieldSubsets,
        isAuthorizationsLimiting,
        additionalAuthorizations);
    this.dataIds = dataIds;
    this.adapterId = adapterId;
  }

  /** Constructor for reading an inclusive range of data IDs, optionally in reverse order. */
  public DataIndexReaderParams(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore,
      final short adapterId,
      final Pair<InternalDataAdapter<?>, Aggregation<?, ?, ?>> aggregation,
      final Pair<String[], InternalDataAdapter<?>> fieldSubsets,
      final byte[] startInclusiveDataId,
      final byte[] endInclusiveDataId,
      final boolean reverse,
      final boolean isAuthorizationsLimiting,
      final String[] additionalAuthorizations) {
    super(
        adapterStore,
        mappingStore,
        internalAdapterStore,
        aggregation,
        fieldSubsets,
        isAuthorizationsLimiting,
        additionalAuthorizations);
    this.startInclusiveDataId = startInclusiveDataId;
    this.endInclusiveDataId = endInclusiveDataId;
    this.reverse = reverse;
    this.adapterId = adapterId;
  }

  /** @return the explicit data IDs, or {@code null} when a range was specified instead */
  public byte[][] getDataIds() {
    return dataIds;
  }

  public short getAdapterId() {
    return adapterId;
  }

  public byte[] getStartInclusiveDataId() {
    return startInclusiveDataId;
  }

  public byte[] getEndInclusiveDataId() {
    return endInclusiveDataId;
  }

  public boolean isReverse() {
    return reverse;
  }
}

/* ================================================
 * FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataIndexReaderParamsBuilder.java
 * ================================================ */
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;

/**
 * Fluent builder for {@link DataIndexReaderParams}. Setting explicit data IDs clears any
 * previously configured range and vice versa, since the two are mutually exclusive.
 *
 * @param <T> the row type parameter propagated from the builder base
 */
public class DataIndexReaderParamsBuilder<T> extends
    BaseReaderParamsBuilder<T, DataIndexReaderParamsBuilder<T>> {
  protected byte[][] dataIds = null;
  private byte[] startInclusiveDataId = null;
  private byte[] endInclusiveDataId = null;
  private boolean reverse = false;
  protected short adapterId;

  public DataIndexReaderParamsBuilder(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore) {
    super(adapterStore, mappingStore, internalAdapterStore);
  }

  @Override
  protected DataIndexReaderParamsBuilder<T> builder() {
    return this;
  }

  public DataIndexReaderParamsBuilder<T> dataIds(final byte[]... dataIds) {
    this.dataIds = dataIds;
    // its either an array of explicit IDs or a range, not both
    this.startInclusiveDataId = null;
    this.endInclusiveDataId = null;
    return builder();
  }

  public DataIndexReaderParamsBuilder<T> dataIdsByRange(
      final byte[] startInclusiveDataId,
      final byte[] endInclusiveDataId) {
    return dataIdsByRange(startInclusiveDataId, endInclusiveDataId, false);
  }

  /** Currently only RocksDB And HBase support reverse scans */
  public DataIndexReaderParamsBuilder<T> dataIdsByRange(
      final byte[] startInclusiveDataId,
      final byte[] endInclusiveDataId,
      final boolean reverse) {
    this.dataIds = null;
    // its either an array of explicit IDs or a range, not both
    this.startInclusiveDataId = startInclusiveDataId;
    this.endInclusiveDataId = endInclusiveDataId;
    this.reverse = reverse;
    return builder();
  }

  public DataIndexReaderParamsBuilder<T> adapterId(final short adapterId) {
    this.adapterId = adapterId;
    return builder();
  }

  /** Builds range-based params when either range endpoint is set, otherwise data-ID params. */
  public DataIndexReaderParams build() {
    if ((startInclusiveDataId != null) || (endInclusiveDataId != null)) {
      return new DataIndexReaderParams(
          adapterStore,
          mappingStore,
          internalAdapterStore,
          adapterId,
          aggregation,
          fieldSubsets,
          startInclusiveDataId,
          endInclusiveDataId,
          reverse,
          isAuthorizationsLimiting,
          additionalAuthorizations);
    }
    return new DataIndexReaderParams(
        adapterStore,
        mappingStore,
        internalAdapterStore,
        adapterId,
        aggregation,
        fieldSubsets,
        dataIds,
        isAuthorizationsLimiting,
        additionalAuthorizations);
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import java.io.Closeable; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.dataidx.DataIndexUtils; import org.locationtech.geowave.core.store.base.dataidx.DefaultDataIndexRowWriterWrapper; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.util.DataStoreUtils; import com.google.common.collect.Iterators; import com.google.common.primitives.Bytes; public interface DataStoreOperations { boolean indexExists(String indexName) throws IOException; boolean metadataExists(MetadataType type) throws IOException; void deleteAll() throws Exception; boolean deleteAll( String indexName, String typeName, Short adapterId, String... 
additionalAuthorizations); RowWriter createWriter(Index index, InternalDataAdapter adapter); default RowWriter createDataIndexWriter(final InternalDataAdapter adapter) { return new DefaultDataIndexRowWriterWrapper( createWriter(DataIndexUtils.DATA_ID_INDEX, adapter)); } default boolean ensureAuthorizations(final String clientUser, final String... authorizations) { return true; } default boolean clearAuthorizations(final String clientUser) { return true; } MetadataWriter createMetadataWriter(MetadataType metadataType); MetadataReader createMetadataReader(MetadataType metadataType); MetadataDeleter createMetadataDeleter(MetadataType metadataType); RowReader createReader(ReaderParams readerParams); default RowReader createReader(final DataIndexReaderParams readerParams) { final List> readers; if (readerParams.getDataIds() != null) { readers = Arrays.stream(readerParams.getDataIds()).map(dataId -> { final byte[] sortKey = Bytes.concat(new byte[] {(byte) dataId.length}, dataId); return createReader( new ReaderParams<>( DataIndexUtils.DATA_ID_INDEX, readerParams.getAdapterStore(), readerParams.getAdapterIndexMappingStore(), readerParams.getInternalAdapterStore(), new short[] {readerParams.getAdapterId()}, null, readerParams.getAggregation(), readerParams.getFieldSubsets(), false, false, false, false, new QueryRanges(new ByteArrayRange(sortKey, sortKey, false)), null, 1, null, null, null, GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER, new String[0])); }).collect(Collectors.toList()); } else { final byte[] startKey = Bytes.concat( new byte[] {(byte) readerParams.getStartInclusiveDataId().length}, readerParams.getStartInclusiveDataId()); final byte[] endKey = Bytes.concat( new byte[] {(byte) readerParams.getEndInclusiveDataId().length}, readerParams.getEndInclusiveDataId()); readers = Collections.singletonList( createReader( new ReaderParams<>( DataIndexUtils.DATA_ID_INDEX, readerParams.getAdapterStore(), readerParams.getAdapterIndexMappingStore(), 
readerParams.getInternalAdapterStore(), new short[] {readerParams.getAdapterId()}, null, readerParams.getAggregation(), readerParams.getFieldSubsets(), false, false, false, false, new QueryRanges(new ByteArrayRange(startKey, endKey, false)), null, 1, null, null, null, GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER, new String[0]))); } return new RowReaderWrapper<>(new CloseableIteratorWrapper(new Closeable() { @Override public void close() { for (final RowReader r : readers) { r.close(); } } }, Iterators.concat(readers.iterator()))); } default Deleter createDeleter(final ReaderParams readerParams) { return new QueryAndDeleteByRow<>( createRowDeleter( readerParams.getIndex().getName(), readerParams.getAdapterStore(), readerParams.getInternalAdapterStore(), readerParams.getAdditionalAuthorizations()), createReader(readerParams)); } default void delete(final DataIndexReaderParams readerParams) { try (QueryAndDeleteByRow defaultDeleter = new QueryAndDeleteByRow<>( createRowDeleter( DataIndexUtils.DATA_ID_INDEX.getName(), readerParams.getAdapterStore(), readerParams.getInternalAdapterStore()), createReader(readerParams))) { while (defaultDeleter.hasNext()) { defaultDeleter.next(); } } } RowDeleter createRowDeleter( String indexName, PersistentAdapterStore adapterStore, InternalAdapterStore internalAdapterStore, String... 
authorizations); default boolean mergeData( final Index index, final PersistentAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore adapterIndexMappingStore, final Integer maxRangeDecomposition) { return DataStoreUtils.mergeData( this, maxRangeDecomposition, index, adapterStore, internalAdapterStore, adapterIndexMappingStore); } default boolean mergeStats(final DataStatisticsStore statsStore) { return statsStore.mergeStats(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/DataStoreOperationsFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.locationtech.geowave.core.store.BaseStoreFactory; import org.locationtech.geowave.core.store.StoreFactoryHelper; import org.locationtech.geowave.core.store.StoreFactoryOptions; public class DataStoreOperationsFactory extends BaseStoreFactory { public DataStoreOperationsFactory( final String typeName, final String description, final StoreFactoryHelper helper) { super(typeName, description, helper); } @Override public DataStoreOperations createStore(final StoreFactoryOptions options) { return helper.createOperations(options); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/Deleter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public interface Deleter extends RowReader, ScanCallback { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataDeleter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

/**
 * Provides an interface for deleting GeoWave metadata. A {@link MetadataQuery} is used to specify
 * the metadata to be deleted.
 *
 * <p> Delete queries may only be performed if the deleter is not closed.
 */
public interface MetadataDeleter extends AutoCloseable {

  /**
   * Delete metadata from the DB.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The deleter is not closed</li>
   * </ul>
   *
   * @param query The query that specifies the metadata to be deleted.
   * @return {@code true} if an object matching the query was found and successfully deleted,
   *         {@code false} otherwise.
   */
  boolean delete(MetadataQuery query);

  /**
   * Flush the deleter, committing all pending changes. Note that the changes may already be
   * committed - this method just establishes that they *must* be committed after the method
   * returns.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The deleter is not closed</li>
   * </ul>
   */
  void flush();
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.locationtech.geowave.core.index.ByteArrayRange; public class MetadataQuery { private final byte[] primaryId; private final byte[] secondaryId; private final String[] authorizations; private final boolean primaryIdPrefix; private final ByteArrayRange[] primaryIdRanges; public MetadataQuery() { this(null, null, false); } public MetadataQuery(final byte[] secondaryId, final String... authorizations) { this(null, secondaryId, false, authorizations); } public MetadataQuery( final byte[] primaryId, final byte[] secondaryId, final String... authorizations) { this(primaryId, secondaryId, false, authorizations); } public MetadataQuery( final byte[] primaryId, final byte[] secondaryId, final boolean primaryIdPrefix, final String... authorizations) { this.primaryId = primaryId; primaryIdRanges = null; this.secondaryId = secondaryId; this.authorizations = authorizations; this.primaryIdPrefix = primaryIdPrefix; } public MetadataQuery( final ByteArrayRange[] primaryIdRanges, final byte[] secondaryId, final String... 
authorizations) { this.primaryIdRanges = primaryIdRanges; primaryId = null; this.secondaryId = secondaryId; this.authorizations = authorizations; primaryIdPrefix = false; } public byte[] getPrimaryId() { return primaryId; } public byte[] getSecondaryId() { return secondaryId; } public boolean hasPrimaryId() { return (primaryId != null) && (primaryId.length > 0); } public boolean hasSecondaryId() { return (secondaryId != null) && (secondaryId.length > 0); } public boolean hasPrimaryIdRanges() { return (primaryIdRanges != null) && (primaryIdRanges.length > 0); } public boolean isExact() { return !primaryIdPrefix; } public boolean isPrefix() { return primaryIdPrefix; } public ByteArrayRange[] getPrimaryIdRanges() { return primaryIdRanges; } public String[] getAuthorizations() { return authorizations; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; /** * Provides an interface for reading GeoWave metadata. A {@link MetadataQuery} is used to specify * the metadata to be read. */ public interface MetadataReader { /** * Read metadata, as specified by the query. * * @param query The query that specifies the metadata to be read. * @return An iterator that lazily loads the metadata as they are requested. */ CloseableIterator query(MetadataQuery query); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; public enum MetadataType { ADAPTER, INDEX_MAPPINGS, INDEX, STATISTICS, STATISTIC_VALUES(true), INTERNAL_ADAPTER, STORE_PROPERTIES, LEGACY_STATISTICS("STATS", true), LEGACY_INDEX_MAPPINGS("AIM"); private boolean statValues; private String id; private MetadataType() { this(null); } private MetadataType(final String id) { this(id, false); } private MetadataType(final boolean statValues) { this(null, statValues); } private MetadataType(final String id, final boolean statValues) { this.id = id == null ? name() : id; this.statValues = statValues; } @Override public String toString() { return id(); } public String id() { return id; } public boolean isStatValues() { return statValues; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/MetadataWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.locationtech.geowave.core.store.entities.GeoWaveMetadata;

/**
 * Provides an interface for persisting metadata.
 *
 * <p> Writes may only be performed as long as the instance is not closed.
 */
public interface MetadataWriter extends AutoCloseable {

  /**
   * Write metadata to the table.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The writer is not closed</li>
   * </ul>
   *
   * @param metadata The metadata.
   */
  void write(GeoWaveMetadata metadata);

  /**
   * Flush the writer, committing all pending writes. Note that the writes may already be committed
   * - this method just establishes that they *must* be committed after the method returns.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The writer is not closed</li>
   * </ul>
   */
  void flush();
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/ParallelDecoder.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import java.io.Closeable; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; /** * An abstract class that offers data stores a way to scan and decode rows in parallel. It is up to * the data store implementation to provide implementations of {@link ParallelDecoder.RowProvider} * to be used for providing rows from the underlying database. * *

Note: The row transformer passed in MUST be thread-safe, as decoding happens in parallel. * * @param the type of the decoded rows */ public abstract class ParallelDecoder implements CloseableIterator { private BlockingQueue results; private ExecutorService threadPool; private final GeoWaveRowIteratorTransformer rowTransformer; private static int RESULT_BUFFER_SIZE = 10000; private int remainingTasks = 0; private final int numThreads; private static Object TASK_END_MARKER = new Object(); private Exception exception = null; /** * Create a parallel decoder with the given row transformer. * * @param rowTransformer the thread-safe row transformer to use for decoding rows */ public ParallelDecoder(final GeoWaveRowIteratorTransformer rowTransformer) { this(rowTransformer, 8); } /** * Create a parallel decoder with the given row transformer and number of threads. * * @param rowTransformer the thread-safe row transformer to use for decoding rows * @param numThreads the number of threads to allow in the thread pool */ public ParallelDecoder( final GeoWaveRowIteratorTransformer rowTransformer, final int numThreads) { this.numThreads = numThreads; this.rowTransformer = rowTransformer; this.threadPool = new ThreadPoolExecutor( numThreads, numThreads, 60, TimeUnit.SECONDS, new LinkedBlockingQueue(), Executors.defaultThreadFactory()); ((ThreadPoolExecutor) this.threadPool).allowCoreThreadTimeOut(true); results = new ArrayBlockingQueue<>(RESULT_BUFFER_SIZE); } /** @return the number of threads allowed in the thread pool */ protected int getNumThreads() { return numThreads; } /** * @return a list of {@link RowProvider}s that provide {@link GeoWaveRow}s to the decoder * @throws Exception */ protected abstract List getRowProviders() throws Exception; protected synchronized void setDecodeException(final Exception e) { if (exception == null) { this.exception = e; this.threadPool.shutdownNow(); } } private synchronized boolean hasException() { return this.exception != null; } private 
synchronized Exception getException() { return this.exception; } /** * Start the parallel decode. * * @throws Exception */ public void startDecode() throws Exception { final List rowProviders = getRowProviders(); remainingTasks = rowProviders.size(); for (final RowProvider rowProvider : rowProviders) { threadPool.submit(new DecodeTask<>(rowProvider, this)); } } /** * Task to decode the rows from a single row provider. * * @param the type of the decoded rows */ private static class DecodeTask implements Runnable { private final RowProvider rowProvider; private final ParallelDecoder parent; public DecodeTask(final RowProvider rowProvider, final ParallelDecoder parent) { this.rowProvider = rowProvider; this.parent = parent; } private boolean shouldTerminate() { return Thread.currentThread().isInterrupted(); } private void offerResult(final Object result) throws InterruptedException { while (!shouldTerminate() && !parent.results.offer(result)) { // Results buffer is full, wait until there is some space Thread.sleep(1); } } @Override public void run() { try { rowProvider.init(); final Iterator transformed = parent.rowTransformer.apply(rowProvider); while (transformed.hasNext() && !shouldTerminate()) { offerResult(transformed.next()); } // No more rows, signal the end of this task. offerResult(TASK_END_MARKER); } catch (final Exception e) { // Don't overwrite the original exception if there is one if (!parent.hasException()) { parent.setDecodeException(e); } } finally { try { rowProvider.close(); } catch (final IOException e) { // Ignore } } } } @Override public void close() { threadPool.shutdownNow(); } Object nextResult = null; private void computeNext() { try { nextResult = null; while (remainingTasks > 0) { while (!hasException() && ((nextResult = results.poll()) == null)) { // No results available, but there are still tasks running, // wait for more results. Thread.sleep(1); } // task end was signaled, reduce remaining task count. 
if (nextResult == TASK_END_MARKER) { remainingTasks--; nextResult = null; continue; } break; } } catch (final InterruptedException e) { setDecodeException(e); } if (hasException()) { throw new RuntimeException(getException()); } } @Override public boolean hasNext() { if (nextResult == null) { computeNext(); } return nextResult != null; } @SuppressWarnings("unchecked") @Override public T next() { if (nextResult == null) { computeNext(); } final Object next = nextResult; nextResult = null; return (T) next; } /** * Row provider used by the parallel decoder to get {@link GeoWaveRow}s from the underlying * database. */ public abstract static class RowProvider implements Closeable, Iterator { public abstract void init(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/QueryAndDeleteByRow.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import java.util.NoSuchElementException; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public class QueryAndDeleteByRow implements Deleter { private final RowDeleter rowDeleter; private final RowReader reader; public QueryAndDeleteByRow() { this.reader = new EmptyReader<>(); rowDeleter = null; } public QueryAndDeleteByRow(final RowDeleter rowDeleter, final RowReader reader) { this.rowDeleter = rowDeleter; this.reader = reader; } @Override public void close() { reader.close(); rowDeleter.close(); } @Override public boolean hasNext() { return reader.hasNext(); } @Override public T next() { return reader.next(); } @Override public void entryScanned(final T entry, final GeoWaveRow row) { rowDeleter.delete(row); } private static class EmptyReader implements RowReader { @Override public void close() {} @Override public boolean hasNext() { return false; } @Override public T next() { throw new NoSuchElementException(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/RangeReaderParams.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.Index; public abstract class RangeReaderParams extends BaseReaderParams { private final Index index; private final short[] adapterIds; private final double[] maxResolutionSubsamplingPerDimension; private final boolean isMixedVisibility; private final boolean isClientsideRowMerging; private final Integer limit; private final Integer maxRangeDecomposition; public RangeReaderParams( final Index index, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final short[] adapterIds, final double[] maxResolutionSubsamplingPerDimension, final Pair, Aggregation> aggregation, final Pair> fieldSubsets, final boolean isMixedVisibility, final boolean isAuthorizationsLimiting, final boolean isClientsideRowMerging, final Integer limit, final Integer maxRangeDecomposition, final String[] additionalAuthorizations) { super( adapterStore, mappingStore, internalAdapterStore, aggregation, fieldSubsets, isAuthorizationsLimiting, additionalAuthorizations); this.index = index; this.adapterIds = adapterIds; this.maxResolutionSubsamplingPerDimension = 
maxResolutionSubsamplingPerDimension; this.isMixedVisibility = isMixedVisibility; this.isClientsideRowMerging = isClientsideRowMerging; this.limit = limit; this.maxRangeDecomposition = maxRangeDecomposition; } public Index getIndex() { return index; } public short[] getAdapterIds() { return adapterIds; } public double[] getMaxResolutionSubsamplingPerDimension() { return maxResolutionSubsamplingPerDimension; } public boolean isMixedVisibility() { return isMixedVisibility; } public Integer getLimit() { return limit; } public Integer getMaxRangeDecomposition() { return maxRangeDecomposition; } public boolean isClientsideRowMerging() { return isClientsideRowMerging; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/RangeReaderParamsBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Index;

/**
 * Fluent builder base for {@link RangeReaderParams}; the self-referential type parameter {@code R}
 * lets subclass setters return the concrete builder type.
 *
 * <p> NOTE(review): generic type parameters were lost during text extraction and have been
 * reconstructed; verify against {@code BaseReaderParamsBuilder}.
 *
 * @param <T> the type of the rows produced by the read
 * @param <R> the concrete builder type returned by the fluent setters
 */
public abstract class RangeReaderParamsBuilder<T, R extends RangeReaderParamsBuilder<T, R>>
    extends BaseReaderParamsBuilder<T, R> {
  protected final Index index;
  protected short[] adapterIds = null;
  protected double[] maxResolutionSubsamplingPerDimension = null;
  protected boolean isMixedVisibility = false;
  protected boolean isClientsideRowMerging = false;
  protected Integer limit = null;
  protected Integer maxRangeDecomposition = null;

  public RangeReaderParamsBuilder(
      final Index index,
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final InternalAdapterStore internalAdapterStore) {
    super(adapterStore, mappingStore, internalAdapterStore);
    this.index = index;
  }

  @Override
  protected abstract R builder();

  public R adapterIds(final short... adapterIds) {
    this.adapterIds = adapterIds;
    return builder();
  }

  public R maxResolutionSubsamplingPerDimension(
      final double[] maxResolutionSubsamplingPerDimension) {
    this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension;
    return builder();
  }

  public R isMixedVisibility(final boolean isMixedVisibility) {
    this.isMixedVisibility = isMixedVisibility;
    return builder();
  }

  public R isClientsideRowMerging(final boolean isClientsideRowMerging) {
    this.isClientsideRowMerging = isClientsideRowMerging;
    return builder();
  }

  public R limit(final Integer limit) {
    this.limit = limit;
    return builder();
  }

  public R maxRangeDecomposition(final Integer maxRangeDecomposition) {
    this.maxRangeDecomposition = maxRangeDecomposition;
    return builder();
  }

  @Override
  public R additionalAuthorizations(final String... authorizations) {
    additionalAuthorizations = authorizations;
    return builder();
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/ReaderParams.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class ReaderParams extends RangeReaderParams { private final boolean isServersideAggregation; private final QueryRanges queryRanges; private final QueryFilter filter; private final List coordinateRanges; private final List constraints; private final GeoWaveRowIteratorTransformer rowTransformer; public ReaderParams( final Index index, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final short[] adapterIds, final double[] maxResolutionSubsamplingPerDimension, final Pair, Aggregation> aggregation, final Pair> fieldSubsets, final boolean isMixedVisibility, final boolean isAuthorizationsLimiting, final boolean isServersideAggregation, final 
boolean isClientsideRowMerging, final QueryRanges queryRanges, final QueryFilter filter, final Integer limit, final Integer maxRangeDecomposition, final List coordinateRanges, final List constraints, final GeoWaveRowIteratorTransformer rowTransformer, final String[] additionalAuthorizations) { super( index, adapterStore, mappingStore, internalAdapterStore, adapterIds, maxResolutionSubsamplingPerDimension, aggregation, fieldSubsets, isMixedVisibility, isAuthorizationsLimiting, isClientsideRowMerging, limit, maxRangeDecomposition, additionalAuthorizations); this.isServersideAggregation = isServersideAggregation; this.queryRanges = queryRanges; this.filter = filter; this.coordinateRanges = coordinateRanges; this.constraints = constraints; this.rowTransformer = rowTransformer; } public List getCoordinateRanges() { return coordinateRanges; } public List getConstraints() { return constraints; } public boolean isServersideAggregation() { return isServersideAggregation; } public QueryRanges getQueryRanges() { return queryRanges; } public QueryFilter getFilter() { return filter; } public GeoWaveRowIteratorTransformer getRowTransformer() { return rowTransformer; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/ReaderParamsBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import java.util.List; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class ReaderParamsBuilder extends RangeReaderParamsBuilder> { protected boolean isServersideAggregation = false; protected QueryRanges queryRanges = null; protected QueryFilter filter = null; protected List coordinateRanges = null; protected List constraints = null; protected GeoWaveRowIteratorTransformer rowTransformer; public ReaderParamsBuilder( final Index index, final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final InternalAdapterStore internalAdapterStore, final GeoWaveRowIteratorTransformer rowTransformer) { super(index, adapterStore, mappingStore, internalAdapterStore); this.rowTransformer = rowTransformer; } @Override protected ReaderParamsBuilder builder() { return this; } public ReaderParamsBuilder isServersideAggregation(final boolean isServersideAggregation) { this.isServersideAggregation = isServersideAggregation; return builder(); } public 
ReaderParamsBuilder queryRanges(final QueryRanges queryRanges) { this.queryRanges = queryRanges; return builder(); } public ReaderParamsBuilder filter(final QueryFilter filter) { this.filter = filter; return builder(); } public ReaderParamsBuilder coordinateRanges( final List coordinateRanges) { this.coordinateRanges = coordinateRanges; return builder(); } public ReaderParamsBuilder constraints(final List constraints) { this.constraints = constraints; return builder(); } public GeoWaveRowIteratorTransformer getRowTransformer() { return rowTransformer; } public ReaderParams build() { if (queryRanges == null) { queryRanges = new QueryRanges(); } if (additionalAuthorizations == null) { additionalAuthorizations = new String[0]; } return new ReaderParams<>( index, adapterStore, mappingStore, internalAdapterStore, adapterIds, maxResolutionSubsamplingPerDimension, aggregation, fieldSubsets, isMixedVisibility, isAuthorizationsLimiting, isServersideAggregation, isClientsideRowMerging, queryRanges, filter, limit, maxRangeDecomposition, coordinateRanges, constraints, rowTransformer, additionalAuthorizations); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowDeleter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import java.io.Closeable;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;

/**
 * Provides an interface for deleting GeoWave data rows.
 */
public interface RowDeleter extends Closeable {
  /**
   * Delete a GeoWave row from the DB.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The deleter is not closed</li>
   * </ul>
   *
   * @param row The row to delete.
   */
  void delete(GeoWaveRow row);

  /**
   * Flush the deleter, committing all pending changes. Note that the changes may already be
   * committed - this method just establishes that they *must* be committed after the method
   * returns.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The deleter is not closed</li>
   * </ul>
   */
  void flush();

  /**
   * Close the deleter, committing all pending changes. This method is overridden because it does
   * not throw an IOException.
   */
  @Override
  void close();
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowReader.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.locationtech.geowave.core.store.CloseableIterator; public interface RowReader extends CloseableIterator { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowReaderWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import org.locationtech.geowave.core.store.CloseableIterator; public class RowReaderWrapper implements RowReader { private final CloseableIterator iterator; public RowReaderWrapper(final CloseableIterator iterator) { this.iterator = iterator; } @Override public void close() { iterator.close(); } @Override public boolean hasNext() { return iterator.hasNext(); } @Override public T next() { return iterator.next(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/RowWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations;

import org.locationtech.geowave.core.store.entities.GeoWaveRow;

/**
 * This interface is returned by DataStoreOperations and useful for general purpose writing of
 * entries. The default implementation of AccumuloOperations will wrap this interface with a
 * BatchWriter but can be overridden for other mechanisms to write the data.
 */
public interface RowWriter extends AutoCloseable {
  /**
   * Write multiple GeoWave rows to the DB.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The writer is not closed</li>
   * </ul>
   *
   * @param rows The array of rows to be written.
   */
  void write(GeoWaveRow[] rows);

  /**
   * Write a GeoWave row to the DB.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The writer is not closed</li>
   * </ul>
   *
   * @param row The row to be written.
   */
  void write(GeoWaveRow row);

  /**
   * Flush the writer, committing all pending writes. Note that the writes may already be committed
   * - this method just establishes that they *must* be committed after the method returns.
   *
   * <p> Preconditions:
   * <ul>
   * <li>The writer is not closed</li>
   * </ul>
   */
  void flush();
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/SimpleParallelDecoder.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; /** * An implementation of {@link ParallelDecoder} that consumes a single {@link GeoWaveRow} iterator * and decodes it in parallel. * * @param the type of the decoded rows */ public class SimpleParallelDecoder extends ParallelDecoder { private ArrayBlockingQueue consumedRows; private Thread consumerThread; private volatile boolean isTerminating = false; private static final int CONSUMED_ROW_BUFFER_SIZE = 10000; public SimpleParallelDecoder( final GeoWaveRowIteratorTransformer rowTransformer, final Iterator sourceIterator) { super(rowTransformer); consumedRows = new ArrayBlockingQueue<>(CONSUMED_ROW_BUFFER_SIZE); consumerThread = new Thread(new Runnable() { @Override public void run() { try { while (sourceIterator.hasNext() && !Thread.interrupted()) { final GeoWaveRow next = sourceIterator.next(); while (!consumedRows.offer(next)) { // queue is full, wait for space try { Thread.sleep(1); } catch (final InterruptedException e) { isTerminating = true; return; } } } } catch (final Exception e) { setDecodeException(e); } isTerminating = true; } }); consumerThread.setDaemon(true); } @Override public void startDecode() throws Exception { consumerThread.start(); super.startDecode(); } @Override public void close() { if (consumerThread.isAlive()) { consumerThread.interrupt(); } 
super.close(); } @Override protected List getRowProviders() throws Exception { final int numThreads = getNumThreads(); final List rowProviders = new ArrayList<>(numThreads); for (int i = 0; i < numThreads; i++) { rowProviders.add(new BlockingQueueRowProvider<>(this)); } return rowProviders; } /* * Simple row provider that provides the next result from the blocking queue. */ private static class BlockingQueueRowProvider extends ParallelDecoder.RowProvider { private final SimpleParallelDecoder source; public BlockingQueueRowProvider(final SimpleParallelDecoder source) { this.source = source; } @Override public void close() throws IOException { // Do nothing } private GeoWaveRow next = null; private void computeNext() { while ((next = source.consumedRows.poll()) == null) { if (source.isTerminating) { next = source.consumedRows.poll(); break; } try { Thread.sleep(1); } catch (final InterruptedException e) { return; } } } @Override public boolean hasNext() { if (next == null) { computeNext(); } return next != null; } @Override public GeoWaveRow next() { if (next == null) { computeNext(); } final GeoWaveRow retVal = next; next = null; return retVal; } @Override public void init() { // Do nothing } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/config/IndexDefaultConfigProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.operations.config; import java.util.Properties; import org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi; public class IndexDefaultConfigProvider implements DefaultConfigProviderSpi { private final Properties configProperties = new Properties(); /** Create the properties for the config-properties file */ private void setProperties() { // Spatial Index configProperties.setProperty("index.default-spatial.opts.numPartitions", "8"); configProperties.setProperty("index.default-spatial.opts.partitionStrategy", "HASH"); configProperties.setProperty("index.default-spatial.opts.storeTime", "false"); configProperties.setProperty("index.default-spatial.type", "spatial"); // Spatial_Temporal Index configProperties.setProperty("index.default-spatial-temporal.opts.bias", "BALANCED"); configProperties.setProperty("index.default-spatial-temporal.opts.maxDuplicates", "-1"); configProperties.setProperty("index.default-spatial-temporal.opts.numPartitions", "8"); configProperties.setProperty("index.default-spatial-temporal.opts.partitionStrategy", "HASH"); configProperties.setProperty("index.default-spatial-temporal.opts.period", "YEAR"); configProperties.setProperty("index.default-spatial-temporal.type", "spatial_temporal"); } @Override public Properties getDefaultConfig() { setProperties(); return configProperties; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/operations/remote/options/BasicIndexOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the 
 * Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.operations.remote.options;

import java.util.Arrays;
import org.locationtech.geowave.core.store.index.IndexPluginOptions.PartitionStrategy;
import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;

/**
 * Command-line options common to all index types: the number of partitions and the partition
 * strategy. Parsed by JCommander via the {@code @Parameter} annotations.
 */
public class BasicIndexOptions {

  @Parameter(
      names = {"-np", "--numPartitions"},
      description = "The number of partitions. Default partitions will be 1.")
  protected int numPartitions = 1;

  @Parameter(
      names = {"-ps", "--partitionStrategy"},
      description = "The partition strategy to use. Default will be none.",
      converter = PartitionStrategyConverter.class)
  protected PartitionStrategy partitionStrategy = PartitionStrategy.NONE;

  public int getNumPartitions() {
    return numPartitions;
  }

  public void setNumPartitions(final int numPartitions) {
    this.numPartitions = numPartitions;
  }

  public PartitionStrategy getPartitionStrategy() {
    return partitionStrategy;
  }

  public void setPartitionStrategy(final PartitionStrategy partitionStrategy) {
    this.partitionStrategy = partitionStrategy;
  }

  /**
   * JCommander converter that parses a {@link PartitionStrategy} from its string form, throwing a
   * {@link ParameterException} listing the valid values on failure.
   */
  public static class PartitionStrategyConverter implements IStringConverter {
    @Override
    public PartitionStrategy convert(final String value) {
      final PartitionStrategy convertedValue = PartitionStrategy.fromString(value);
      if (convertedValue == null) {
        throw new ParameterException(
            "Value "
                + value
                + " can not be converted to PartitionStrategy. "
                + "Available values are: "
                + Arrays.toString(PartitionStrategy.values()));
      }
      return convertedValue;
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/BaseQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.DataTypeQueryOptions; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; public abstract class BaseQuery> implements Persistable { private CommonQueryOptions commonQueryOptions; private O dataTypeQueryOptions; private IndexQueryOptions indexQueryOptions; private QueryConstraints queryConstraints; protected BaseQuery() {} public BaseQuery( final CommonQueryOptions commonQueryOptions, final O dataTypeQueryOptions, final IndexQueryOptions indexQueryOptions, final QueryConstraints queryConstraints) { this.commonQueryOptions = commonQueryOptions; this.dataTypeQueryOptions = dataTypeQueryOptions; this.indexQueryOptions = indexQueryOptions; this.queryConstraints = queryConstraints; } public CommonQueryOptions getCommonQueryOptions() { return commonQueryOptions; } public O getDataTypeQueryOptions() { return dataTypeQueryOptions; } public IndexQueryOptions getIndexQueryOptions() { return indexQueryOptions; } public QueryConstraints getQueryConstraints() { return queryConstraints; } @Override public byte[] toBinary() { byte[] commonQueryOptionsBinary, 
dataTypeQueryOptionsBinary, indexQueryOptionsBinary, queryConstraintsBinary; if (commonQueryOptions != null) { commonQueryOptionsBinary = PersistenceUtils.toBinary(commonQueryOptions); } else { commonQueryOptionsBinary = new byte[0]; } if (dataTypeQueryOptions != null) { dataTypeQueryOptionsBinary = PersistenceUtils.toBinary(dataTypeQueryOptions); } else { dataTypeQueryOptionsBinary = new byte[0]; } if (indexQueryOptions != null) { indexQueryOptionsBinary = PersistenceUtils.toBinary(indexQueryOptions); } else { indexQueryOptionsBinary = new byte[0]; } if (queryConstraints != null) { queryConstraintsBinary = PersistenceUtils.toBinary(queryConstraints); } else { queryConstraintsBinary = new byte[0]; } final ByteBuffer buf = ByteBuffer.allocate( commonQueryOptionsBinary.length + dataTypeQueryOptionsBinary.length + indexQueryOptionsBinary.length + queryConstraintsBinary.length + VarintUtils.unsignedIntByteLength(commonQueryOptionsBinary.length) + VarintUtils.unsignedIntByteLength(dataTypeQueryOptionsBinary.length) + VarintUtils.unsignedIntByteLength(indexQueryOptionsBinary.length)); VarintUtils.writeUnsignedInt(commonQueryOptionsBinary.length, buf); buf.put(commonQueryOptionsBinary); VarintUtils.writeUnsignedInt(dataTypeQueryOptionsBinary.length, buf); buf.put(dataTypeQueryOptionsBinary); VarintUtils.writeUnsignedInt(indexQueryOptionsBinary.length, buf); buf.put(indexQueryOptionsBinary); buf.put(queryConstraintsBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int commonQueryOptionsBinaryLength = VarintUtils.readUnsignedInt(buf); if (commonQueryOptionsBinaryLength == 0) { commonQueryOptions = null; } else { final byte[] commonQueryOptionsBinary = ByteArrayUtils.safeRead(buf, commonQueryOptionsBinaryLength); commonQueryOptions = (CommonQueryOptions) PersistenceUtils.fromBinary(commonQueryOptionsBinary); } final int dataTypeQueryOptionsBinaryLength = VarintUtils.readUnsignedInt(buf); 
if (dataTypeQueryOptionsBinaryLength == 0) { dataTypeQueryOptions = null; } else { final byte[] dataTypeQueryOptionsBinary = ByteArrayUtils.safeRead(buf, dataTypeQueryOptionsBinaryLength); dataTypeQueryOptions = (O) PersistenceUtils.fromBinary(dataTypeQueryOptionsBinary); } final int indexQueryOptionsBinaryLength = VarintUtils.readUnsignedInt(buf); if (indexQueryOptionsBinaryLength == 0) { indexQueryOptions = null; } else { final byte[] indexQueryOptionsBinary = ByteArrayUtils.safeRead(buf, indexQueryOptionsBinaryLength); indexQueryOptions = (IndexQueryOptions) PersistenceUtils.fromBinary(indexQueryOptionsBinary); } final byte[] queryConstraintsBinary = new byte[buf.remaining()]; if (queryConstraintsBinary.length == 0) { queryConstraints = null; } else { buf.get(queryConstraintsBinary); queryConstraints = (QueryConstraints) PersistenceUtils.fromBinary(queryConstraintsBinary); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((commonQueryOptions == null) ? 0 : commonQueryOptions.hashCode()); result = (prime * result) + ((dataTypeQueryOptions == null) ? 0 : dataTypeQueryOptions.hashCode()); result = (prime * result) + ((indexQueryOptions == null) ? 0 : indexQueryOptions.hashCode()); result = (prime * result) + ((queryConstraints == null) ? 
0 : queryConstraints.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final BaseQuery other = (BaseQuery) obj; if (commonQueryOptions == null) { if (other.commonQueryOptions != null) { return false; } } else if (!commonQueryOptions.equals(other.commonQueryOptions)) { return false; } if (dataTypeQueryOptions == null) { if (other.dataTypeQueryOptions != null) { return false; } } else if (!dataTypeQueryOptions.equals(other.dataTypeQueryOptions)) { return false; } if (indexQueryOptions == null) { if (other.indexQueryOptions != null) { return false; } } else if (!indexQueryOptions.equals(other.indexQueryOptions)) { return false; } if (queryConstraints == null) { if (other.queryConstraints != null) { return false; } } else if (!queryConstraints.equals(other.queryConstraints)) { return false; } return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/BaseQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query; import org.locationtech.geowave.core.store.api.QueryConstraintsFactory; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.constraints.QueryConstraintsFactoryImpl; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey; /** * A base class for building queries * * @param the type of the entries * @param the type of query (AggregationQuery or Query) * @param the type of the builder, useful for extensions of this to maintain type */ public interface BaseQueryBuilder, R extends BaseQueryBuilder> { /** * Choose the appropriate index from all available indices (the default behavior). * * @return this builder */ R allIndices(); /** * Query only using the specified index. * * @param indexName the name of the index * @return this builder */ R indexName(String indexName); /** * Add an authorization to the query. * * @param authorization the authorization * @return this builder */ R addAuthorization(String authorization); /** * Set the authorizations for this query (authorizations are intersected with row visibilities to * determine access). * * @param authorizations the authorizations * @return this builder */ R setAuthorizations(String[] authorizations); /** * Set to no authorizations (default behavior). * * @return this builder */ R noAuthorizations(); /** * Set no limit for the number of entries (default behavior). 
* * @return this builder */ R noLimit(); /** * Set the limit for the number of entries. * * @param limit the limit * @return this builder */ R limit(int limit); /** * Add a hint to the query. * * @param key the hint key * @param value the hint value * @return this builder */ R addHint(HintKey key, HintValueType value); /** * Clear out any hints (default is no hints). * * @return this builder */ R noHints(); /** * Use the specified constraints. Constraints can most easily be define by using the * constraintFactory(). * * @param constraints the constraints * @return this builder */ R constraints(QueryConstraints constraints); /** * Constrain the query with a filter expression. This is an alternate way of providing constraints * and will override any other constraints specified. * * @param filter the filter expression * @return this builder */ R filter(Filter filter); /** * This is the easiest approach to defining a set of constraints and can be used to create the * constraints that are provided to the constraints method. * * @return a constraints factory */ default QueryConstraintsFactory constraintsFactory() { return QueryConstraintsFactoryImpl.SINGLETON_INSTANCE; } /** * Build the query represented by this builder. * * @return the query */ Q build(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/BaseQueryBuilderImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query; import java.util.HashMap; import java.util.Map; import org.apache.commons.lang.ArrayUtils; import org.locationtech.geowave.core.store.query.constraints.EverythingQuery; import org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey; import org.locationtech.geowave.core.store.query.options.IndexQueryOptions; import org.locationtech.geowave.core.store.query.options.QuerySingleIndex; public abstract class BaseQueryBuilderImpl, R extends BaseQueryBuilder> implements BaseQueryBuilder { protected String indexName = null; protected String[] authorizations = new String[0]; protected Integer limit = null; protected Map, Object> hints = new HashMap<>(); protected QueryConstraints constraints = new EverythingQuery(); @Override public R allIndices() { this.indexName = null; return (R) this; } @Override public R indexName(final String indexName) { this.indexName = indexName; return (R) this; } @Override public R addAuthorization(final String authorization) { authorizations = (String[]) ArrayUtils.add(authorizations, authorization); return (R) this; } @Override public R setAuthorizations(final String[] authorizations) { if (authorizations == null) { this.authorizations = new String[0]; } else { this.authorizations = authorizations; } return (R) this; } 
@Override public R noAuthorizations() { this.authorizations = new String[0]; return (R) this; } @Override public R noLimit() { limit = null; return (R) this; } @Override public R limit(final int limit) { this.limit = limit; return (R) this; } @Override public R addHint(final HintKey key, final HintValueType value) { this.hints.put(key, value); return (R) this; } @Override public R noHints() { hints.clear(); return (R) this; } @Override public R constraints(final QueryConstraints constraints) { this.constraints = constraints; return (R) this; } @Override public R filter(final Filter filter) { this.constraints = new OptimalExpressionQuery(filter); return (R) this; } protected CommonQueryOptions newCommonQueryOptions() { return new CommonQueryOptions(limit, hints, authorizations); } protected IndexQueryOptions newIndexQueryOptions() { return new QuerySingleIndex(indexName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/QueryBuilderImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query; import org.apache.commons.lang.ArrayUtils; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.query.options.FilterByTypeQueryOptions; public class QueryBuilderImpl> extends BaseQueryBuilderImpl, R> implements QueryBuilder { protected String[] typeNames = new String[0]; protected String[] fieldNames = null; @Override public R allTypes() { this.typeNames = new String[0]; return (R) this; } @Override public R addTypeName(final String typeName) { if ((fieldNames == null) || (fieldNames.length == 0)) { typeNames = (String[]) ArrayUtils.add(typeNames, typeName); } else { throw new IllegalStateException("Subsetting fields only allows for a single type name"); } return (R) this; } @Override public R setTypeNames(final String[] typeNames) { if ((fieldNames == null) || (fieldNames.length == 0)) { if (typeNames == null) { return allTypes(); } this.typeNames = typeNames; } else if ((typeNames == null) || (typeNames.length != 1)) { throw new IllegalStateException("Subsetting fields only allows for a single type name"); } else { // we assume the user knows what they're doing and is choosing to // override the current type name with this this.typeNames = typeNames; } return (R) this; } @Override public R subsetFields(final String typeName, final String... 
fieldNames) { this.typeNames = new String[] {typeName}; this.fieldNames = fieldNames; return (R) this; } @Override public R allFields() { this.fieldNames = null; return (R) this; } protected FilterByTypeQueryOptions newFilterByTypeQueryOptions() { return typeNames.length == 1 ? new FilterByTypeQueryOptions<>(typeNames[0], fieldNames) : new FilterByTypeQueryOptions<>(typeNames); } @Override public Query build() { return new Query<>( newCommonQueryOptions(), newFilterByTypeQueryOptions(), newIndexQueryOptions(), constraints); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/AdapterAndIndexBasedAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; public interface AdapterAndIndexBasedAggregation

extends Aggregation { Aggregation createAggregation( DataTypeAdapter adapter, AdapterToIndexMapping indexMapping, Index index); @Override default byte[] toBinary() { return new byte[0]; } @Override default void fromBinary(final byte[] bytes) {} @Override default P getParameters() { return null; } @Override default void setParameters(final P parameters) {} @Override default R getResult() { return null; } @Override default R merge(final R result1, final R result2) { return null; } @Override default byte[] resultToBinary(final R result) { return new byte[0]; } @Override default R resultFromBinary(final byte[] binary) { return null; } @Override default void clearResult() {} @Override default void aggregate(final DataTypeAdapter adapter, final T entry) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/AggregationQueryBuilderImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.AggregationQuery; import org.locationtech.geowave.core.store.api.AggregationQueryBuilder; import org.locationtech.geowave.core.store.api.BinningStrategy; import org.locationtech.geowave.core.store.query.BaseQueryBuilderImpl; import org.locationtech.geowave.core.store.query.options.AggregateTypeQueryOptions; public class AggregationQueryBuilderImpl

> extends BaseQueryBuilderImpl, A> implements AggregationQueryBuilder { protected AggregateTypeQueryOptions options; public AggregationQueryBuilderImpl() { this.options = new AggregateTypeQueryOptions<>(); } @Override public AggregationQuery build() { return new AggregationQuery<>( newCommonQueryOptions(), newAggregateTypeQueryOptions(), newIndexQueryOptions(), constraints); } @Override public AggregationQuery, Map, T> buildWithBinningStrategy( final BinningStrategy binningStrategy, final int maxBins) { final AggregateTypeQueryOptions, Map, T> newOptions = new AggregateTypeQueryOptions<>( new BinningAggregation(this.options.getAggregation(), binningStrategy, maxBins), this.options.getTypeNames()); return new AggregationQuery<>( newCommonQueryOptions(), newOptions, newIndexQueryOptions(), constraints); } @Override public A aggregate(final String typeName, final Aggregation aggregation) { this.options.setAggregation(aggregation); this.options.setTypeNames(new String[] {typeName}); return (A) this; } @Override public A count(final String... typeNames) { // this forces the result type of the aggregation to be Long, // and will fail at runtime otherwise. this.options.setAggregation((Aggregation) new CountAggregation()); this.options.setTypeNames(typeNames); return (A) this; } private AggregateTypeQueryOptions newAggregateTypeQueryOptions() { return options; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.BinningStrategy; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import com.google.common.collect.Maps; /** * A Meta-Aggregation, to be used internally by an aggregation query.

This takes an * aggregation-supplier and a binning strategy. When new data is aggregated, it is binned, and if * that bin does not exist, a new one will be made, along with a new aggregation.

See * {@link org.locationtech.geowave.core.store.api.AggregationQueryBuilder#buildWithBinningStrategy(BinningStrategy, int)} * AggregationQueryBuilder#bin} for usage * * @param

The configuration parameters of the inner aggregation. * @param The type of the result that is returned by the inner aggregation. * @param The type of the data given to the aggregation. */ public class BinningAggregation

implements Aggregation, Map, T> { /** * An Aggregation that doesn't get used for aggregation, but to forward various helper tasks with, * such as merging and persistence. */ private Aggregation helperAggregation; /** * The bins and their aggregations. This is not the final result, but will be used to compute it. */ private Map> result; /** * The options that are needed to produce a correct aggregation. */ private BinningAggregationOptions options; /** * Create an useless BinningAggregation that must be fully realized through */ public BinningAggregation() { this(null, null, -1); } /** * Creates a BinningAggregation based upon a base aggregation and a strategy for binning. * * @param baseAggregation A supplier of the inner aggregation. This decides what is done to the * data inside of the bin. Make sure that the given aggregation properly implements * {@link Aggregation#fromBinary(byte[]) Aggregation#fromBinary} * {@link Aggregation#toBinary() Aggregation#toBinary}. * @param binningStrategy How to bin the given data. * @param maxBins The maximum amount of bins that this aggregation should support. If a bin is * computed after reaching the max, it will be silently dropped. */ public BinningAggregation( final Aggregation baseAggregation, final BinningStrategy binningStrategy, final int maxBins) { this.options = new BinningAggregationOptions<>( PersistenceUtils.toBinary(baseAggregation), baseAggregation == null ? null : PersistenceUtils.toBinary(baseAggregation.getParameters()), binningStrategy, maxBins); this.result = Maps.newHashMapWithExpectedSize(maxBins == -1 ? 
1024 : maxBins); } @Override public Map getResult() { return this.result.entrySet().stream().collect( Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getResult())); } @Override public Map merge(final Map result1, final Map result2) { final Aggregation agg = this.getHelperAggregation(); return Stream.of(result1, result2).flatMap(m -> m.entrySet().stream()).collect( Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, agg::merge)); } @Override public void aggregate(final DataTypeAdapter adapter, final T entry) { final ByteArray[] bins = this.options.binningStrategy.getBins(adapter, entry); for (final ByteArray bin : bins) { if (this.result.containsKey(bin)) { this.result.get(bin).aggregate(adapter, entry); } else if ((this.options.maxBins == -1) || (this.result.size() < this.options.maxBins)) { this.result.put(bin, this.instantiateBaseAggregation()); this.result.get(bin).aggregate(adapter, entry); } } } /** * Clear all bins and all sub-aggregations. Future calls to aggregate will be unaffected by past * calls, after calling this. */ @Override public void clearResult() { this.result.clear(); } /** * @return A fresh instance of the base aggregation for use in this class. 
*/ private Aggregation instantiateBaseAggregation() { final Aggregation agg = (Aggregation) PersistenceUtils.fromBinary(this.options.baseBytes); final P baseParams = (P) PersistenceUtils.fromBinary(this.options.baseParamBytes); agg.setParameters(baseParams); return agg; } @Override public BinningAggregationOptions getParameters() { return this.options; } @Override public void setParameters(final BinningAggregationOptions parameters) { this.options = parameters; } @Override public byte[] resultToBinary(final Map result) { final Aggregation agg = this.getHelperAggregation(); final Map mapped = result.entrySet().stream().collect( Collectors.toMap(Map.Entry::getKey, e -> agg.resultToBinary(e.getValue()))); final int totalDataSize = mapped.entrySet().stream().mapToInt( e -> (VarintUtils.unsignedIntByteLength(e.getKey().getBytes().length) + e.getKey().getBytes().length + VarintUtils.unsignedIntByteLength(e.getValue().length) + e.getValue().length)).reduce(0, Integer::sum); final ByteBuffer bb = ByteBuffer.allocate(totalDataSize); mapped.forEach((k, v) -> { VarintUtils.writeUnsignedInt(k.getBytes().length, bb); bb.put(k.getBytes()); VarintUtils.writeUnsignedInt(v.length, bb); bb.put(v); }); return bb.array(); } @Override public Map resultFromBinary(final byte[] binary) { final Aggregation agg = this.getHelperAggregation(); final ByteBuffer bb = ByteBuffer.wrap(binary); final Map resultMap = new HashMap<>(); while (bb.hasRemaining()) { final int keyLen = VarintUtils.readUnsignedInt(bb); final byte[] keyBytes = new byte[keyLen]; bb.get(keyBytes); final ByteArray key = new ByteArray(keyBytes); final int valLen = VarintUtils.readUnsignedInt(bb); final byte[] valBytes = new byte[valLen]; bb.get(valBytes); final R val = agg.resultFromBinary(valBytes); resultMap.put(key, val); } return resultMap; } private Aggregation getHelperAggregation() { if (this.helperAggregation == null) { this.helperAggregation = this.instantiateBaseAggregation(); } return this.helperAggregation; } } 
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregationOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.aggregate;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.BinningStrategy;

/**
 * The configuration parameters of a {@link BinningAggregation}
 *
 * @param <P> The Persistable that the sub-aggregation uses for configuration.
 * @param <T> The type that is being sent to the sub-aggregations for binning.
 */
public class BinningAggregationOptions<P extends Persistable, T> implements Persistable {

  /**
   * The baseBytes should contain primarily the classId of the Aggregation. This is used in
   * conjunction with the baseParams to create a fully-functional aggregation.
   *
   * When a new bin is created, these bytes are deserialized into a new {@code Aggregation} object.
   *
   * This is used to create the helperAggregation if it doesn't exist, and is used to create the
   * aggregation for new bins, when a new bin is created.
   */
  byte[] baseBytes;

  /**
   * The baseParamBytes should contain all the parameters needed to finish instantiating the base
   * aggregation that constitutes this meta-aggregation. May be null if the base aggregation has no
   * parameters.
   */
  byte[] baseParamBytes;

  /** The strategy that we use to bin entries with. */
  BinningStrategy binningStrategy;

  /** The maximum bins that the binning aggregation can support (-1 means unbounded). */
  int maxBins;

  public BinningAggregationOptions() {}

  public BinningAggregationOptions(
      final byte[] baseBytes,
      final byte[] baseParamBytes,
      final BinningStrategy binningStrategy,
      final int maxBins) {
    this.baseBytes = baseBytes;
    this.baseParamBytes = baseParamBytes;
    this.binningStrategy = binningStrategy;
    this.maxBins = maxBins;
  }

  @Override
  public byte[] toBinary() {
    final byte[] strategyBytes = PersistenceUtils.toBinary(this.binningStrategy);
    // null params are encoded as a zero-length array (see fromBinary)
    final byte[] baseParams = baseParamBytes == null ? new byte[0] : baseParamBytes;
    // 16 = four 4-byte ints: three length prefixes plus maxBins
    return ByteBuffer.allocate(
        16 + this.baseBytes.length + baseParams.length + strategyBytes.length).putInt(
            this.baseBytes.length).put(this.baseBytes).putInt(baseParams.length).put(
                baseParams).putInt(strategyBytes.length).put(strategyBytes).putInt(maxBins).array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer bb = ByteBuffer.wrap(bytes);
    final int baseBytesLen = bb.getInt();
    final byte[] baseBytes = new byte[baseBytesLen];
    bb.get(baseBytes);
    this.baseBytes = baseBytes;
    final int paramsBytesLen = bb.getInt();
    final byte[] paramsBytes = new byte[paramsBytesLen];
    if (paramsBytes.length > 0) {
      bb.get(paramsBytes);
      this.baseParamBytes = paramsBytes;
    } else {
      // zero length means the base aggregation was built without parameters
      this.baseParamBytes = null;
    }
    final int strategyBytesLen = bb.getInt();
    final byte[] strategyBytes = new byte[strategyBytesLen];
    bb.get(strategyBytes);
    this.binningStrategy = (BinningStrategy) PersistenceUtils.fromBinary(strategyBytes);
    this.maxBins = bb.getInt();
  }
}



================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/CommonIndexAggregation.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; public interface CommonIndexAggregation

extends Aggregation { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/CompositeAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistableList; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import com.google.common.collect.Lists; /** * Aggregation class that allows multiple aggregations to be performed in a single aggregation * query. The initial implementation does not take advantage of common index aggregations. * * TODO: Update this class to derive from BaseOptimalVectorAggregation and if all sub aggregations * are common index aggregations, then the composite aggregation can run with only common index * data. Otherwise the feature needs to be decoded anyways, so all of the sub aggregations should be * run on the decoded data. */ @SuppressWarnings({"rawtypes", "unchecked"}) public class CompositeAggregation implements Aggregation, T> { List aggregations = Lists.newArrayList(); /** * Add an aggregation to this composite aggregation. 
* * @param aggregation the aggregation to add */ public void add(final Aggregation aggregation) { aggregations.add(aggregation); } @Override public PersistableList getParameters() { final List persistables = Lists.newArrayListWithCapacity(aggregations.size() * 2); for (final Aggregation agg : aggregations) { persistables.add(agg); persistables.add(agg.getParameters()); } return new PersistableList(persistables); } @Override public void setParameters(final PersistableList parameters) { final List persistables = parameters.getPersistables(); aggregations = Lists.newArrayListWithCapacity(persistables.size() / 2); for (int i = 0; i < persistables.size(); i += 2) { aggregations.add((Aggregation) persistables.get(i)); aggregations.get(i / 2).setParameters(persistables.get(i + 1)); } } @Override public List merge(final List result1, final List result2) { final List merged = Lists.newArrayListWithCapacity(aggregations.size()); for (int i = 0; i < aggregations.size(); i++) { merged.add(aggregations.get(i).merge(result1.get(i), result2.get(i))); } return merged; } @Override public List getResult() { return Lists.transform(aggregations, a -> a.getResult()); } @Override public byte[] resultToBinary(final List result) { final List parts = Lists.newArrayListWithCapacity(aggregations.size()); int length = 0; for (int i = 0; i < aggregations.size(); i++) { final byte[] binary = aggregations.get(i).resultToBinary(result.get(i)); length += binary.length + 4; parts.add(binary); } final ByteBuffer buffer = ByteBuffer.allocate(length); for (final byte[] part : parts) { buffer.putInt(part.length); buffer.put(part); } return buffer.array(); } @Override public List resultFromBinary(final byte[] binary) { final ByteBuffer buffer = ByteBuffer.wrap(binary); final List result = Lists.newArrayListWithCapacity(aggregations.size()); final int length = aggregations.size(); for (int i = 0; i < length; i++) { final int partLength = buffer.getInt(); final byte[] part = new byte[partLength]; 
buffer.get(part); result.add(aggregations.get(i).resultFromBinary(part)); } return result; } @Override public void clearResult() { aggregations.forEach(a -> a.clearResult()); } @Override public void aggregate(final DataTypeAdapter adapter, final T entry) { aggregations.forEach(a -> a.aggregate(adapter, entry)); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/CountAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; public class CountAggregation implements CommonIndexAggregation { private long count = 0; public CountAggregation() {} public boolean isSet() { return count != Long.MIN_VALUE; } @Override public String toString() { return "count[count=" + count + ']'; } @Override public void aggregate( final DataTypeAdapter adapter, final CommonIndexedPersistenceEncoding entry) { count++; } @Override public Persistable getParameters() { return null; } @Override public Long getResult() { return count; } @Override public void setParameters(final Persistable parameters) {} @Override public void clearResult() { count = 0; } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} @Override public Long merge(final Long result1, final Long result2) { return result1 + result2; } @Override public byte[] resultToBinary(final Long result) { final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result)); VarintUtils.writeUnsignedLong(result, buffer); return buffer.array(); } @Override public Long resultFromBinary(final byte[] binary) { return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary)); } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldMathAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.math.BigDecimal; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * Base aggregation class for performing math operations on numeric attributes. It uses BigDecimal * due to it being the most precise numeric attribute possible. */ public abstract class FieldMathAggregation implements Aggregation { private FieldNameParam fieldNameParam; private BigDecimal value = null; public FieldMathAggregation() { this(null); } public FieldMathAggregation(final FieldNameParam fieldNameParam) { super(); this.fieldNameParam = fieldNameParam; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override public BigDecimal getResult() { return value; } @Override public BigDecimal merge(final BigDecimal result1, final BigDecimal result2) { return agg(result1, result2); } @Override public byte[] resultToBinary(BigDecimal result) { return VarintUtils.writeBigDecimal(result); } @Override public BigDecimal resultFromBinary(byte[] binary) { return VarintUtils.readBigDecimal(ByteBuffer.wrap(binary)); } @Override public void clearResult() { value = null; } @Override public void aggregate(final DataTypeAdapter adapter, T entry) { Object o; if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) { o = adapter.getFieldValue(entry, fieldNameParam.getFieldName()); if (o instanceof 
Number) { value = agg(value, new BigDecimal(o.toString())); } } } protected abstract BigDecimal agg(final BigDecimal a, final BigDecimal b); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldMaxAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.math.BigDecimal; /** * Aggregates to find the maximum value of a given numeric attribute. Ignores null attribute values. */ public class FieldMaxAggregation extends FieldMathAggregation { public FieldMaxAggregation() { this(null); } public FieldMaxAggregation(final FieldNameParam fieldNameParam) { super(fieldNameParam); } @Override protected BigDecimal agg(BigDecimal a, BigDecimal b) { if (a == null) { return b; } else if (b == null) { return a; } return a.max(b); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldMinAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.math.BigDecimal; /** * Aggregates to find the minimum value of a given numeric attribute. Ignores null attribute values. */ public class FieldMinAggregation extends FieldMathAggregation { public FieldMinAggregation() { this(null); } public FieldMinAggregation(final FieldNameParam fieldNameParam) { super(fieldNameParam); } @Override protected BigDecimal agg(final BigDecimal a, final BigDecimal b) { if (a == null) { return b; } else if (b == null) { return a; } return a.min(b); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldNameParam.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.persist.Persistable; public class FieldNameParam implements Persistable { // TODO we can also include a requested CRS in case we want to reproject // (although it seemingly can just as easily be done on the resulting // envelope rather than per feature) private String fieldName; public FieldNameParam() { this(null); } public FieldNameParam(final String fieldName) { this.fieldName = fieldName; } @Override public byte[] toBinary() { if ((fieldName == null) || fieldName.isEmpty()) { return new byte[0]; } return StringUtils.stringToBinary(fieldName); } @Override public void fromBinary(final byte[] bytes) { if (bytes.length > 0) { fieldName = StringUtils.stringFromBinary(bytes); } else { fieldName = null; } } public boolean isEmpty() { return (fieldName == null) || fieldName.isEmpty(); } public String getFieldName() { return fieldName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/FieldSumAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.math.BigDecimal; /** * Calculates the sum of all value of a given numeric attribute. Ignores null attribute values. */ public class FieldSumAggregation extends FieldMathAggregation { public FieldSumAggregation() { this(null); } public FieldSumAggregation(final FieldNameParam fieldNameParam) { super(fieldNameParam); } @Override protected BigDecimal agg(BigDecimal a, BigDecimal b) { if (a == null) { return b; } else if (b == null) { return a; } return a.add(b); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/MergingAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; public class MergingAggregation implements Aggregation { private T result = null; @Override public Persistable getParameters() { return null; } @Override public void setParameters(final Persistable parameters) {} @Override public T getResult() { return result; } @Override public byte[] resultToBinary(final T result) { if (result == null) { return new byte[0]; } return PersistenceUtils.toBinary(result); } @Override public T resultFromBinary(final byte[] binary) { if (binary.length > 0) { return (T) PersistenceUtils.fromBinary(binary); } return null; } @Override public void clearResult() { result = null; } @Override public void aggregate(final DataTypeAdapter adapter, final T entry) { if (result == null) { result = entry; } else { result.merge(entry); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/OptimalCountAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; public class OptimalCountAggregation extends OptimalFieldAggregation { public OptimalCountAggregation() { super(); } public OptimalCountAggregation(final FieldNameParam fieldNameParam) { super(fieldNameParam); } @Override protected Aggregation createCommonIndexAggregation() { return new CommonIndexCountAggregation(fieldNameParam); } @Override protected Aggregation createAggregation() { return new FieldCountAggregation<>(fieldNameParam); } public static class CommonIndexCountAggregation implements CommonIndexAggregation { private FieldNameParam fieldNameParam; private long count = 0; public CommonIndexCountAggregation() { fieldNameParam = null; } public CommonIndexCountAggregation(final FieldNameParam param) { this.fieldNameParam = param; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(FieldNameParam parameters) { this.fieldNameParam = parameters; } @Override public Long getResult() { return count; } @Override public byte[] resultToBinary(Long result) { final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result)); VarintUtils.writeUnsignedLong(result, buffer); return buffer.array(); } @Override public Long resultFromBinary(byte[] binary) { return 
VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary)); } @Override public Long merge(final Long value1, final Long value2) { return value1 + value2; } @Override public void clearResult() { count = 0; } @Override public void aggregate( DataTypeAdapter adapter, CommonIndexedPersistenceEncoding entry) { if (fieldNameParam == null) { count++; } else if (entry.getCommonData().getValue(fieldNameParam.getFieldName()) != null) { count++; } } } public static class FieldCountAggregation implements Aggregation { private FieldNameParam fieldNameParam; private long count = 0; public FieldCountAggregation() { fieldNameParam = null; } public FieldCountAggregation(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(FieldNameParam parameters) { this.fieldNameParam = parameters; } @Override public Long getResult() { return count; } @Override public byte[] resultToBinary(Long result) { final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result)); VarintUtils.writeUnsignedLong(result, buffer); return buffer.array(); } @Override public Long resultFromBinary(byte[] binary) { return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary)); } @Override public Long merge(final Long value1, final Long value2) { return value1 + value2; } @Override public void clearResult() { count = 0; } @Override public void aggregate(DataTypeAdapter adapter, T entry) { if (fieldNameParam == null) { count++; } else if (adapter.getFieldValue(entry, fieldNameParam.getFieldName()) != null) { count++; } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/aggregate/OptimalFieldAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import org.apache.commons.lang.ArrayUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; /** * Abstract class for performing optimal aggregations on adapter fields. * * @param the aggregation return type * @param the adapter type */ public abstract class OptimalFieldAggregation implements AdapterAndIndexBasedAggregation { protected FieldNameParam fieldNameParam; public OptimalFieldAggregation() {} public OptimalFieldAggregation(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(final FieldNameParam parameters) { fieldNameParam = parameters; } @Override public Aggregation createAggregation( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { if (fieldNameParam == null || indexMapping.getIndexFieldMappers().stream().anyMatch( m -> ArrayUtils.contains(m.getAdapterFields(), fieldNameParam.getFieldName()))) { return createCommonIndexAggregation(); } return createAggregation(); } protected abstract Aggregation createCommonIndexAggregation(); protected abstract Aggregation createAggregation(); } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/AdapterAndIndexBasedQueryConstraints.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.util.List;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Query constraints that cannot be resolved until a concrete adapter, index, and field mapping
 * are known. Until {@code createQueryConstraints} is called, the default filter and
 * index-constraint accessors intentionally return null.
 */
public interface AdapterAndIndexBasedQueryConstraints extends QueryConstraints {
  // Resolve these constraints against a concrete adapter, index, and adapter-to-index mapping.
  QueryConstraints createQueryConstraints(
      InternalDataAdapter adapter,
      Index index,
      AdapterToIndexMapping indexMapping);

  // Unresolved constraints produce no filters; callers must resolve first.
  @Override
  default List createFilters(final Index index) {
    return null;
  }

  // Unresolved constraints produce no index constraints; callers must resolve first.
  @Override
  default List getIndexConstraints(final Index index) {
    return null;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/BasicOrderedConstraintQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved.
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang3.Range;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * A query whose constraints are given positionally, one numeric range per ordered index
 * dimension. NOTE(review): generic parameters (e.g. {@code Range<Double>}) appear stripped by
 * extraction -- confirm against the original source.
 */
public class BasicOrderedConstraintQuery extends BasicQuery {

  /** A list of Constraint Sets. Each Constraint Set is an individual hyper-cube query. */
  public static class OrderedConstraints implements Constraints {
    // One range per index dimension, in the index's ordered-dimension order.
    private Range[] rangesPerDimension;
    // Optional target index name; null means "any index with a matching dimension count".
    private String indexName;

    public OrderedConstraints() {}

    public OrderedConstraints(final Range rangePerDimension) {
      this(new Range[] {rangePerDimension}, null);
    }

    public OrderedConstraints(final Range[] rangesPerDimension) {
      this(rangesPerDimension, null);
    }

    public OrderedConstraints(final Range[] rangesPerDimension, final String indexName) {
      this.rangesPerDimension = rangesPerDimension;
      this.indexName = indexName;
    }

    // Layout: varint range count, varint index-name byte length, 16 bytes (min/max doubles)
    // per range, then the index name bytes.
    @Override
    public byte[] toBinary() {
      final byte[] indexNameBinary;
      if (indexName != null) {
        indexNameBinary = StringUtils.stringToBinary(indexName);
      } else {
        indexNameBinary = new byte[0];
      }
      final ByteBuffer buf =
          ByteBuffer.allocate(
              VarintUtils.unsignedIntByteLength(rangesPerDimension.length)
                  + VarintUtils.unsignedIntByteLength(indexNameBinary.length)
                  + (16 * rangesPerDimension.length)
                  + indexNameBinary.length);
      VarintUtils.writeUnsignedInt(rangesPerDimension.length, buf);
      VarintUtils.writeUnsignedInt(indexNameBinary.length, buf);
      for (int i = 0; i < rangesPerDimension.length; i++) {
        buf.putDouble(rangesPerDimension[i].getMinimum());
        buf.putDouble(rangesPerDimension[i].getMaximum());
      }
      buf.put(indexNameBinary);
      return buf.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final int numRanges = VarintUtils.readUnsignedInt(buf);
      // guard against a corrupt count before allocating the array
      ByteArrayUtils.verifyBufferSize(buf, numRanges);
      rangesPerDimension = new Range[numRanges];
      final int indexNameBinaryLength = VarintUtils.readUnsignedInt(buf);
      for (int i = 0; i < rangesPerDimension.length; i++) {
        rangesPerDimension[i] = Range.between(buf.getDouble(), buf.getDouble());
      }
      if (indexNameBinaryLength > 0) {
        final byte[] indexNameBinary = ByteArrayUtils.safeRead(buf, indexNameBinaryLength);
        indexName = StringUtils.stringFromBinary(indexNameBinary);
      } else {
        indexName = null;
      }
    }

    // Only yields constraints when the index matches by name (if one was set) AND its
    // ordered-dimension count equals the number of ranges; otherwise no constraints apply.
    @Override
    public List getIndexConstraints(final Index index) {
      if (((indexName == null) || indexName.equals(index.getName()))
          && (index.getIndexStrategy().getOrderedDimensionDefinitions().length == rangesPerDimension.length)) {
        return Collections.singletonList(getIndexConstraints());
      }
      return Collections.emptyList();
    }

    // Positional ranges converted to a numeric dataset (hyper-cube).
    protected MultiDimensionalNumericData getIndexConstraints() {
      return new BasicNumericDataset(
          Arrays.stream(rangesPerDimension).map(
              r -> new NumericRange(r.getMinimum(), r.getMaximum())).toArray(
                  i -> new NumericData[i]));
    }

    @Override
    public List createFilters(final Index index, final BasicQuery parentQuery) {
      // All dimensions are treated as constrained (no unconstrained fields are passed).
      final QueryFilter filter =
          parentQuery.createQueryFilter(
              getIndexConstraints(),
              index.getIndexModel().getDimensions(),
              new NumericDimensionField[0],
              index);
      if (filter != null) {
        return Collections.singletonList(filter);
      }
      return Collections.emptyList();
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((indexName == null) ? 0 : indexName.hashCode());
      result = (prime * result) + Arrays.hashCode(rangesPerDimension);
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final OrderedConstraints other = (OrderedConstraints) obj;
      if (indexName == null) {
        if (other.indexName != null) {
          return false;
        }
      } else if (!indexName.equals(other.indexName)) {
        return false;
      }
      if (!Arrays.equals(rangesPerDimension, other.rangesPerDimension)) {
        return false;
      }
      return true;
    }
  }

  public BasicOrderedConstraintQuery() {}

  public BasicOrderedConstraintQuery(final OrderedConstraints constraints) {
    super(constraints);
  }

  public BasicOrderedConstraintQuery(
      final OrderedConstraints constraints,
      final BasicQueryCompareOperation compareOp) {
    super(constraints, compareOp);
  }

  @Override
  public byte[] toBinary() {
    // The constraint type is fixed, so serialize it directly (no persistable envelope).
    return constraints.toBinary();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    constraints = new OrderedConstraints();
    constraints.fromBinary(bytes);
  }

  // Ranges are positional, so an explicit index is required to interpret them.
  @Override
  public boolean indexMustBeSpecified() {
    return true;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/BasicQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.util.List;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter;
import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Base query constraints that delegate both filter creation and index-constraint generation to a
 * pluggable {@link Constraints} instance.
 */
public class BasicQuery implements QueryConstraints {
  protected Constraints constraints;
  // compare OP doesn't need to be serialized because its only used clientside to generate the query
  // filter
  protected transient BasicQueryCompareOperation compareOp = BasicQueryCompareOperation.INTERSECTS;

  public BasicQuery() {}

  public BasicQuery(final Constraints constraints) {
    this(constraints, BasicQueryCompareOperation.INTERSECTS);
  }

  public BasicQuery(final Constraints constraints, final BasicQueryCompareOperation compareOp) {
    super();
    this.constraints = constraints;
    this.compareOp = compareOp;
  }

  @Override
  public List createFilters(final Index index) {
    return constraints.createFilters(index, this);
  }

  // Filter factory hook for subclasses; this base implementation ignores the unconstrained
  // dimension fields and the index argument.
  protected QueryFilter createQueryFilter(
      final MultiDimensionalNumericData constraints,
      final NumericDimensionField[] orderedConstrainedDimensionFields,
      final NumericDimensionField[] unconstrainedDimensionFields,
      final Index index) {
    return new BasicQueryFilter(constraints, orderedConstrainedDimensionFields, compareOp);
  }

  @Override
  public List getIndexConstraints(final Index index) {
    return constraints.getIndexConstraints(index);
  }

  @Override
  public byte[] toBinary() {
    // Only the constraints are persisted; compareOp is transient (client-side only), so a
    // deserialized query always uses the INTERSECTS default.
    return PersistenceUtils.toBinary(constraints);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    constraints = (Constraints) PersistenceUtils.fromBinary(bytes);
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/BasicQueryByClass.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;
import org.locationtech.geowave.core.store.query.filter.FilterList;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.math.DoubleMath;

/**
 * The Basic Query class represents a hyper-cube(s) query across all dimensions that match the
 * Constraints passed into the constructor.
 *
 * NOTE: a query to an index that requires a constraint where the constraint is missing within the
 * query equates to an unconstrained index scan. The query filter is still applied.
 *
 * NOTE(review): generic type parameters throughout this class appear to have been stripped by
 * extraction (e.g. the maps keyed by dimension-definition Class objects, typed Lists); confirm
 * against the original source before compiling.
 */
public class BasicQueryByClass extends BasicQuery {
  // Tolerance used for all fuzzy floating-point comparisons of range endpoints.
  private static final double DOUBLE_TOLERANCE = 1E-12d;
  private static final Logger LOGGER = LoggerFactory.getLogger(BasicQueryByClass.class);

  /** A set of constraints, one range per dimension, keyed by dimension-definition class. */
  public static class ConstraintSet {
    protected Map, ConstraintData> constraintsPerTypeOfDimensionDefinition;

    public ConstraintSet() {
      constraintsPerTypeOfDimensionDefinition = new HashMap<>();
    }

    public ConstraintSet(
        final Map, ConstraintData> constraintsPerTypeOfDimensionDefinition) {
      this.constraintsPerTypeOfDimensionDefinition = constraintsPerTypeOfDimensionDefinition;
    }

    public ConstraintSet(final Class dimDefinition, final ConstraintData constraintData) {
      this();
      addConstraint(dimDefinition, constraintData);
    }

    public ConstraintSet(final ConstraintData constraintData, final Class... dimDefinitions) {
      this();
      for (final Class dimDefinition : dimDefinitions) {
        addConstraint(dimDefinition, constraintData);
      }
    }

    // Adds a constraint for the dimension type, merging (range union) with any existing one.
    public void addConstraint(final Class dimDefinition, final ConstraintData constraintData) {
      final ConstraintData myCd = constraintsPerTypeOfDimensionDefinition.get(dimDefinition);
      if (myCd != null) {
        constraintsPerTypeOfDimensionDefinition.put(dimDefinition, myCd.merge(constraintData));
      } else {
        constraintsPerTypeOfDimensionDefinition.put(dimDefinition, constraintData);
      }
    }

    // Union of the two sets: shared dimension types are merged, the rest carried over as-is.
    public ConstraintSet merge(final ConstraintSet constraintSet) {
      final Map, ConstraintData> newSet = new HashMap<>();
      for (final Map.Entry, ConstraintData> entry : constraintSet.constraintsPerTypeOfDimensionDefinition.entrySet()) {
        final ConstraintData data = constraintsPerTypeOfDimensionDefinition.get(entry.getKey());
        if (data == null) {
          newSet.put(entry.getKey(), entry.getValue());
        } else {
          newSet.put(entry.getKey(), data.merge(entry.getValue()));
        }
      }
      // carry over dimension types only present in this set
      for (final Map.Entry, ConstraintData> entry : constraintsPerTypeOfDimensionDefinition.entrySet()) {
        final ConstraintData data =
            constraintSet.constraintsPerTypeOfDimensionDefinition.get(entry.getKey());
        if (data == null) {
          newSet.put(entry.getKey(), entry.getValue());
        }
      }
      return new ConstraintSet(newSet);
    }

    public boolean isEmpty() {
      return constraintsPerTypeOfDimensionDefinition.isEmpty();
    }

    // True when every constraint here has a fuzzy-equal counterpart in the other set.
    public boolean matches(final ConstraintSet constraints) {
      if (constraints.isEmpty() != isEmpty()) {
        return false;
      }
      for (final Map.Entry, ConstraintData> entry : constraintsPerTypeOfDimensionDefinition.entrySet()) {
        final ConstraintData data =
            constraints.constraintsPerTypeOfDimensionDefinition.get(entry.getKey());
        if ((data == null) || !data.matches(entry.getValue())) {
          return false;
        }
      }
      return true;
    }

    /*
     * Makes the decision to provide a empty data set if an one dimension is left unconstrained.
     */
    public MultiDimensionalNumericData getIndexConstraints(
        final NumericIndexStrategy indexStrategy) {
      if (constraintsPerTypeOfDimensionDefinition.isEmpty()) {
        return new BasicNumericDataset();
      }
      final NumericDimensionDefinition[] dimensionDefinitions =
          indexStrategy.getOrderedDimensionDefinitions();
      final NumericData[] dataPerDimension = new NumericData[dimensionDefinitions.length];
      // all or nothing...for now
      for (int d = 0; d < dimensionDefinitions.length; d++) {
        final ConstraintData dimConstraint =
            constraintsPerTypeOfDimensionDefinition.get(dimensionDefinitions[d].getClass());
        if (dimConstraint == null) {
          return new BasicNumericDataset();
        }
        dataPerDimension[d] = dimConstraint.range;
      }
      return new BasicNumericDataset(dataPerDimension);
    }

    // Builds a query filter over only the index dimensions this set actually constrains;
    // dimensions without a constraint are split into a separate "unconstrained" array that is
    // handed to the parent query's filter factory.
    protected QueryFilter createFilter(final Index index, final BasicQuery basicQuery) {
      final CommonIndexModel indexModel = index.getIndexModel();
      final NumericDimensionField[] dimensionFields = indexModel.getDimensions();
      NumericDimensionField[] orderedConstrainedDimensionFields = dimensionFields;
      NumericDimensionField[] unconstrainedDimensionFields;
      NumericData[] orderedConstraintsPerDimension = new NumericData[dimensionFields.length];
      // trim dimension fields to be only what is contained in the
      // constraints
      final Set fieldsToTrim = new HashSet<>();
      for (int d = 0; d < dimensionFields.length; d++) {
        final ConstraintData nd =
            constraintsPerTypeOfDimensionDefinition.get(
                dimensionFields[d].getBaseDefinition().getClass());
        if (nd == null) {
          fieldsToTrim.add(d);
        } else {
          orderedConstraintsPerDimension[d] =
              constraintsPerTypeOfDimensionDefinition.get(
                  dimensionFields[d].getBaseDefinition().getClass()).range;
        }
      }
      if (!fieldsToTrim.isEmpty()) {
        final NumericDimensionField[] newDimensionFields =
            new NumericDimensionField[dimensionFields.length - fieldsToTrim.size()];
        unconstrainedDimensionFields = new NumericDimensionField[fieldsToTrim.size()];
        final NumericData[] newOrderedConstraintsPerDimension =
            new NumericData[newDimensionFields.length];
        int newDimensionCtr = 0;
        // NOTE(review): despite its name, constrainedCtr indexes the UNconstrained array.
        int constrainedCtr = 0;
        for (int i = 0; i < dimensionFields.length; i++) {
          if (!fieldsToTrim.contains(i)) {
            newDimensionFields[newDimensionCtr] = dimensionFields[i];
            newOrderedConstraintsPerDimension[newDimensionCtr++] =
                orderedConstraintsPerDimension[i];
          } else {
            unconstrainedDimensionFields[constrainedCtr++] = dimensionFields[i];
          }
        }
        orderedConstrainedDimensionFields = newDimensionFields;
        orderedConstraintsPerDimension = newOrderedConstraintsPerDimension;
      } else {
        unconstrainedDimensionFields = new NumericDimensionField[] {};
      }
      return basicQuery.createQueryFilter(
          new BasicNumericDataset(orderedConstraintsPerDimension),
          orderedConstrainedDimensionFields,
          unconstrainedDimensionFields,
          index);
    }

    // Layout: varint entry count, then per entry varint class-name length, class-name bytes,
    // min double, max double, isDefault byte.
    public byte[] toBinary() {
      final List bytes = new ArrayList<>(constraintsPerTypeOfDimensionDefinition.size());
      // NOTE(review): bytes is still empty here, so this always reserves 1 byte for the entry
      // count; with 128+ entries the varint needs 2 bytes and the buffer would overflow.
      // Presumably this was meant to size by constraintsPerTypeOfDimensionDefinition.size() --
      // confirm before changing.
      int totalBytes = VarintUtils.unsignedIntByteLength(bytes.size());
      for (final Entry, ConstraintData> c : constraintsPerTypeOfDimensionDefinition.entrySet()) {
        final byte[] className = StringUtils.stringToBinary(c.getKey().getName());
        final double min = c.getValue().range.getMin();
        final double max = c.getValue().range.getMax();
        // 17 = two 8-byte doubles + one isDefault byte
        final int entryLength =
            className.length + 17 + VarintUtils.unsignedIntByteLength(className.length);
        final byte isDefault = (byte) (c.getValue().isDefault ? 1 : 0);
        final ByteBuffer entryBuf = ByteBuffer.allocate(entryLength);
        VarintUtils.writeUnsignedInt(className.length, entryBuf);
        entryBuf.put(className);
        entryBuf.putDouble(min);
        entryBuf.putDouble(max);
        entryBuf.put(isDefault);
        bytes.add(entryBuf.array());
        totalBytes += entryLength;
      }
      final ByteBuffer buf = ByteBuffer.allocate(totalBytes);
      VarintUtils.writeUnsignedInt(bytes.size(), buf);
      for (final byte[] entryBytes : bytes) {
        buf.put(entryBytes);
      }
      return buf.array();
    }

    // Inverse of toBinary(); entries whose dimension class cannot be loaded are logged and
    // skipped rather than failing the whole set.
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final int numEntries = VarintUtils.readUnsignedInt(buf);
      final Map, ConstraintData> constraintsPerTypeOfDimensionDefinition = new HashMap<>(numEntries);
      for (int i = 0; i < numEntries; i++) {
        final int classNameLength = VarintUtils.readUnsignedInt(buf);
        final byte[] className = ByteArrayUtils.safeRead(buf, classNameLength);
        final double min = buf.getDouble();
        final double max = buf.getDouble();
        final boolean isDefault = buf.get() > 0;
        final String classNameStr = StringUtils.stringFromBinary(className);
        try {
          final Class cls = (Class) Class.forName(classNameStr);
          constraintsPerTypeOfDimensionDefinition.put(
              cls,
              new ConstraintData(new NumericRange(min, max), isDefault));
        } catch (final ClassNotFoundException e) {
          // HP Fortify "Improper Output Neutralization" false
          // positive
          // What Fortify considers "user input" comes only
          // from users with OS-level access anyway
          LOGGER.warn("Cannot find dimension definition class: " + classNameStr, e);
        }
      }
      this.constraintsPerTypeOfDimensionDefinition = constraintsPerTypeOfDimensionDefinition;
    }
  }

  /** A single dimension's numeric range plus a flag marking it as a default constraint. */
  public static class ConstraintData {
    protected NumericData range;
    protected boolean isDefault;

    public ConstraintData(final NumericData range, final boolean isDefault) {
      super();
      this.range = range;
      this.isDefault = isDefault;
    }

    // Closed-interval overlap test with fuzzy endpoint comparison.
    public boolean intersects(final ConstraintData cd) {
      final double i1 = cd.range.getMin();
      final double i2 = cd.range.getMax();
      final double j1 = range.getMin();
      final double j2 = range.getMax();
      return ((i1 < j2) || DoubleMath.fuzzyEquals(i1, j2, DOUBLE_TOLERANCE))
          && ((i2 > j1) || DoubleMath.fuzzyEquals(i2, j1, DOUBLE_TOLERANCE));
    }

    // Union of the two ranges; merging differing ranges drops the "default" marker.
    public ConstraintData merge(final ConstraintData cd) {
      if (range.equals(cd.range)) {
        return new ConstraintData(range, isDefault);
      }
      return new ConstraintData(
          new NumericRange(
              Math.min(cd.range.getMin(), range.getMin()),
              Math.max(cd.range.getMax(), range.getMax())),
          false); // TODO: ideally, this would be set
      // based on some
      // logic
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + (isDefault ? 1231 : 1237);
      result = (prime * result) + ((range == null) ? 0 : range.hashCode());
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final ConstraintData other = (ConstraintData) obj;
      if (isDefault != other.isDefault) {
        return false;
      }
      if (range == null) {
        if (other.range != null) {
          return false;
        }
      } else if (!range.equals(other.range)) {
        return false;
      }
      return true;
    }

    /**
     * Ignores 'default' indicator
     *
     * @param other
     * @return {@code true} if these constraints match the other constraints
     */
    public boolean matches(final ConstraintData other) {
      if (this == other) {
        return true;
      }
      if (range == null) {
        if (other.range != null) {
          return false;
        }
      } else if (!DoubleMath.fuzzyEquals(range.getMin(), other.range.getMin(), DOUBLE_TOLERANCE)
          || !DoubleMath.fuzzyEquals(range.getMax(), other.range.getMax(), DOUBLE_TOLERANCE)) {
        return false;
      }
      return true;
    }
  }

  /** A list of Constraint Sets. Each Constraint Set is an individual hyper-cube query. */
  public static class ConstraintsByClass implements Constraints {
    // these basic queries are tied to NumericDimensionDefinition types, not
    // ideal, but third-parties can and will need to implement their own
    // queries if they implement their own dimension definitions
    protected List constraintsSets = new LinkedList<>();

    public ConstraintsByClass() {}

    public ConstraintsByClass(final ConstraintSet constraintSet) {
      constraintsSets.add(constraintSet);
    }

    public ConstraintsByClass(final List constraintSets) {
      constraintsSets.addAll(constraintSets);
    }

    public ConstraintsByClass merge(final ConstraintsByClass constraints) {
      return merge(constraints.constraintsSets);
    }

    // Cartesian merge: every incoming set is merged with every existing set, so the result
    // has |existing| entries per incoming set.
    public ConstraintsByClass merge(final List otherConstraintSets) {
      if (otherConstraintSets.isEmpty()) {
        return this;
      } else if (isEmpty()) {
        return new ConstraintsByClass(otherConstraintSets);
      }
      final List newSets = new LinkedList<>();
      for (final ConstraintSet newSet : otherConstraintSets) {
        add(newSets, constraintsSets, newSet);
      }
      return new ConstraintsByClass(newSets);
    }

    private static void add(
        final List newSets,
        final List currentSets,
        final ConstraintSet newSet) {
      for (final ConstraintSet cs : currentSets) {
        newSets.add(cs.merge(newSet));
      }
    }

    public boolean isEmpty() {
      return constraintsSets.isEmpty();
    }

    // True when every set here has at least one matching set in the other constraints.
    public boolean matches(final ConstraintsByClass constraints) {
      if (constraints.isEmpty() != isEmpty()) {
        return false;
      }
      for (final ConstraintSet set : constraintsSets) {
        boolean foundMatch = false;
        for (final ConstraintSet otherSet : constraints.constraintsSets) {
          foundMatch |= set.matches(otherSet);
        }
        if (!foundMatch) {
          return false;
        }
      }
      return true;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = (prime * result) + ((constraintsSets == null) ? 0 : constraintsSets.hashCode());
      return result;
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final ConstraintsByClass other = (ConstraintsByClass) obj;
      if (constraintsSets == null) {
        if (other.constraintsSets != null) {
          return false;
        }
      } else if (!constraintsSets.equals(other.constraintsSets)) {
        return false;
      }
      return true;
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * org.locationtech.geowave.core.store.query.constraints.Constraints#getIndexConstraints(org.
     * locationtech.geowave.core.index.NumericIndexStrategy)
     */
    @Override
    public List getIndexConstraints(final Index index) {
      final NumericIndexStrategy indexStrategy = index.getIndexStrategy();
      if (constraintsSets.isEmpty()) {
        return Collections.emptyList();
      }
      // one hyper-cube per non-empty constraint set
      final List setRanges = new ArrayList<>(constraintsSets.size());
      for (final ConstraintSet set : constraintsSets) {
        final MultiDimensionalNumericData mdSet = set.getIndexConstraints(indexStrategy);
        if (!mdSet.isEmpty()) {
          setRanges.add(mdSet);
        }
      }
      return setRanges;
    }

    // Length-prefixed list of serialized ConstraintSets.
    @Override
    public byte[] toBinary() {
      final List bytes = new ArrayList<>(constraintsSets.size());
      int totalBytes = 0;
      for (final ConstraintSet c : constraintsSets) {
        bytes.add(c.toBinary());
        final int length = bytes.get(bytes.size() - 1).length;
        totalBytes += (length + VarintUtils.unsignedIntByteLength(length));
      }
      final ByteBuffer buf =
          ByteBuffer.allocate(totalBytes + VarintUtils.unsignedIntByteLength(bytes.size()));
      VarintUtils.writeUnsignedInt(bytes.size(), buf);
      for (final byte[] entryBytes : bytes) {
        VarintUtils.writeUnsignedInt(entryBytes.length, buf);
        buf.put(entryBytes);
      }
      return buf.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final int numEntries = VarintUtils.readUnsignedInt(buf);
      final List sets = new LinkedList<>();
      for (int i = 0; i < numEntries; i++) {
        final byte[] d = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
        final ConstraintSet cs = new ConstraintSet();
        cs.fromBinary(d);
        sets.add(cs);
      }
      constraintsSets = sets;
    }

    // One filter per constraint set; multiple filters are combined via FilterList.
    // NOTE(review): presumably FilterList(false, ...) means non-conjunctive (logical OR) --
    // confirm against FilterList.
    @Override
    public List createFilters(final Index index, final BasicQuery parentQuery) {
      final List filters = new ArrayList<>();
      for (final ConstraintSet constraint : constraintsSets) {
        final QueryFilter filter = constraint.createFilter(index, parentQuery);
        if (filter != null) {
          filters.add(filter);
        }
      }
      if (!filters.isEmpty()) {
        return Collections.singletonList(
            filters.size() == 1 ? filters.get(0) : new FilterList(false, filters));
      }
      return Collections.emptyList();
    }
  }

  // this is a clientside flag that is unnecessary to persist
  protected transient boolean exact = true;

  public BasicQueryByClass() {}

  public BasicQueryByClass(final ConstraintsByClass constraints) {
    super(constraints);
  }

  public BasicQueryByClass(
      final ConstraintsByClass constraints,
      final BasicQueryCompareOperation compareOp) {
    super(constraints, compareOp);
  }

  @Override
  public byte[] toBinary() {
    // The constraint type is fixed, so serialize it directly (no persistable envelope).
    return constraints.toBinary();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    constraints = new ConstraintsByClass();
    constraints.fromBinary(bytes);
  }

  public boolean isExact() {
    return exact;
  }

  public void setExact(final boolean exact) {
    this.exact = exact;
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/Constraints.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.util.List;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Persistable source of per-index numeric constraints and query filters; {@link BasicQuery}
 * delegates both concerns to an implementation of this interface.
 */
public interface Constraints extends Persistable {
  // Numeric hyper-cube constraints applicable to the given index.
  List getIndexConstraints(Index index);

  // Fine-grained filters for the given index; parentQuery supplies the filter factory.
  List createFilters(Index index, BasicQuery parentQuery);
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CoordinateRangeQuery.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.CoordinateRangeQueryFilter;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Query constraints expressed directly as pre-computed index coordinate ranges. Filtering is
 * delegated entirely to {@link CoordinateRangeQueryFilter}; no numeric index constraints are
 * produced.
 */
public class CoordinateRangeQuery implements QueryConstraints {
  private NumericIndexStrategy indexStrategy;
  private MultiDimensionalCoordinateRangesArray[] coordinateRanges;

  public CoordinateRangeQuery() {}

  public CoordinateRangeQuery(
      final NumericIndexStrategy indexStrategy,
      final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {
    this.indexStrategy = indexStrategy;
    this.coordinateRanges = coordinateRanges;
  }

  @Override
  public List createFilters(final Index index) {
    return Collections.singletonList(
        new CoordinateRangeQueryFilter(indexStrategy, coordinateRanges));
  }

  @Override
  public List getIndexConstraints(final Index index) {
    // TODO should we consider implementing this?
    // Collections.emptyList() instead of the raw Collections.EMPTY_LIST constant: same shared
    // immutable empty list, but type-safe (no unchecked-assignment warning).
    return Collections.emptyList();
  }

  @Override
  public byte[] toBinary() {
    // Serialization is delegated to the filter, which carries the same state.
    return new CoordinateRangeQueryFilter(indexStrategy, coordinateRanges).toBinary();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final CoordinateRangeQueryFilter filter = new CoordinateRangeQueryFilter();
    filter.fromBinary(bytes);
    indexStrategy = filter.getIndexStrategy();
    coordinateRanges = filter.getCoordinateRanges();
  }
}
================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CoordinateRangeUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.util.HashMap;
import java.util.Map;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.Coordinate;
import org.locationtech.geowave.core.index.CoordinateRange;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinates;

/**
 * Lookup structures for testing whether multi-dimensional index coordinates fall within
 * pre-computed coordinate ranges. Factories pick a specialized implementation based on the shape
 * of the range data (none / single / multiple, bin-qualified / unbinned).
 */
public class CoordinateRangeUtils {
  /** Bounds check over full multi-dimensional coordinates. */
  public static interface RangeCache {
    public boolean inBounds(final MultiDimensionalCoordinates coordinates);
  }

  /** Bounds check for a single dimension's coordinate (possibly bin-qualified). */
  private static interface RangeByBinIdCache {
    public boolean inBounds(final Coordinate coordinate);
  }

  public static class RangeLookupFactory {
    // Picks the cheapest lookup for an array of range sets: null object when there are no
    // ranges, direct lookup for one set, OR-combining lookup for several.
    public static RangeCache createMultiRangeLookup(
        final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {
      if ((coordinateRanges == null) || (coordinateRanges.length == 0)) {
        return new NullRangeLookup();
      } else if (coordinateRanges.length == 1) {
        return createRangeLookup(coordinateRanges[0].getRangesArray());
      } else {
        return new MultiRangeCacheLookup(coordinateRanges);
      }
    }

    // Single-range fast path applies only when there is exactly one range set AND it is not
    // qualified by a multi-dimensional id; otherwise ranges are keyed by id.
    public static RangeCache createRangeLookup(
        final MultiDimensionalCoordinateRanges[] coordinateRanges) {
      if (coordinateRanges == null) {
        return new NullRangeLookup();
      } else if ((coordinateRanges.length == 1)
          && (coordinateRanges[0].getMultiDimensionalId() == null)) {
        return new SingleRangeLookup(coordinateRanges[0]);
      } else {
        return new MultiRangeLookup(coordinateRanges);
      }
    }
  }

  private static class MultiRangeCacheLookup implements RangeCache {
    private final RangeCache[] rangeCaches;

    public MultiRangeCacheLookup(final MultiDimensionalCoordinateRangesArray[] coordinateRanges) {
      rangeCaches = new RangeCache[coordinateRanges.length];
      for (int i = 0; i < coordinateRanges.length; i++) {
        rangeCaches[i] =
            RangeLookupFactory.createRangeLookup(coordinateRanges[i].getRangesArray());
      }
    }

    @Override
    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {
      // this should act as an OR clause
      for (final RangeCache r : rangeCaches) {
        if (r.inBounds(coordinates)) {
          return true;
        }
      }
      return false;
    }
  }

  // Null object: with no ranges, nothing is ever in bounds.
  private static class NullRangeLookup implements RangeCache {
    @Override
    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {
      return false;
    }
  }

  private static class SingleRangeLookup implements RangeCache {
    private final MultiDimensionalBinLookup singleton;

    public SingleRangeLookup(final MultiDimensionalCoordinateRanges coordinateRanges) {
      singleton = new MultiDimensionalBinLookup(coordinateRanges);
    }

    @Override
    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {
      return inBounds(coordinates, singleton);
    }

    // Every dimension's coordinate must be within its per-dimension range (AND semantics).
    private static boolean inBounds(
        final MultiDimensionalCoordinates coordinates,
        final MultiDimensionalBinLookup binLookup) {
      // NOTE(review): retVal is only used for its length (the dimension count); its contents
      // are never written or read.
      final CoordinateRange[] retVal = new CoordinateRange[coordinates.getNumDimensions()];
      for (int d = 0; d < retVal.length; d++) {
        final Coordinate c = coordinates.getCoordinate(d);
        if (!binLookup.inBounds(d, c)) {
          return false;
        }
      }
      return true;
    }
  }

  private static class MultiRangeLookup implements RangeCache {
    // Per-bin lookups keyed by each range set's multi-dimensional id.
    private final Map multiDimensionalIdToRangeMap;

    public MultiRangeLookup(final MultiDimensionalCoordinateRanges[] coordinateRanges) {
      multiDimensionalIdToRangeMap = new HashMap<>();
      for (final MultiDimensionalCoordinateRanges r : coordinateRanges) {
        multiDimensionalIdToRangeMap.put(
            new ByteArray(r.getMultiDimensionalId()),
            new MultiDimensionalBinLookup(r));
      }
    }

    @Override
    public boolean inBounds(final MultiDimensionalCoordinates coordinates) {
      final MultiDimensionalBinLookup binLookup =
          multiDimensionalIdToRangeMap.get(new ByteArray(coordinates.getMultiDimensionalId()));
      if (binLookup == null) {
        // no ranges registered for this coordinate's multi-dimensional id
        return false;
      }
      return SingleRangeLookup.inBounds(coordinates, binLookup);
    }
  }

  private static class BinLookupFactory {
    // Mirrors RangeLookupFactory at the per-dimension level: the single-bin fast path applies
    // only to exactly one un-binned range.
    public static RangeByBinIdCache createBinLookup(final CoordinateRange[] coordinateRanges) {
      if (coordinateRanges == null) {
        return new NullBinLookup();
      } else if ((coordinateRanges.length == 1) && (coordinateRanges[0].getBinId() == null)) {
        return new SingleBinLookup(coordinateRanges[0]);
      } else {
        return new MultiBinLookup(coordinateRanges);
      }
    }
  }

  private static class MultiDimensionalBinLookup {
    // One per-dimension bin cache, indexed by dimension ordinal.
    private final RangeByBinIdCache[] rangePerDimensionCache;

    private MultiDimensionalBinLookup(final MultiDimensionalCoordinateRanges ranges) {
      rangePerDimensionCache = new RangeByBinIdCache[ranges.getNumDimensions()];
      for (int d = 0; d < rangePerDimensionCache.length; d++) {
        rangePerDimensionCache[d] =
            BinLookupFactory.createBinLookup(ranges.getRangeForDimension(d));
      }
    }

    public boolean inBounds(final int dimension, final Coordinate coordinate) {
      return rangePerDimensionCache[dimension].inBounds(coordinate);
    }
  }

  // Null object: with no ranges for this dimension, nothing is in bounds.
  private static class NullBinLookup implements RangeByBinIdCache {
    @Override
    public boolean inBounds(final Coordinate coordinate) {
      return false;
    }
  }

  private static class SingleBinLookup implements RangeByBinIdCache {
    private final CoordinateRange singleton;

    public SingleBinLookup(final CoordinateRange singleton) {
      this.singleton = singleton;
    }

    @Override
    public boolean inBounds(final Coordinate coordinate) {
      return inBounds(singleton, coordinate);
    }

    // Inclusive min/max check on the raw coordinate value.
    private static boolean inBounds(final CoordinateRange range, final Coordinate coordinate) {
      final long coord = coordinate.getCoordinate();
      return (range.getMinCoordinate() <= coord) && (range.getMaxCoordinate() >= coord);
    }
  }

  private static class MultiBinLookup implements RangeByBinIdCache {
    // Ranges keyed by bin id for bin-qualified dimensions.
    private final Map binIdToRangeMap;

    public MultiBinLookup(final
CoordinateRange[] coordinateRanges) { binIdToRangeMap = new HashMap<>(); for (final CoordinateRange r : coordinateRanges) { binIdToRangeMap.put(new ByteArray(r.getBinId()), r); } } @Override public boolean inBounds(final Coordinate coordinate) { final CoordinateRange range = binIdToRangeMap.get(new ByteArray(coordinate.getBinId())); if (range == null) { return false; } return SingleBinLookup.inBounds(range, coordinate); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CustomQueryConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import com.clearspring.analytics.util.Lists; public class CustomQueryConstraints implements AdapterAndIndexBasedQueryConstraints { private C customConstraints; private List filters; public CustomQueryConstraints() { super(); } public CustomQueryConstraints(final C customConstraints) { this(customConstraints, Lists.newArrayList()); } public CustomQueryConstraints(final C customConstraints, final List filters) { this.customConstraints = customConstraints; this.filters = filters; } public C getCustomConstraints() { return customConstraints; } @Override public byte[] toBinary() { final byte[] constraintBytes = PersistenceUtils.toBinary(customConstraints); final byte[] filterBytes = PersistenceUtils.toBinary(filters); final ByteBuffer buffer = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(constraintBytes.length) + 
VarintUtils.unsignedIntByteLength(filterBytes.length) + constraintBytes.length + filterBytes.length); VarintUtils.writeUnsignedInt(constraintBytes.length, buffer); buffer.put(constraintBytes); VarintUtils.writeUnsignedInt(filterBytes.length, buffer); buffer.put(filterBytes); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final byte[] constraintBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(constraintBytes); customConstraints = (C) PersistenceUtils.fromBinary(constraintBytes); final byte[] filterBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(filterBytes); filters = (List) PersistenceUtils.fromBinaryAsList(filterBytes); } @Override public List createFilters(final Index index) { return filters; } @Override public List getIndexConstraints(final Index index) { if (index instanceof CustomIndexStrategy) { if (((CustomIndexStrategy) index).getConstraintsClass().isInstance(customConstraints)) { return Collections.singletonList(new InternalCustomConstraints(customConstraints)); } } return Collections.emptyList(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((customConstraints == null) ? 
0 : customConstraints.hashCode()); result = (prime * result) + filters.hashCode(); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomQueryConstraints other = (CustomQueryConstraints) obj; if (customConstraints == null) { if (other.customConstraints != null) { return false; } } else if (!customConstraints.equals(other.customConstraints)) { return false; } if (!filters.equals(other.filters)) { return false; } return true; } public static class InternalCustomConstraints extends BasicNumericDataset { private C customConstraints; public InternalCustomConstraints() {} public InternalCustomConstraints(final C customConstraints) { super(); this.customConstraints = customConstraints; } public C getCustomConstraints() { return customConstraints; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(customConstraints); } @Override public void fromBinary(final byte[] bytes) { customConstraints = (C) PersistenceUtils.fromBinary(bytes); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + ((customConstraints == null) ? 
0 : customConstraints.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final InternalCustomConstraints other = (InternalCustomConstraints) obj; if (customConstraints == null) { if (other.customConstraints != null) { return false; } } else if (!customConstraints.equals(other.customConstraints)) { return false; } return true; } } @Override public QueryConstraints createQueryConstraints( final InternalDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping) { if ((index instanceof CustomIndexStrategy) && (((CustomIndexStrategy) index).getFilter(getCustomConstraints()) != null)) { return new CustomQueryConstraintsWithFilter( getCustomConstraints(), adapter, new AdapterToIndexMapping[] {indexMapping}); } return this; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/CustomQueryConstraintsWithFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.CustomIndexStrategy.PersistableBiPredicate; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import com.google.common.primitives.Bytes; public class CustomQueryConstraintsWithFilter extends CustomQueryConstraints { private InternalDataAdapter adapter; private Map indexMappings; public CustomQueryConstraintsWithFilter() { super(); } public CustomQueryConstraintsWithFilter( final C 
customConstraints, final InternalDataAdapter adapter, final AdapterToIndexMapping[] indexMappings) { super(customConstraints); this.adapter = adapter; this.indexMappings = Arrays.stream(indexMappings).collect( Collectors.toMap(AdapterToIndexMapping::getIndexName, mapping -> mapping)); } @Override public byte[] toBinary() { final byte[] adapterBinary = PersistenceUtils.toBinary(adapter); final byte[] mappingBinary = PersistenceUtils.toBinary(indexMappings.values()); return Bytes.concat( VarintUtils.writeUnsignedInt(adapterBinary.length), adapterBinary, VarintUtils.writeUnsignedInt(mappingBinary.length), mappingBinary, super.toBinary()); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] adapterBinary = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(adapterBinary); adapter = (InternalDataAdapter) PersistenceUtils.fromBinary(adapterBinary); final byte[] mappingBinary = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(mappingBinary); List mappings = (List) PersistenceUtils.fromBinaryAsList(mappingBinary); indexMappings = mappings.stream().collect( Collectors.toMap(AdapterToIndexMapping::getIndexName, mapping -> mapping)); final byte[] superBinary = new byte[buf.remaining()]; buf.get(superBinary); super.fromBinary(superBinary); } @Override public List createFilters(final Index index) { if (index instanceof CustomIndexStrategy) { if (((CustomIndexStrategy) index).getConstraintsClass().isInstance(getCustomConstraints())) { return Collections.singletonList( new InternalCustomQueryFilter( getCustomConstraints(), adapter, indexMappings.get(index.getName()), ((CustomIndexStrategy) index).getFilter(getCustomConstraints()))); } } return Collections.emptyList(); } public static class InternalCustomQueryFilter implements QueryFilter { private C customConstraints; private InternalDataAdapter adapter; private AdapterToIndexMapping indexMapping; private PersistableBiPredicate predicate; public 
InternalCustomQueryFilter() {} public InternalCustomQueryFilter( final C customConstraints, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final PersistableBiPredicate predicate) { super(); this.customConstraints = customConstraints; this.adapter = adapter; this.indexMapping = indexMapping; this.predicate = predicate; } public C getCustomConstraints() { return customConstraints; } @Override public byte[] toBinary() { final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); final byte[] mappingBytes = PersistenceUtils.toBinary(indexMapping); final byte[] predicateBytes = PersistenceUtils.toBinary(predicate); return Bytes.concat( VarintUtils.writeUnsignedInt(adapterBytes.length), adapterBytes, VarintUtils.writeUnsignedInt(mappingBytes.length), mappingBytes, VarintUtils.writeUnsignedInt(predicateBytes.length), predicateBytes, PersistenceUtils.toBinary(customConstraints)); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] adapterBytes = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(adapterBytes); adapter = (InternalDataAdapter) PersistenceUtils.fromBinary(adapterBytes); final byte[] mappingBytes = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(mappingBytes); indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes); final byte[] predicateBytes = new byte[VarintUtils.readUnsignedInt(buf)]; buf.get(predicateBytes); predicate = (PersistableBiPredicate) PersistenceUtils.fromBinary(predicateBytes); final byte[] constraintsBytes = new byte[buf.remaining()]; buf.get(constraintsBytes); customConstraints = (C) PersistenceUtils.fromBinary(constraintsBytes); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((adapter == null) ? 0 : adapter.hashCode()); result = (prime * result) + ((customConstraints == null) ? 
0 : customConstraints.hashCode()); result = (prime * result) + ((predicate == null) ? 0 : predicate.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final InternalCustomQueryFilter other = (InternalCustomQueryFilter) obj; if (adapter == null) { if (other.adapter != null) { return false; } } else if (!adapter.equals(other.adapter)) { return false; } if (customConstraints == null) { if (other.customConstraints != null) { return false; } } else if (!customConstraints.equals(other.customConstraints)) { return false; } if (predicate == null) { if (other.predicate != null) { return false; } } else if (!predicate.equals(other.predicate)) { return false; } return true; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { if ((predicate != null) && (indexModel != null) && (adapter != null)) { final PersistentDataset adapterExtendedValues = new MultiFieldPersistentDataset<>(); if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) { ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues( adapter, indexModel); final PersistentDataset existingExtValues = ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData(); if (persistenceEncoding.isAsync()) { return false; } if (existingExtValues != null) { adapterExtendedValues.addValues(existingExtValues.getValues()); } } final IndexedAdapterPersistenceEncoding encoding = new IndexedAdapterPersistenceEncoding( persistenceEncoding.getInternalAdapterId(), persistenceEncoding.getDataId(), persistenceEncoding.getInsertionPartitionKey(), persistenceEncoding.getInsertionSortKey(), persistenceEncoding.getDuplicateCount(), (PersistentDataset) persistenceEncoding.getCommonData(), new MultiFieldPersistentDataset(), adapterExtendedValues); final T entry = 
adapter.decode( encoding, indexMapping, new IndexImpl( null, // we have to assume this adapter doesn't use the numeric index strategy // and only the common index model to decode the entry, // we pass along a null strategy to eliminate the necessity to send a // serialization of the strategy in the options of this iterator indexModel)); if (entry == null) { return false; } return predicate.test(entry, customConstraints); } return false; } } @Override public QueryConstraints createQueryConstraints( final InternalDataAdapter adapter, final Index index, final AdapterToIndexMapping indexMapping) { return this; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = (prime * result) + ((adapter == null) ? 0 : adapter.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (getClass() != obj.getClass()) { return false; } final CustomQueryConstraintsWithFilter other = (CustomQueryConstraintsWithFilter) obj; if (adapter == null) { if (other.adapter != null) { return false; } } else if (!adapter.equals(other.adapter)) { return false; } return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/DataIdQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.DataIdQueryFilter; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class DataIdQuery implements QueryConstraints { private byte[][] dataIds; public DataIdQuery() {} public DataIdQuery(final byte[] dataId) { dataIds = new byte[][] {dataId}; } public DataIdQuery(final byte[][] dataIds) { this.dataIds = dataIds; } public byte[][] getDataIds() { return dataIds; } @Override public List createFilters(final Index index) { final List filters = new ArrayList<>(); filters.add(new DataIdQueryFilter(dataIds)); return filters; } @Override public List getIndexConstraints(final Index index) { return Collections.emptyList(); } @Override public byte[] toBinary() { final int length = Arrays.stream(dataIds).map( i -> i.length + VarintUtils.unsignedIntByteLength(i.length)).reduce(0, Integer::sum); final ByteBuffer buf = ByteBuffer.allocate(length + VarintUtils.unsignedIntByteLength(dataIds.length)); VarintUtils.writeUnsignedInt(dataIds.length, buf); Arrays.stream(dataIds).forEach(i -> { VarintUtils.writeUnsignedInt(i.length, buf); buf.put(i); }); return buf.array(); } @Override public void fromBinary(final byte[] bytes) 
{ final ByteBuffer buf = ByteBuffer.wrap(bytes); final int length = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, length); final byte[][] dataIds = new byte[length][]; for (int i = 0; i < length; i++) { final int iLength = VarintUtils.readUnsignedInt(buf); dataIds[i] = ByteArrayUtils.safeRead(buf, iLength);; } this.dataIds = dataIds; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/DataIdRangeQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.DataIdRangeQueryFilter; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class DataIdRangeQuery implements QueryConstraints { private byte[] startDataIdInclusive; private byte[] endDataIdInclusive; private boolean reverse; public DataIdRangeQuery() {} public DataIdRangeQuery(final byte[] startDataIdInclusive, final byte[] endDataIdInclusive) { this(startDataIdInclusive, endDataIdInclusive, false); } public DataIdRangeQuery( final byte[] startDataIdInclusive, final byte[] endDataIdInclusive, final boolean reverse) { this.startDataIdInclusive = startDataIdInclusive; this.endDataIdInclusive = endDataIdInclusive; this.reverse = reverse; } public byte[] getStartDataIdInclusive() { return startDataIdInclusive; } public byte[] getEndDataIdInclusive() { return endDataIdInclusive; } public boolean isReverse() { return reverse; } @Override public List createFilters(final Index index) { final List filters = new ArrayList<>(); filters.add(new DataIdRangeQueryFilter(startDataIdInclusive, endDataIdInclusive)); return filters; } @Override public List getIndexConstraints(final Index index) { return Collections.emptyList(); } @Override public byte[] toBinary() { return new DataIdRangeQueryFilter(startDataIdInclusive, endDataIdInclusive).toBinary(); } @Override public void 
fromBinary(final byte[] bytes) { final DataIdRangeQueryFilter filter = new DataIdRangeQueryFilter(); filter.fromBinary(bytes); startDataIdInclusive = filter.getStartDataIdInclusive(); endDataIdInclusive = filter.getEndDataIdInclusive(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/EverythingQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class EverythingQuery implements QueryConstraints { public EverythingQuery() {} @Override public List createFilters(final Index index) { return Collections.emptyList(); } @Override public List getIndexConstraints(final Index index) { return Collections.emptyList(); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public int hashCode() { return getClass().hashCode(); } @Override public boolean equals(final Object obj) { if (obj == null) { return false; } return getClass() == obj.getClass(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/ExplicitFilteredQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.QueryFilter; /** * Allows the caller to provide explicit numeric constraints and filters for a query. */ public class ExplicitFilteredQuery implements QueryConstraints { private List filters; private List constraints; public ExplicitFilteredQuery() {} public ExplicitFilteredQuery( final List constraints, final List filters) { this.constraints = constraints; this.filters = filters; } @Override public byte[] toBinary() { final byte[] filterBytes = PersistenceUtils.toBinary(filters); final byte[] constraintBytes = PersistenceUtils.toBinary(constraints); final ByteBuffer buffer = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(filterBytes.length) + VarintUtils.unsignedIntByteLength(constraintBytes.length) + filterBytes.length + constraintBytes.length); VarintUtils.writeUnsignedInt(filterBytes.length, buffer); buffer.put(filterBytes); VarintUtils.writeUnsignedInt(constraintBytes.length, buffer); buffer.put(constraintBytes); return buffer.array(); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final byte[] filterBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(filterBytes); final byte[] constraintBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(constraintBytes); filters = (List) 
PersistenceUtils.fromBinaryAsList(filterBytes); constraints = (List) PersistenceUtils.fromBinaryAsList(constraintBytes); } @Override public List createFilters(Index index) { return filters; } @Override public List getIndexConstraints(Index index) { return constraints; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/FilteredEverythingQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.QueryFilter; /** * Fully cans the index, but passes every entry through the given filters. */ public class FilteredEverythingQuery implements QueryConstraints { private List filters; public FilteredEverythingQuery() {} public FilteredEverythingQuery(final List filters) { this.filters = filters; } @Override public List createFilters(final Index index) { return filters; } @Override public List getIndexConstraints(final Index index) { return Collections.emptyList(); } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(filters); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(final byte[] bytes) { filters = (List) PersistenceUtils.fromBinaryAsList(bytes); } @Override public int hashCode() { return filters.hashCode(); } @Override public boolean equals(final Object obj) { if (obj == null) { return false; } if (!(obj instanceof FilteredEverythingQuery)) { return false; } return filters.equals(((FilteredEverythingQuery) obj).filters); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/InsertionIdQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.InsertionIdQueryFilter;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Query for a single entry identified by its exact insertion id: the partition key, sort key, and
 * data id it was written with.
 */
public class InsertionIdQuery implements QueryConstraints {
  private byte[] partitionKey;
  private byte[] sortKey;
  private byte[] dataId;

  public InsertionIdQuery() {}

  public InsertionIdQuery(final byte[] partitionKey, final byte[] sortKey, final byte[] dataId) {
    // Normalize nulls to empty arrays so the downstream filter never sees null keys.
    this.partitionKey = partitionKey == null ? new byte[0] : partitionKey;
    this.sortKey = sortKey == null ? new byte[0] : sortKey;
    this.dataId = dataId == null ? new byte[0] : dataId;
  }

  public byte[] getPartitionKey() {
    return partitionKey;
  }

  public byte[] getSortKey() {
    return sortKey;
  }

  public byte[] getDataId() {
    return dataId;
  }

  @Override
  public List<QueryFilter> createFilters(final Index index) {
    final List<QueryFilter> filters = new ArrayList<>();
    filters.add(new InsertionIdQueryFilter(partitionKey, sortKey, dataId));
    return filters;
  }

  /**
   * @return an empty list — matching is done by exact insertion id via the filter, not by index
   *         range constraints
   */
  @Override
  public List<MultiDimensionalNumericData> getIndexConstraints(final Index index) {
    return Collections.emptyList();
  }

  @Override
  public byte[] toBinary() {
    // Nulls serialize as empty arrays; fromBinary() restores them to null.
    final byte[] partitionKeyBinary = (partitionKey != null) ? partitionKey : new byte[0];
    final byte[] sortKeyBinary = (sortKey != null) ? sortKey : new byte[0];
    final byte[] dataIdBinary = (dataId != null) ? dataId : new byte[0];
    // Layout: varint(partition length), partition key, varint(sort length), sort key, then the
    // data id occupying the remainder of the buffer (no length prefix needed).
    final ByteBuffer buf =
        ByteBuffer.allocate(
            VarintUtils.unsignedIntByteLength(partitionKeyBinary.length)
                + VarintUtils.unsignedIntByteLength(sortKeyBinary.length)
                + partitionKeyBinary.length
                + sortKeyBinary.length
                // BUGFIX: the data id bytes were previously not counted in the allocation, so
                // buf.put(dataIdBinary) threw BufferOverflowException for any non-empty dataId.
                + dataIdBinary.length);
    VarintUtils.writeUnsignedInt(partitionKeyBinary.length, buf);
    buf.put(partitionKeyBinary);
    VarintUtils.writeUnsignedInt(sortKeyBinary.length, buf);
    buf.put(sortKeyBinary);
    buf.put(dataIdBinary);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int partitionKeyBinaryLength = VarintUtils.readUnsignedInt(buf);
    if (partitionKeyBinaryLength == 0) {
      partitionKey = null;
    } else {
      partitionKey = ByteArrayUtils.safeRead(buf, partitionKeyBinaryLength);
    }
    final int sortKeyBinaryLength = VarintUtils.readUnsignedInt(buf);
    if (sortKeyBinaryLength == 0) {
      sortKey = null;
    } else {
      sortKey = ByteArrayUtils.safeRead(buf, sortKeyBinaryLength);
    }
    // The data id is whatever remains after the two length-prefixed keys.
    final byte[] dataIdBinary = new byte[buf.remaining()];
    if (dataIdBinary.length == 0) {
      dataId = null;
    } else {
      buf.get(dataIdBinary);
      dataId = dataIdBinary;
    }
  }
}

================================================ FILE:
core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/OptimalExpressionQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.index.text.ExplicitTextSearch;
import org.locationtech.geowave.core.index.text.TextIndexStrategy;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.AttributeIndex;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.IndexFieldMapper;
import org.locationtech.geowave.core.store.api.IndexStatistic;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.base.BaseQueryOptions;
import org.locationtech.geowave.core.store.index.CustomIndex;
import org.locationtech.geowave.core.store.index.IndexFilter;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter;
import org.locationtech.geowave.core.store.query.filter.ExpressionQueryFilter;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;
import org.locationtech.geowave.core.store.query.filter.expression.Filter;
import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Sets;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Determines the best index and provides constraints based on a given GeoWave filter.
 */
public class OptimalExpressionQuery implements AdapterAndIndexBasedQueryConstraints, QueryConstraints {

  private static final Logger LOGGER = LoggerFactory.getLogger(OptimalExpressionQuery.class);

  // The expression filter that constraints are derived from; null until set or deserialized.
  private Filter filter;
  // Optional predicate restricting which indices may even be considered; may be null.
  private IndexFilter indexFilter;

  public OptimalExpressionQuery() {}

  public OptimalExpressionQuery(final Filter filter) {
    this(filter, null);
  }

  public OptimalExpressionQuery(final Filter filter, final IndexFilter indexFilter) {
    this.filter = filter;
    this.indexFilter = indexFilter;
  }

  // Per-type-name constraints computed in determineBestIndices and consumed later by
  // createQueryConstraints. NOTE(review): generic type parameters appear to have been stripped
  // from this copy of the file (here and throughout the class, e.g. "Map>") — presumably
  // Map<String, FilterConstraints<?>>; confirm against upstream before compiling.
  private final Map> constraintCache = Maps.newHashMap();

  /**
   * Selects, for each adapter, the index best able to serve {@code filter}, and groups the
   * adapters by their chosen index. Candidate indices whose constrained fields can be fully
   * satisfied by the filter have their constraints cached for later use in
   * {@link #createQueryConstraints}.
   *
   * @param baseOptions query options; an explicit index name restricts the candidate set
   * @param adapters the adapters to consider
   * @param adapterIndexMappingStore provides the index mappings for each adapter
   * @param indexStore resolves mappings to concrete indices
   * @param statisticsStore used to estimate cardinality when several indices are constrained
   * @return pairs of (chosen index, adapters that should be queried against that index)
   */
  @SuppressWarnings({"rawtypes", "unchecked"})
  public List>>> determineBestIndices(
      final BaseQueryOptions baseOptions,
      final InternalDataAdapter[] adapters,
      final AdapterIndexMappingStore adapterIndexMappingStore,
      final IndexStore indexStore,
      final DataStatisticsStore statisticsStore) {
    final Map>> bestIndices = Maps.newHashMap();
    final Set referencedFields = Sets.newHashSet();
    filter.addReferencedFields(referencedFields);
    for (final InternalDataAdapter adapter : adapters) {
      // Skip adapters that do not define every field the filter references.
      if (!adapterMatchesFilter(adapter, referencedFields)) {
        continue;
      }
      final AdapterToIndexMapping[] adapterIndices =
          adapterIndexMappingStore.getIndicesForAdapter(adapter.getAdapterId());
      final Map> indexConstraints = Maps.newHashMap();
      Index bestIndex = null;
      for (final AdapterToIndexMapping mapping : adapterIndices) {
        // Honor an explicitly requested index name, if one was provided.
        if ((baseOptions.getIndexName() != null)
            && !baseOptions.getIndexName().equals(mapping.getIndexName())) {
          continue;
        }
        final Index index = mapping.getIndex(indexStore);
        if (indexFilter != null && !indexFilter.test(index)) {
          continue;
        }
        // Fallback choice (used when no index ends up constrained): prefer any non-attribute
        // index over an attribute index.
        if ((bestIndex == null)
            || ((bestIndex instanceof AttributeIndex) && !(index instanceof AttributeIndex))) {
          bestIndex = index;
        }
        final Set indexedFields = Sets.newHashSet();
        final Class filterClass;
        if ((index instanceof CustomIndex)
            && (((CustomIndex) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) {
          // Text index: only usable when its entry converter maps a single adapter field.
          final TextIndexStrategy indexStrategy =
              (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy();
          if (!(indexStrategy.getEntryConverter() instanceof AdapterFieldTextIndexEntryConverter)) {
            continue;
          }
          indexedFields.add(
              ((AdapterFieldTextIndexEntryConverter) indexStrategy.getEntryConverter()).getFieldName());
          filterClass = String.class;
        } else {
          for (final IndexFieldMapper mapper : mapping.getIndexFieldMappers()) {
            for (final String adapterField : mapper.getAdapterFields()) {
              indexedFields.add(adapterField);
            }
          }
          // Remove any fields that are part of the common index model, but not used in the index
          // strategy. They shouldn't be considered when trying to find a best match. In the future
          // it may be useful to consider an index that has extra common index dimensions that
          // contain filtered fields over one that only matches indexed dimensions. For example, if
          // I have a spatial index, and a spatial index that stores time, it should pick the one
          // that stores time if I supply a temporal constraint, even though it isn't part of the
          // index strategy.
          final int modelDimensions = index.getIndexModel().getDimensions().length;
          final int strategyDimensions =
              index.getIndexStrategy().getOrderedDimensionDefinitions().length;
          for (int i = modelDimensions - 1; i >= strategyDimensions; i--) {
            final IndexFieldMapper mapper =
                mapping.getMapperForIndexField(
                    index.getIndexModel().getDimensions()[i].getFieldName());
            for (final String adapterField : mapper.getAdapterFields()) {
              indexedFields.remove(adapterField);
            }
          }
          filterClass = Double.class;
        }
        // Only consider the index if the filter references all of its indexed fields and can
        // constrain every one of them.
        if (referencedFields.containsAll(indexedFields)) {
          final FilterConstraints constraints =
              filter.getConstraints(
                  filterClass,
                  statisticsStore,
                  adapter,
                  mapping,
                  index,
                  indexedFields);
          if (constraints.constrainsAllFields(indexedFields)) {
            indexConstraints.put(index, constraints);
          }
        }
      }
      if (indexConstraints.size() == 1) {
        // Exactly one constrained candidate: take it without consulting statistics.
        final Entry> bestEntry = indexConstraints.entrySet().iterator().next();
        bestIndex = bestEntry.getKey();
        constraintCache.put(adapter.getTypeName(), bestEntry.getValue());
      } else if (indexConstraints.size() > 1) {
        // determine which constraint is the best
        double bestCardinality = Double.MAX_VALUE;
        Index bestConstrainedIndex = null;
        for (final Entry> entry : indexConstraints.entrySet()) {
          final QueryRanges ranges = entry.getValue().getQueryRanges(baseOptions, statisticsStore);
          if (ranges.isEmpty()) {
            continue;
          }
          // TODO: A future optimization would be to add a default numeric histogram for any numeric
          // index dimensions and just use the index data ranges to determine cardinality rather
          // than decomposing query ranges.
          final StatisticId statisticId =
              IndexStatistic.generateStatisticId(
                  entry.getKey().getName(),
                  RowRangeHistogramStatistic.STATS_TYPE,
                  Statistic.INTERNAL_TAG);
          final RowRangeHistogramStatistic histogram =
              (RowRangeHistogramStatistic) statisticsStore.getStatisticById(statisticId);
          final double cardinality =
              DataStoreUtils.cardinality(
                  statisticsStore,
                  histogram,
                  adapter,
                  bestConstrainedIndex,
                  ranges);
          // Lower estimated cardinality wins: fewer rows to scan.
          if ((bestConstrainedIndex == null) || (cardinality < bestCardinality)) {
            bestConstrainedIndex = entry.getKey();
            bestCardinality = cardinality;
          }
        }
        if (bestConstrainedIndex != null) {
          bestIndex = bestConstrainedIndex;
          constraintCache.put(adapter.getTypeName(), indexConstraints.get(bestIndex));
        }
      }
      if (bestIndex == null) {
        continue;
      }
      if (!bestIndices.containsKey(bestIndex)) {
        bestIndices.put(bestIndex, Lists.newArrayList());
      }
      bestIndices.get(bestIndex).add(adapter);
    }
    return bestIndices.entrySet().stream().map(e -> Pair.of(e.getKey(), e.getValue())).collect(
        Collectors.toList());
  }

  /** @return true if the adapter defines a field descriptor for every filtered field */
  private boolean adapterMatchesFilter(
      final DataTypeAdapter adapter,
      final Set filteredFields) {
    for (final String field : filteredFields) {
      if (adapter.getFieldDescriptor(field) == null) {
        return false;
      }
    }
    return true;
  }

  /**
   * Builds concrete constraints for the given adapter/index pair, consuming any constraints cached
   * by {@link #determineBestIndices}. With no cached constraints for the adapter's type name, this
   * falls back to a full scan with the expression applied as a fine-grained filter.
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  @Override
  public QueryConstraints createQueryConstraints(
      final InternalDataAdapter adapter,
      final Index index,
      final AdapterToIndexMapping indexMapping) {
    if (!constraintCache.containsKey(adapter.getTypeName())) {
      filter.prepare(adapter, indexMapping, index);
      return new FilteredEverythingQuery(
          Lists.newArrayList(new ExpressionQueryFilter<>(filter, adapter, indexMapping)));
    }
    // Drop predicates already satisfied exactly by the index constraints; whatever remains is
    // applied as a fine-grained filter on the results.
    final Filter reduced =
        filter.removePredicatesForFields(
            constraintCache.get(adapter.getTypeName()).getExactConstrainedFields());
    final List filterList;
    if (reduced != null) {
      reduced.prepare(adapter, indexMapping, index);
      filterList = Lists.newArrayList(new ExpressionQueryFilter<>(reduced, adapter, indexMapping));
    } else {
      filterList = Lists.newArrayList();
    }
    if (index instanceof CustomIndex) {
      return new CustomQueryConstraints(
          new ExplicitTextSearch((List) constraintCache.get(adapter.getTypeName()).getIndexData()),
          filterList);
    }
    return new ExplicitFilteredQuery(
        (List) constraintCache.get(adapter.getTypeName()).getIndexData(),
        filterList);
  }

  @Override
  public byte[] toBinary() {
    byte[] filterBytes;
    if (filter == null) {
      LOGGER.warn("Filter is null");
      filterBytes = new byte[] {};
    } else {
      filterBytes = PersistenceUtils.toBinary(filter);
    }
    byte[] indexFilterBytes;
    if (indexFilter == null) {
      indexFilterBytes = new byte[] {};
    } else {
      indexFilterBytes = PersistenceUtils.toBinary(indexFilter);
    }
    // Layout: varint(filter length), filter bytes, then index-filter bytes filling the remainder.
    final ByteBuffer buffer =
        ByteBuffer.allocate(
            VarintUtils.unsignedIntByteLength(filterBytes.length)
                + filterBytes.length
                + indexFilterBytes.length);
    VarintUtils.writeUnsignedInt(filterBytes.length, buffer);
    buffer.put(filterBytes);
    buffer.put(indexFilterBytes);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    byte[] filterBytes = new byte[VarintUtils.readUnsignedInt(buffer)];
    buffer.get(filterBytes);
    if (filterBytes.length > 0) {
      filter = (Filter) PersistenceUtils.fromBinary(filterBytes);
    } else {
      LOGGER.warn("CQL filter is empty bytes");
      filter = null;
    }
    if (buffer.hasRemaining()) {
      final byte[] indexFilterBytes = new byte[buffer.remaining()];
      buffer.get(indexFilterBytes);
      indexFilter = (IndexFilter) PersistenceUtils.fromBinary(indexFilterBytes);
    } else {
      indexFilter = null;
    }
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/PrefixIdQuery.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.PrefixIdQueryFilter; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class PrefixIdQuery implements QueryConstraints { private byte[] sortKeyPrefix; private byte[] partitionKey; public PrefixIdQuery() {} public PrefixIdQuery(final byte[] partitionKey, final byte[] sortKeyPrefix) { this.partitionKey = partitionKey; this.sortKeyPrefix = sortKeyPrefix; } public byte[] getPartitionKey() { return partitionKey; } public byte[] getSortKeyPrefix() { return sortKeyPrefix; } @Override public List createFilters(final Index index) { final List filters = new ArrayList<>(); filters.add(new PrefixIdQueryFilter(partitionKey, sortKeyPrefix)); return filters; } @Override public List getIndexConstraints(final Index index) { return Collections.emptyList(); } @Override public byte[] toBinary() { byte[] sortKeyPrefixBinary, partitionKeyBinary; if (partitionKey != null) { partitionKeyBinary = partitionKey; } else { partitionKeyBinary = new byte[0]; } if (sortKeyPrefix != null) { sortKeyPrefixBinary = sortKeyPrefix; } else { sortKeyPrefixBinary = new byte[0]; } final ByteBuffer buf = ByteBuffer.allocate( 
VarintUtils.unsignedIntByteLength(partitionKeyBinary.length) + sortKeyPrefixBinary.length + partitionKeyBinary.length); VarintUtils.writeUnsignedInt(partitionKeyBinary.length, buf); buf.put(partitionKeyBinary); buf.put(sortKeyPrefixBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int partitionKeyBinaryLength = VarintUtils.readUnsignedInt(buf); if (partitionKeyBinaryLength == 0) { partitionKey = null; } else { partitionKey = ByteArrayUtils.safeRead(buf, partitionKeyBinaryLength); } final byte[] sortKeyPrefixBinary = new byte[buf.remaining()]; if (sortKeyPrefixBinary.length == 0) { sortKeyPrefix = null; } else { buf.get(sortKeyPrefixBinary); sortKeyPrefix = sortKeyPrefixBinary; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/QueryConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

import java.util.List;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/** This interface fully describes a query */
public interface QueryConstraints extends Persistable {
  /**
   * This is a list of filters (either client filters or distributed filters) which will be applied
   * to the result set. QueryFilters of type DistributableQueryFilter will automatically be
   * distributed across nodes, although the class must be on the classpath of each node.
   * Fine-grained filtering and secondary filtering should be applied here as the primary index will
   * only enable coarse-grained filtering.
   *
   * @param index the index to create filters for
   * @return A list of the query filters
   */
  public List<QueryFilter> createFilters(Index index);

  /**
   * Return a set of constraints to apply to the primary index based on the indexing strategy used.
   * The ordering of dimensions within the index strategy must match the order of dimensions in the
   * numeric data returned which will represent the constraints applied to the primary index for the
   * query.
   *
   * @param index The index used to generate the constraints for
   * @return A multi-dimensional numeric data set that represents the constraints for the index
   */
  public List<MultiDimensionalNumericData> getIndexConstraints(Index index);

  /**
   * To simplify query constraints, this allows for the index to be tightly coupled with the
   * constraints if true.
   *
   * @return A flag indicating that this query is specific to an index that must also be provided
   */
  default boolean indexMustBeSpecified() {
    return false;
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/QueryConstraintsFactoryImpl.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.QueryConstraintsFactory; import org.locationtech.geowave.core.store.query.constraints.BasicOrderedConstraintQuery.OrderedConstraints; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation; public class QueryConstraintsFactoryImpl implements QueryConstraintsFactory { public static final QueryConstraintsFactoryImpl SINGLETON_INSTANCE = new QueryConstraintsFactoryImpl(); @Override public QueryConstraints dataIds(final byte[]... 
dataIds) { return new DataIdQuery(dataIds); } @Override public QueryConstraints prefix(final byte[] partitionKey, final byte[] sortKeyPrefix) { return new PrefixIdQuery(partitionKey, sortKeyPrefix); } @Override public QueryConstraints coordinateRanges( final NumericIndexStrategy indexStrategy, final MultiDimensionalCoordinateRangesArray[] coordinateRanges) { return new CoordinateRangeQuery(indexStrategy, coordinateRanges); } @Override public QueryConstraints customConstraints(final Persistable customConstraints) { return new CustomQueryConstraints<>(customConstraints); } @Override public QueryConstraints constraints(final Constraints constraints) { if (constraints instanceof ConstraintsByClass) { // slightly optimized wrapper for ConstraintsByClass return new BasicQueryByClass((ConstraintsByClass) constraints); } else if (constraints instanceof OrderedConstraints) { // slightly optimized wrapper for OrderedConstraints return new BasicOrderedConstraintQuery((OrderedConstraints) constraints); } return new BasicQuery(constraints); } @Override public QueryConstraints constraints( final Constraints constraints, final BasicQueryCompareOperation compareOp) { if (constraints instanceof ConstraintsByClass) { // slightly optimized wrapper for ConstraintsByClass return new BasicQueryByClass((ConstraintsByClass) constraints, compareOp); } else if (constraints instanceof OrderedConstraints) { // slightly optimized wrapper for OrderedConstraints return new BasicOrderedConstraintQuery((OrderedConstraints) constraints, compareOp); } return new BasicQuery(constraints, compareOp); } @Override public QueryConstraints noConstraints() { return new EverythingQuery(); } @Override public QueryConstraints dataIdsByRange( final byte[] startDataIdInclusive, final byte[] endDataIdInclusive) { return new DataIdRangeQuery(startDataIdInclusive, endDataIdInclusive); } @Override public QueryConstraints dataIdsByRangeReverse( final byte[] startDataIdInclusive, final byte[] endDataIdInclusive) { 
return new DataIdRangeQuery(startDataIdInclusive, endDataIdInclusive, true); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/SimpleNumericQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.constraints; import org.apache.commons.lang3.Range; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class SimpleNumericQuery extends BasicOrderedConstraintQuery { public SimpleNumericQuery(final Range range) { super(new OrderedConstraints(range)); } public SimpleNumericQuery() { super(); } @Override protected QueryFilter createQueryFilter( final MultiDimensionalNumericData constraints, final NumericDimensionField[] orderedConstrainedDimensionFields, final NumericDimensionField[] unconstrainedDimensionFields, final Index index) { // this will ignore fine grained filters and just use the row ID in the // index, we don't need fine-grained filtering for simple numeric queries return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/constraints/TypeConstraintQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.constraints;

/**
 * Implemented by query constraints that expose the name of the single type they apply to.
 */
public interface TypeConstraintQuery {
  /** @return the type name this query is constrained to */
  public String getTypeName();
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/AdapterIdQueryFilter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.index.CommonIndexModel; public class AdapterIdQueryFilter implements QueryFilter { private Short adapterId; public AdapterIdQueryFilter() {} public AdapterIdQueryFilter(final short adapterId) { this.adapterId = adapterId; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { return (adapterId == null) || adapterId.equals(persistenceEncoding.getInternalAdapterId()); } @Override public byte[] toBinary() { if (adapterId == null) { return ByteArrayUtils.shortToByteArray((short) 0); } return ByteArrayUtils.shortToByteArray(adapterId); } @Override public void fromBinary(final byte[] bytes) { if (ByteArrayUtils.byteArrayToShort(bytes) == 0) { adapterId = null; } else { adapterId = ByteArrayUtils.byteArrayToShort(bytes); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/BasicQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.FloatCompareUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.BinnedNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** * This filter can perform fine-grained acceptance testing on generic dimensions, but is limited to * only using MBR (min-max in a single dimension, hyper-cubes in multi-dimensional space) */ public class BasicQueryFilter implements QueryFilter { protected interface BasicQueryCompareOp { public boolean compare(double dataMin, double dataMax, double queryMin, double queryMax); } public enum BasicQueryCompareOperation implements BasicQueryCompareOp { CONTAINS { @Override public boolean compare( final 
double dataMin, final double dataMax, final double queryMin, final double queryMax) { // checking if data range contains query range return !((dataMin < queryMin) || (dataMax > queryMax)); } }, OVERLAPS { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { // per definition, it shouldn't allow only boundary points to // overlap (stricter than intersect, see DE-9IM definitions) return !((dataMax <= queryMin) || (dataMin >= queryMax)) && !EQUALS.compare(dataMin, dataMax, queryMin, queryMax) && !CONTAINS.compare(dataMin, dataMax, queryMin, queryMax) && !WITHIN.compare(dataMin, dataMax, queryMin, queryMax); } }, INTERSECTS { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { // similar to overlap but a bit relaxed (allows boundary points // to touch) // this is equivalent to !((dataMax < queryMin) || (dataMin > // queryMax)); return !DISJOINT.compare(dataMin, dataMax, queryMin, queryMax); } }, TOUCHES { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { return (FloatCompareUtils.checkDoublesEqual(dataMin, queryMax)) || (FloatCompareUtils.checkDoublesEqual(dataMax, queryMin)); } }, WITHIN { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { // checking if query range is within the data range // this is equivalent to (queryMin >= dataMin) && (queryMax <= // dataMax); return CONTAINS.compare(queryMin, queryMax, dataMin, dataMax); } }, DISJOINT { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { return ((dataMax < queryMin) || (dataMin > queryMax)); } }, CROSSES { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { // accordingly to 
the def. intersection point must be interior // to both source geometries. // this is not possible in 1D data so always returns false return false; } }, EQUALS { @Override public boolean compare( final double dataMin, final double dataMax, final double queryMin, final double queryMax) { return (FloatCompareUtils.checkDoublesEqual(dataMin, queryMin)) && (FloatCompareUtils.checkDoublesEqual(dataMax, queryMax)); } } }; protected Map> binnedConstraints; protected NumericDimensionField[] dimensionFields; // this is referenced for serialization purposes only protected MultiDimensionalNumericData constraints; protected BasicQueryCompareOperation compareOp = BasicQueryCompareOperation.INTERSECTS; public BasicQueryFilter() {} public BasicQueryFilter( final MultiDimensionalNumericData constraints, final NumericDimensionField[] dimensionFields) { init(constraints, dimensionFields); } public BasicQueryFilter( final MultiDimensionalNumericData constraints, final NumericDimensionField[] dimensionFields, final BasicQueryCompareOperation compareOp) { init(constraints, dimensionFields); this.compareOp = compareOp; } private void init( final MultiDimensionalNumericData constraints, final NumericDimensionField[] dimensionFields) { this.dimensionFields = dimensionFields; binnedConstraints = new HashMap<>(); this.constraints = constraints; final List queries = BinnedNumericDataset.applyBins(constraints, dimensionFields); for (final BinnedNumericDataset q : queries) { final ByteArray binId = new ByteArray(q.getBinId()); List ranges = binnedConstraints.get(binId); if (ranges == null) { ranges = new ArrayList<>(); binnedConstraints.put(binId, ranges); } ranges.add(q); } } protected boolean validateConstraints( final BasicQueryCompareOp op, final MultiDimensionalNumericData queryRange, final MultiDimensionalNumericData dataRange) { final NumericData[] queryRangePerDimension = queryRange.getDataPerDimension(); final Double[] minPerDimension = dataRange.getMinValuesPerDimension(); final 
Double[] maxPerDimension = dataRange.getMaxValuesPerDimension(); boolean ok = true; for (int d = 0; (d < dimensionFields.length) && ok; d++) { ok &= op.compare( minPerDimension[d], maxPerDimension[d], queryRangePerDimension[d].getMin(), queryRangePerDimension[d].getMax()); } return ok; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { if (!(persistenceEncoding instanceof CommonIndexedPersistenceEncoding)) { return false; } final List dataRanges = BinnedNumericDataset.applyBins( ((CommonIndexedPersistenceEncoding) persistenceEncoding).getNumericData( dimensionFields), dimensionFields); if (persistenceEncoding.isAsync()) { return false; } // check that at least one data range overlaps at least one query range for (final BinnedNumericDataset dataRange : dataRanges) { final List queries = binnedConstraints.get(new ByteArray(dataRange.getBinId())); if (queries != null) { for (final MultiDimensionalNumericData query : queries) { if ((query != null) && validateConstraints(compareOp, query, dataRange)) { return true; } } } } return false; } @Override public byte[] toBinary() { int byteBufferLength = VarintUtils.unsignedIntByteLength(compareOp.ordinal()); final int dimensions = Math.min(constraints.getDimensionCount(), dimensionFields.length); byteBufferLength += VarintUtils.unsignedIntByteLength(dimensions); final byte[][] lengthDimensionAndQueryBinaries = new byte[dimensions][]; final NumericData[] dataPerDimension = constraints.getDataPerDimension(); for (int d = 0; d < dimensions; d++) { final NumericDimensionField dimension = dimensionFields[d]; final NumericData data = dataPerDimension[d]; final byte[] dimensionBinary = PersistenceUtils.toBinary(dimension); final int currentDimensionByteBufferLength = (16 + dimensionBinary.length + VarintUtils.unsignedIntByteLength(dimensionBinary.length)); final ByteBuffer buf = ByteBuffer.allocate(currentDimensionByteBufferLength); 
VarintUtils.writeUnsignedInt(dimensionBinary.length, buf); buf.putDouble(data.getMin()); buf.putDouble(data.getMax()); buf.put(dimensionBinary); byteBufferLength += currentDimensionByteBufferLength; lengthDimensionAndQueryBinaries[d] = buf.array(); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); VarintUtils.writeUnsignedInt(compareOp.ordinal(), buf); VarintUtils.writeUnsignedInt(dimensions, buf); for (final byte[] binary : lengthDimensionAndQueryBinaries) { buf.put(binary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); compareOp = BasicQueryCompareOperation.values()[VarintUtils.readUnsignedInt(buf)]; final int numDimensions = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, numDimensions); dimensionFields = new NumericDimensionField[numDimensions]; final NumericData[] data = new NumericData[numDimensions]; for (int d = 0; d < numDimensions; d++) { final int fieldLength = VarintUtils.readUnsignedInt(buf); data[d] = new NumericRange(buf.getDouble(), buf.getDouble()); final byte[] field = ByteArrayUtils.safeRead(buf, fieldLength); dimensionFields[d] = (NumericDimensionField) PersistenceUtils.fromBinary(field); } constraints = new BasicNumericDataset(data); init(constraints, dimensionFields); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/ClientVisibilityFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import java.util.Set; import java.util.function.Predicate; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.data.visibility.VisibilityExpression; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveValue; /** * Provides a visibility filter for UNMERGED rows. The filter only operates on the first * {@link GeoWaveValue} of each row and must be applied prior to row merging. */ public class ClientVisibilityFilter implements Predicate { private final Set auths; public ClientVisibilityFilter(final Set auths) { this.auths = auths; } @Override public boolean test(final GeoWaveRow input) { String visibility = ""; final GeoWaveValue[] fieldValues = input.getFieldValues(); if ((fieldValues.length > 0) && (fieldValues[0].getVisibility() != null)) { visibility = StringUtils.stringFromBinary(input.getFieldValues()[0].getVisibility()); } return VisibilityExpression.evaluate(visibility, auths); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/CoordinateRangeQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRangesArray.ArrayOfArrays; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeCache; import org.locationtech.geowave.core.store.query.constraints.CoordinateRangeUtils.RangeLookupFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class CoordinateRangeQueryFilter implements QueryFilter { private static final Logger LOGGER = LoggerFactory.getLogger(CoordinateRangeQueryFilter.class); protected NumericIndexStrategy indexStrategy; protected RangeCache rangeCache; protected MultiDimensionalCoordinateRangesArray[] coordinateRanges; public CoordinateRangeQueryFilter() {} public CoordinateRangeQueryFilter( final NumericIndexStrategy indexStrategy, final MultiDimensionalCoordinateRangesArray[] coordinateRanges) { this.indexStrategy = indexStrategy; this.coordinateRanges = coordinateRanges; rangeCache = RangeLookupFactory.createMultiRangeLookup(coordinateRanges); } @Override public 
boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { if ((persistenceEncoding == null) || ((persistenceEncoding.getInsertionPartitionKey() == null) && (persistenceEncoding.getInsertionSortKey() == null))) { return false; } return inBounds( persistenceEncoding.getInsertionPartitionKey(), persistenceEncoding.getInsertionSortKey()); } private boolean inBounds(final byte[] partitionKey, final byte[] sortKey) { final MultiDimensionalCoordinates coordinates = indexStrategy.getCoordinatesPerDimension(partitionKey, sortKey); return rangeCache.inBounds(coordinates); } @Override public byte[] toBinary() { final byte[] indexStrategyBytes = PersistenceUtils.toBinary(indexStrategy); final byte[] coordinateRangesBinary = new ArrayOfArrays(coordinateRanges).toBinary(); final ByteBuffer buf = ByteBuffer.allocate( coordinateRangesBinary.length + indexStrategyBytes.length + VarintUtils.unsignedIntByteLength(indexStrategyBytes.length)); VarintUtils.writeUnsignedInt(indexStrategyBytes.length, buf); buf.put(indexStrategyBytes); buf.put(coordinateRangesBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); try { final int indexStrategyLength = VarintUtils.readUnsignedInt(buf); final byte[] indexStrategyBytes = ByteArrayUtils.safeRead(buf, indexStrategyLength); indexStrategy = (NumericIndexStrategy) PersistenceUtils.fromBinary(indexStrategyBytes); final byte[] coordRangeBytes = new byte[buf.remaining()]; buf.get(coordRangeBytes); final ArrayOfArrays arrays = new ArrayOfArrays(); arrays.fromBinary(coordRangeBytes); coordinateRanges = arrays.getCoordinateArrays(); rangeCache = RangeLookupFactory.createMultiRangeLookup(coordinateRanges); } catch (final Exception e) { LOGGER.warn("Unable to read parameters", e); } } public NumericIndexStrategy getIndexStrategy() { return indexStrategy; } public MultiDimensionalCoordinateRangesArray[] getCoordinateRanges() { 
return coordinateRanges; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/DataIdQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/** Accepts only the rows whose data ID is in a fixed set of data IDs. */
public class DataIdQueryFilter implements QueryFilter {
  /** the set of accepted data IDs */
  private Set<ByteArray> dataIds;

  public DataIdQueryFilter() {}

  public DataIdQueryFilter(final byte[][] dataIds) {
    this.dataIds = new HashSet<>();
    for (final byte[] id : dataIds) {
      this.dataIds.add(new ByteArray(id));
    }
  }

  @Override
  public boolean accept(
      final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) {
    return dataIds.contains(new ByteArray(persistenceEncoding.getDataId()));
  }

  @Override
  public byte[] toBinary() {
    // first pass: compute the exact buffer size
    int totalLength = VarintUtils.unsignedIntByteLength(dataIds.size());
    for (final ByteArray id : dataIds) {
      final int idLength = id.getBytes().length;
      totalLength += idLength + VarintUtils.unsignedIntByteLength(idLength);
    }
    // second pass: write the count followed by each length-prefixed ID
    final ByteBuffer buf = ByteBuffer.allocate(totalLength);
    VarintUtils.writeUnsignedInt(dataIds.size(), buf);
    for (final ByteArray id : dataIds) {
      final byte[] idBytes = id.getBytes();
      VarintUtils.writeUnsignedInt(idBytes.length, buf);
      buf.put(idBytes);
    }
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int count = VarintUtils.readUnsignedInt(buf);
    dataIds = new HashSet<>(count);
    for (int i = 0; i < count; i++) {
      final int idLength = VarintUtils.readUnsignedInt(buf);
      dataIds.add(new ByteArray(ByteArrayUtils.safeRead(buf, idLength)));
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/DataIdRangeQueryFilter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.index.CommonIndexModel; import com.google.common.primitives.UnsignedBytes; public class DataIdRangeQueryFilter implements QueryFilter { private byte[] startDataIdInclusive; private byte[] endDataIdInclusive; public DataIdRangeQueryFilter() {} public DataIdRangeQueryFilter( final byte[] startDataIdInclusive, final byte[] endDataIdInclusive) { this.startDataIdInclusive = startDataIdInclusive; this.endDataIdInclusive = endDataIdInclusive; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { return ((startDataIdInclusive == null) || (UnsignedBytes.lexicographicalComparator().compare( startDataIdInclusive, persistenceEncoding.getDataId()) <= 0)) && ((endDataIdInclusive == null) || (UnsignedBytes.lexicographicalComparator().compare( endDataIdInclusive, persistenceEncoding.getDataId()) >= 0)); } public byte[] getStartDataIdInclusive() { return startDataIdInclusive; } public byte[] getEndDataIdInclusive() { return endDataIdInclusive; } @Override public byte[] toBinary() { int size = 1; byte nullIndicator = 0; if (startDataIdInclusive != null) { size += (VarintUtils.unsignedIntByteLength(startDataIdInclusive.length) + startDataIdInclusive.length); } else { nullIndicator++; } if (endDataIdInclusive != null) { size += 
(VarintUtils.unsignedIntByteLength(endDataIdInclusive.length) + endDataIdInclusive.length); } else { nullIndicator += 2; } final ByteBuffer buf = ByteBuffer.allocate(size); buf.put(nullIndicator); if (startDataIdInclusive != null) { VarintUtils.writeUnsignedInt(startDataIdInclusive.length, buf); buf.put(startDataIdInclusive); } if (endDataIdInclusive != null) { VarintUtils.writeUnsignedInt(endDataIdInclusive.length, buf); buf.put(endDataIdInclusive); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte nullIndicator = buf.get(); if ((nullIndicator % 2) == 0) { startDataIdInclusive = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); } else { startDataIdInclusive = null; } if (nullIndicator < 2) { endDataIdInclusive = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); } else { endDataIdInclusive = null; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/DedupeFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** * This filter will perform de-duplication using the combination of data adapter ID and data ID to * determine uniqueness. It can be performed client-side and/or distributed. */ public class DedupeFilter implements QueryFilter { private final Map> adapterIdToVisitedDataIdMap; private boolean dedupAcrossIndices = false; public DedupeFilter() { adapterIdToVisitedDataIdMap = new HashMap<>(); } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { if (!persistenceEncoding.isDeduplicationEnabled()) { // certain types of data such as raster do not intend to be // duplicated // short circuit this check if the row is does not support // deduplication return true; } if (!isDedupAcrossIndices() && !persistenceEncoding.isDuplicated()) { // short circuit this check if the row is not duplicated anywhere // and this is only intended to support a single index return true; } return applyDedupeFilter( persistenceEncoding.getInternalAdapterId(), new ByteArray(persistenceEncoding.getDataId())); } public boolean applyDedupeFilter(final short adapterId, final ByteArray dataId) { synchronized (adapterIdToVisitedDataIdMap) { Set visitedDataIds = adapterIdToVisitedDataIdMap.get(adapterId); if (visitedDataIds == null) { 
visitedDataIds = new HashSet<>(); adapterIdToVisitedDataIdMap.put(adapterId, visitedDataIds); } else if (visitedDataIds.contains(dataId)) { return false; } visitedDataIds.add(dataId); return true; } } public void setDedupAcrossIndices(final boolean dedupAcrossIndices) { this.dedupAcrossIndices = dedupAcrossIndices; } public boolean isDedupAcrossIndices() { return dedupAcrossIndices; } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/ExpressionQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter;

import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AbstractAdapterPersistenceEncoding;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.MapRowBuilder;
import org.locationtech.geowave.core.store.api.IndexFieldMapper;
import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;
import org.locationtech.geowave.core.store.data.PersistentDataset;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.geowave.core.store.query.filter.expression.Filter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Accepts entries that pass the given GeoWave filter expression.
 */
// NOTE(review): generic type parameters appear to have been stripped from this
// file's field declarations during extraction (e.g. "Map>" below) — confirm the
// intended parameterizations against the upstream source before relying on them.
public class ExpressionQueryFilter implements QueryFilter {
  private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionQueryFilter.class);
  // the adapter whose rows this filter evaluates; null-tolerated by accept()
  private InternalDataAdapter adapter;
  // mapping from adapter fields to index fields; also serves as the lock for lazy init
  private AdapterToIndexMapping indexMapping;
  // the filter expression to evaluate against each row's field values
  private Filter filter;
  // lazily populated: the field names the filter expression references
  private Set referencedFields = null;
  // lazily populated: adapter field name -> the index field mapper that produces it
  private Map> fieldToIndexFieldMap = null;
  // guards the lazy initialization of the two collections above;
  // NOTE(review): read unsynchronized in accept() and not volatile — this is
  // double-checked locking without a memory barrier; confirm single-threaded use
  // or acceptable benign re-initialization
  private boolean referencedFieldsInitialized = false;

  public ExpressionQueryFilter() {
    super();
  }

  /**
   * @param filter the expression to evaluate
   * @param adapter the adapter the encoded rows belong to
   * @param indexMapping the adapter-to-index field mapping used to resolve index-backed fields
   */
  public ExpressionQueryFilter(
      final Filter filter,
      final InternalDataAdapter adapter,
      final AdapterToIndexMapping indexMapping) {
    this.filter = filter;
    this.adapter = adapter;
    this.indexMapping = indexMapping;
  }

  public String getTypeName() {
    return adapter.getTypeName();
  }

  public Filter getFilter() {
    return filter;
  }

  /**
   * Lazily computes the set of fields referenced by the filter expression and the lookup from
   * adapter field name to its index field mapper. Synchronizes on {@code indexMapping} so
   * concurrent callers initialize at most once.
   */
  private void initReferencedFields() {
    synchronized (indexMapping) {
      if (!referencedFieldsInitialized) {
        this.referencedFields = Sets.newHashSet();
        this.fieldToIndexFieldMap = Maps.newHashMap();
        filter.addReferencedFields(referencedFields);
        // index a mapper under each adapter field it can produce
        for (final IndexFieldMapper mapper : indexMapping.getIndexFieldMappers()) {
          for (final String field : mapper.getAdapterFields()) {
            fieldToIndexFieldMap.put(field, mapper);
          }
        }
        referencedFieldsInitialized = true;
      }
    }
  }

  /**
   * Evaluates the filter expression against the row's field values, resolving each referenced
   * field from (in order) index-mapped common data, plain common data, or the adapter-extended
   * data. Returns true (accept) when any of the collaborators is unset.
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  @Override
  public boolean accept(
      final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) {
    if ((filter != null) && (indexModel != null) && (adapter != null) && (indexMapping != null)) {
      final Map fieldValues = Maps.newHashMap();
      if (!referencedFieldsInitialized) {
        initReferencedFields();
      }
      final PersistentDataset commonData = persistenceEncoding.getCommonData();
      PersistentDataset adapterExtendedValues = null;
      for (final String field : referencedFields) {
        if (fieldValues.containsKey(field)) {
          // already produced by an earlier mapper invocation
          continue;
        }
        if (fieldToIndexFieldMap.containsKey(field)) {
          // reconstruct the adapter field from its index representation;
          // the mapper may populate several fieldValues entries at once
          final IndexFieldMapper mapper = fieldToIndexFieldMap.get(field);
          final Object indexValue = commonData.getValue(mapper.indexFieldName());
          ((IndexFieldMapper) mapper).toAdapter(indexValue, new MapRowBuilder(fieldValues));
        } else {
          final Object value = commonData.getValue(field);
          if (value != null) {
            fieldValues.put(field, value);
          } else {
            // fall back to adapter-extended data, decoded at most once per row
            if (adapterExtendedValues == null) {
              adapterExtendedValues = new MultiFieldPersistentDataset<>();
              if (persistenceEncoding instanceof AbstractAdapterPersistenceEncoding) {
                ((AbstractAdapterPersistenceEncoding) persistenceEncoding).convertUnknownValues(
                    adapter,
                    indexModel);
                final PersistentDataset existingExtValues =
                    ((AbstractAdapterPersistenceEncoding) persistenceEncoding).getAdapterExtendedData();
                if (persistenceEncoding.isAsync()) {
                  // async decode pending; reject rather than evaluate incomplete data
                  return false;
                }
                if (existingExtValues != null) {
                  adapterExtendedValues.addValues(existingExtValues.getValues());
                }
              }
            }
            fieldValues.put(field, adapterExtendedValues.getValue(field));
          }
        }
      }
      return filter.evaluate(fieldValues);
    }
    // missing filter/model/adapter/mapping: accept everything
    return true;
  }

  @Override
  public byte[] toBinary() {
    // each collaborator serializes to empty bytes when null; filter and adapter
    // are length-prefixed, the mapping occupies the remainder of the buffer
    byte[] filterBytes;
    if (filter == null) {
      LOGGER.warn("Filter is null");
      filterBytes = new byte[] {};
    } else {
      filterBytes = PersistenceUtils.toBinary(filter);
    }
    byte[] adapterBytes;
    if (adapter != null) {
      adapterBytes = PersistenceUtils.toBinary(adapter);
    } else {
      LOGGER.warn("Feature Data Adapter is null");
      adapterBytes = new byte[] {};
    }
    byte[] mappingBytes;
    if (indexMapping != null) {
      mappingBytes = PersistenceUtils.toBinary(indexMapping);
    } else {
      LOGGER.warn("Adapter to index mapping is null");
      mappingBytes = new byte[] {};
    }
    final ByteBuffer buf =
        ByteBuffer.allocate(
            filterBytes.length
                + adapterBytes.length
                + mappingBytes.length
                + VarintUtils.unsignedIntByteLength(filterBytes.length)
                + VarintUtils.unsignedIntByteLength(adapterBytes.length));
    VarintUtils.writeUnsignedInt(filterBytes.length, buf);
    buf.put(filterBytes);
    VarintUtils.writeUnsignedInt(adapterBytes.length, buf);
    buf.put(adapterBytes);
    buf.put(mappingBytes);
    return buf.array();
  }

  @SuppressWarnings({"rawtypes", "unchecked"})
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int filterBytesLength = VarintUtils.readUnsignedInt(buf);
    if (filterBytesLength > 0) {
      final byte[] filterBytes = ByteArrayUtils.safeRead(buf, filterBytesLength);
      filter = (Filter) PersistenceUtils.fromBinary(filterBytes);
    } else {
      LOGGER.warn("Filter is empty bytes");
      filter = null;
    }
    final int adapterBytesLength = VarintUtils.readUnsignedInt(buf);
    if (adapterBytesLength > 0) {
      final byte[] adapterBytes = ByteArrayUtils.safeRead(buf, adapterBytesLength);
      try {
        adapter = (InternalDataAdapter) PersistenceUtils.fromBinary(adapterBytes);
      } catch (final Exception e) {
        throw new IllegalArgumentException("Unable to read adapter from binary", e);
      }
    } else {
      LOGGER.warn("Data Adapter is empty bytes");
      adapter = null;
    }
    // the mapping is whatever remains after the two length-prefixed sections
    final int mappingBytesLength = buf.remaining();
    if (mappingBytesLength > 0) {
      final byte[] mappingBytes = ByteArrayUtils.safeRead(buf, mappingBytesLength);
      try {
        indexMapping = (AdapterToIndexMapping) PersistenceUtils.fromBinary(mappingBytes);
      } catch (final Exception e) {
        throw new IllegalArgumentException(
            "Unable to read adapter to index mapping from binary",
            e);
      }
    } else {
      LOGGER.warn("Adapter to index mapping is empty bytes");
      indexMapping = null;
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/FilterList.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** * This class wraps a list of filters into a single filter such that if any one filter fails this * class will fail acceptance. */ public class FilterList implements QueryFilter { protected List filters; protected boolean logicalAnd = true; public FilterList() {} protected FilterList(final boolean logicalAnd) { this.logicalAnd = logicalAnd; } public FilterList(final List filters) { this.filters = filters; } public FilterList(final boolean logicalAnd, final List filters) { this.logicalAnd = logicalAnd; this.filters = filters; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding entry) { if (filters == null) { return true; } for (final QueryFilter filter : filters) { final boolean ok = filter.accept(indexModel, entry); if (!ok && logicalAnd) { return false; } if (ok && !logicalAnd) { return true; } } return logicalAnd; } @Override public byte[] toBinary() { int byteBufferLength = VarintUtils.unsignedIntByteLength(filters.size()) + 1; final List filterBinaries = new ArrayList<>(filters.size()); for (final QueryFilter filter : filters) { final byte[] filterBinary = PersistenceUtils.toBinary(filter); byteBufferLength 
+= (VarintUtils.unsignedIntByteLength(filterBinary.length) + filterBinary.length); filterBinaries.add(filterBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteBufferLength); buf.put((byte) (logicalAnd ? 1 : 0)); VarintUtils.writeUnsignedInt(filters.size(), buf); for (final byte[] filterBinary : filterBinaries) { VarintUtils.writeUnsignedInt(filterBinary.length, buf); buf.put(filterBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); logicalAnd = buf.get() > 0; final int numFilters = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, numFilters); filters = new ArrayList<>(numFilters); for (int i = 0; i < numFilters; i++) { final byte[] filter = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); filters.add((QueryFilter) PersistenceUtils.fromBinary(filter)); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/FixedResolutionSubsampleQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; public interface FixedResolutionSubsampleQueryFilter { } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/InsertionIdQueryFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter;

import java.nio.ByteBuffer;
import java.util.Objects;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/**
 * Filter that accepts only the entry with an exact insertion ID: partition key, sort key, and
 * data ID must all match.
 */
public class InsertionIdQueryFilter implements QueryFilter {
  private byte[] partitionKey;
  private byte[] sortKey;
  private byte[] dataId;

  public InsertionIdQueryFilter() {}

  public InsertionIdQueryFilter(
      final byte[] partitionKey,
      final byte[] sortKey,
      final byte[] dataId) {
    this.partitionKey = partitionKey;
    this.sortKey = sortKey;
    this.dataId = dataId;
  }

  /** Normalize a possibly-null key from the encoding to an empty array for comparison. */
  private static byte[] orEmpty(final byte[] key) {
    return (key != null) ? key : new byte[] {};
  }

  @Override
  public boolean accept(
      final CommonIndexModel indexModel,
      final IndexedPersistenceEncoding persistenceEncoding) {
    // All three components must match byte-for-byte; comparison short-circuits left to right.
    return Objects.deepEquals(partitionKey, orEmpty(persistenceEncoding.getInsertionPartitionKey()))
        && Objects.deepEquals(sortKey, orEmpty(persistenceEncoding.getInsertionSortKey()))
        && Objects.deepEquals(dataId, orEmpty(persistenceEncoding.getDataId()));
  }

  @Override
  public byte[] toBinary() {
    // Layout: [varint partition length][partition][varint sort length][sort][data id to end].
    // The data ID needs no length prefix because it occupies the remainder of the buffer.
    final int partitionLengthBytes = VarintUtils.unsignedIntByteLength(partitionKey.length);
    final int sortLengthBytes = VarintUtils.unsignedIntByteLength(sortKey.length);
    final int totalBytes =
        partitionLengthBytes
            + partitionKey.length
            + sortLengthBytes
            + sortKey.length
            + dataId.length;
    final ByteBuffer buffer = ByteBuffer.allocate(totalBytes);
    VarintUtils.writeUnsignedInt(partitionKey.length, buffer);
    buffer.put(partitionKey);
    VarintUtils.writeUnsignedInt(sortKey.length, buffer);
    buffer.put(sortKey);
    buffer.put(dataId);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    partitionKey = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));
    sortKey = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));
    // Whatever remains after the two length-prefixed keys is the data ID.
    dataId = new byte[buffer.remaining()];
    buffer.get(dataId);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/PrefixIdQueryFilter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter;

import java.nio.ByteBuffer;
import java.util.Arrays;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.index.CommonIndexModel;

/**
 * Filter that accepts entries whose partition key matches exactly and whose sort key starts with
 * the configured prefix.
 */
public class PrefixIdQueryFilter implements QueryFilter {
  private byte[] partitionKey;
  private byte[] sortKeyPrefix;

  public PrefixIdQueryFilter() {}

  public PrefixIdQueryFilter(final byte[] partitionKey, final byte[] sortKeyPrefix) {
    this.partitionKey = (partitionKey != null) ? partitionKey : new byte[0];
    this.sortKeyPrefix = sortKeyPrefix;
  }

  @Override
  public boolean accept(
      final CommonIndexModel indexModel,
      final IndexedPersistenceEncoding persistenceEncoding) {
    final byte[] otherPartitionKey = persistenceEncoding.getInsertionPartitionKey();
    final byte[] otherPartitionKeyBytes =
        (otherPartitionKey != null) ? otherPartitionKey : new byte[0];
    final byte[] sortKey = persistenceEncoding.getInsertionSortKey();
    // A missing or too-short sort key cannot match the prefix. Without this guard,
    // Arrays.copyOf zero-pads a short key, which falsely matched prefixes ending in 0x00
    // bytes, and a null sort key threw a NullPointerException.
    if ((sortKey == null) || (sortKey.length < sortKeyPrefix.length)) {
      return false;
    }
    return (Arrays.equals(sortKeyPrefix, Arrays.copyOf(sortKey, sortKeyPrefix.length))
        && Arrays.equals(partitionKey, otherPartitionKeyBytes));
  }

  @Override
  public byte[] toBinary() {
    // Layout: [varint partition length][partition][prefix to end]. The prefix needs no length
    // prefix because it occupies the remainder of the buffer.
    final ByteBuffer buf =
        ByteBuffer.allocate(
            partitionKey.length
                + sortKeyPrefix.length
                + VarintUtils.unsignedIntByteLength(partitionKey.length));
    VarintUtils.writeUnsignedInt(partitionKey.length, buf);
    buf.put(partitionKey);
    buf.put(sortKeyPrefix);
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    partitionKey = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
    sortKeyPrefix = new byte[buf.remaining()];
    buf.get(sortKeyPrefix);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/QueryFilter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.index.CommonIndexModel; /** * A simple filter interface to determine inclusion/exclusion based on a generic persistence * encoding. Client-side filters will be given an AdapterPersistenceEncoding but distributable * filters will be given a generic PersistenceEncoding. */ public interface QueryFilter extends Persistable { public boolean accept( CommonIndexModel indexModel, IndexedPersistenceEncoding persistenceEncoding); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/And.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import com.google.common.base.Predicates;
import com.google.common.collect.Sets;

/**
 * Combines multiple filters using the AND operator. The expression will only evaluate to true if
 * all child filters also resolve to true.
 *
 * NOTE(review): generic type parameters in this file appear to have been stripped by extraction
 * (e.g. raw Map/Set/DataTypeAdapter and "public >" below) — compare against upstream before
 * compiling.
 */
public class And extends MultiFilterOperator {

  public And() {}

  public And(final Filter... children) {
    super(children);
  }

  // True only if every child accepts the given field-value map.
  @Override
  public boolean evaluate(final Map fieldValues) {
    return Arrays.stream(getChildren()).allMatch(f -> f.evaluate(fieldValues));
  }

  // True only if every child accepts the given adapter entry.
  @Override
  public boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    return Arrays.stream(getChildren()).allMatch(f -> f.evaluate(adapter, entry));
  }

  @Override
  public > FilterConstraints getConstraints(
      final Class constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set indexedFields) {
    final Filter[] children = getChildren();
    if (children.length == 0) {
      // No children: an empty AND imposes no index constraints.
      return FilterConstraints.empty();
    }
    // Fold the children's constraints together with AND, starting from the first child.
    final FilterConstraints finalConstraints =
        children[0].getConstraints(
            constraintClass,
            statsStore,
            adapter,
            indexMapping,
            index,
            indexedFields);
    for (int i = 1; i < children.length; i++) {
      finalConstraints.and(
          children[i].getConstraints(
              constraintClass,
              statsStore,
              adapter,
              indexMapping,
              index,
              indexedFields));
    }
    return finalConstraints;
  }

  // Union of all fields any child can constrain.
  @Override
  public Set getConstrainableFields() {
    return Arrays.stream(getChildren()).map(Filter::getConstrainableFields).collect(
        () -> Sets.newHashSet(),
        Set::addAll,
        Set::addAll);
  }

  @Override
  public Filter removePredicatesForFields(Set fields) {
    // Ask each child to drop predicates on the given fields; children that become empty return
    // null and are filtered out here.
    Filter[] updatedChildren =
        Arrays.stream(getChildren()).map(f -> f.removePredicatesForFields(fields)).filter(
            Predicates.notNull()).toArray(Filter[]::new);
    if (updatedChildren.length == 0) {
      // Everything was removed: the whole AND disappears.
      return null;
    } else if (updatedChildren.length == 1) {
      // Single survivor: unwrap it instead of keeping a one-child AND.
      return updatedChildren[0];
    }
    return new And(updatedChildren);
  }

  // Renders children joined by " AND ", parenthesizing nested multi-filter operators.
  @Override
  public String toString() {
    return Arrays.stream(getChildren()).map(
        f -> f instanceof MultiFilterOperator ? "(" + f.toString() + ")" : f.toString()).collect(
            Collectors.joining(" AND "));
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Between.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.List;
import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import com.google.common.collect.Sets;

/**
 * An abstract between implementation for any comparable object. Subclasses define bound inclusion
 * semantics via {@code evaluateInternal} and index-constraint conversion via {@code toConstraints}.
 *
 * NOTE(review): generic type parameters appear stripped by extraction (the declaration below reads
 * "Between, C extends Comparable>") — compare against upstream before compiling.
 *
 * @param the expression class
 * @param the comparable class
 */
public abstract class Between, C extends Comparable> implements Predicate {

  // The value under test and the two bound expressions.
  protected E valueExpr;
  protected E lowerBoundExpr;
  protected E upperBoundExpr;

  public Between() {}

  /**
   * Construct a new Between instance with the given value, lower bound, and upper bound
   * expressions.
   *
   * @param value the expression that represents the value to compare
   * @param lowerBound the expression that represents the lower bound
   * @param upperBound the expression that represents the upper bound
   */
  public Between(final E value, final E lowerBound, final E upperBound) {
    valueExpr = value;
    lowerBoundExpr = lowerBound;
    upperBoundExpr = upperBound;
  }

  public E getValue() {
    return valueExpr;
  }

  public E getLowerBound() {
    return lowerBoundExpr;
  }

  public E getUpperBound() {
    return upperBoundExpr;
  }

  // Collects fields referenced by the value and both bounds.
  @Override
  public void addReferencedFields(final Set fields) {
    valueExpr.addReferencedFields(fields);
    lowerBoundExpr.addReferencedFields(fields);
    upperBoundExpr.addReferencedFields(fields);
  }

  // Only constrainable when the value is a field reference and both bounds are literals.
  @Override
  public Set getConstrainableFields() {
    if ((valueExpr instanceof FieldValue)
        && lowerBoundExpr.isLiteral()
        && upperBoundExpr.isLiteral()) {
      return Sets.newHashSet(((FieldValue) valueExpr).getFieldName());
    }
    return Sets.newHashSet();
  }

  @SuppressWarnings("unchecked")
  @Override
  public > FilterConstraints getConstraints(
      final Class constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set indexedFields) {
    // Constraints require: value is an indexed field, both bounds are literals, and the index
    // supports this predicate type.
    if ((valueExpr instanceof FieldValue)
        && indexedFields.contains(((FieldValue) valueExpr).getFieldName())
        && lowerBoundExpr.isLiteral()
        && upperBoundExpr.isLiteral()
        && indexSupported(index)) {
      // Literals can be evaluated without an entry (null adapter/entry).
      final C lowerBound = lowerBoundExpr.evaluateValue(null, null);
      final C upperBound = upperBoundExpr.evaluateValue(null, null);
      if ((lowerBound != null)
          && (upperBound != null)
          && constraintClass.isAssignableFrom(lowerBound.getClass())
          && constraintClass.isAssignableFrom(upperBound.getClass())) {
        return FilterConstraints.of(
            adapter,
            indexMapping,
            index,
            ((FieldValue) valueExpr).getFieldName(),
            (IndexFieldConstraints) toConstraints(lowerBound, upperBound));
      }
    }
    return FilterConstraints.empty();
  }

  /** @return whether the given index can service this between predicate */
  protected abstract boolean indexSupported(final Index index);

  /** Convert evaluated bounds into index field constraints. */
  protected abstract IndexFieldConstraints toConstraints(final C lowerBound, final C upperBound);

  @Override
  public boolean evaluate(final Map fieldValues) {
    final C value = valueExpr.evaluateValue(fieldValues);
    final C lowerBound = lowerBoundExpr.evaluateValue(fieldValues);
    final C upperBound = upperBoundExpr.evaluateValue(fieldValues);
    // Any null operand makes the predicate false (SQL-like null semantics).
    if ((value == null) || (lowerBound == null) || (upperBound == null)) {
      return false;
    }
    return evaluateInternal(value, lowerBound, upperBound);
  }

  @Override
  public boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    final C value = valueExpr.evaluateValue(adapter, entry);
    final C lowerBound = lowerBoundExpr.evaluateValue(adapter, entry);
    final C upperBound = upperBoundExpr.evaluateValue(adapter, entry);
    // Any null operand makes the predicate false (SQL-like null semantics).
    if ((value == null) || (lowerBound == null) || (upperBound == null)) {
      return false;
    }
    return evaluateInternal(value, lowerBound, upperBound);
  }

  @Override
  public Filter removePredicatesForFields(Set fields) {
    // If every field this predicate references is being removed, drop the predicate entirely.
    final Set referencedFields = Sets.newHashSet();
    valueExpr.addReferencedFields(referencedFields);
    lowerBoundExpr.addReferencedFields(referencedFields);
    upperBoundExpr.addReferencedFields(referencedFields);
    if (fields.containsAll(referencedFields)) {
      return null;
    }
    return this;
  }

  /** Subclass hook defining bound-inclusion semantics for the comparison. */
  protected abstract boolean evaluateInternal(
      final C value,
      final C lowerBound,
      final C upperBound);

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder(valueExpr.toString());
    sb.append(" BETWEEN ");
    sb.append(lowerBoundExpr.toString());
    sb.append(" AND ");
    sb.append(upperBoundExpr.toString());
    return sb.toString();
  }

  // Serialized as [value, lowerBound, upperBound] in order; fromBinary relies on this order.
  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(new Persistable[] {valueExpr, lowerBoundExpr, upperBoundExpr});
  }

  @SuppressWarnings("unchecked")
  @Override
  public void fromBinary(final byte[] bytes) {
    final List expressions = PersistenceUtils.fromBinaryAsList(bytes);
    valueExpr = (E) expressions.get(0);
    lowerBoundExpr = (E) expressions.get(1);
    upperBoundExpr = (E) expressions.get(2);
  }
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BinaryPredicate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import com.beust.jcommander.internal.Sets; /** * An abstract predicate for comparing two expressions of the same type. * * @param the expression class */ public abstract class BinaryPredicate> implements Predicate { protected E expression1; protected E expression2; public BinaryPredicate() {} public BinaryPredicate(final E expr1, final E expr2) { expression1 = expr1; expression2 = expr2; } public E getExpression1() { return expression1; } public E getExpression2() { return expression2; } @Override public Filter removePredicatesForFields(Set fields) { final Set referencedFields = Sets.newHashSet(); expression1.addReferencedFields(referencedFields); expression2.addReferencedFields(referencedFields); if (fields.containsAll(referencedFields)) { return null; } return this; } @Override public void addReferencedFields(final Set fields) { expression1.addReferencedFields(fields); expression2.addReferencedFields(fields); } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(new Persistable[] {expression1, expression2}); } @SuppressWarnings("unchecked") @Override public void fromBinary(final byte[] bytes) { final List expressions = PersistenceUtils.fromBinaryAsList(bytes); expression1 = (E) expressions.get(0); expression2 = (E) expressions.get(1); } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BooleanExpression.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * An expression representing a boolean value. Also acts as a predicate since the expression itself
 * can be interpreted as either true or false. Any non-boolean object will evaluate to {@code true}
 * if it is non-null.
 */
public interface BooleanExpression extends GenericExpression, Predicate {

  // Predicate evaluation delegates to the expression value and interprets it as a boolean.
  @Override
  default boolean evaluate(final Map fieldValues) {
    return (Boolean) evaluateValue(fieldValues);
  }

  @Override
  default boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    return (Boolean) evaluateValue(adapter, entry);
  }

  /**
   * Evaluate an object to determine if it should be interpreted as {@code true} or {@code false}.
   * Truthiness rules: null is false; a Boolean is its own value; a Number is true iff its long
   * value is nonzero; anything else (non-null) is true.
   *
   * @param value the object to evaluate
   * @return the evaluated boolean
   */
  public static boolean evaluateObject(final Object value) {
    if (value == null) {
      return false;
    }
    if (value instanceof Boolean) {
      return value.equals(true);
    }
    if (value instanceof Number) {
      // Note: longValue() truncates fractional parts, so e.g. 0.5 evaluates as false here.
      return ((Number) value).longValue() != 0;
    }
    // Any non-null value should be considered true
    return true;
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BooleanFieldValue.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import com.google.common.collect.Sets; /** * A field value implementation for interpreting any adapter field as a boolean. Non-boolean field * values will evaluate to {@code true} if they are non-null. */ public class BooleanFieldValue extends FieldValue implements BooleanExpression { public BooleanFieldValue() {} public BooleanFieldValue(final String fieldName) { super(fieldName); } @Override public void addReferencedFields(final Set fields) { fields.add(fieldName); } @Override public Boolean evaluateValue(final Map fieldValues) { return BooleanExpression.evaluateObject(fieldValues.get(fieldName)); } @Override public Boolean evaluateValue(final DataTypeAdapter adapter, final T entry) { return BooleanExpression.evaluateObject(adapter.getFieldValue(entry, fieldName)); } @Override protected Object evaluateValueInternal(final Object value) { return BooleanExpression.evaluateObject(value); } public static BooleanFieldValue of(final String fieldName) { return new BooleanFieldValue(fieldName); } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) {} @Override public Set getConstrainableFields() { return Sets.newHashSet(fieldName); } @Override public Filter removePredicatesForFields(Set fields) { if (fields.contains(fieldName)) { return null; } return this; } } 
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/BooleanLiteral.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import com.google.common.collect.Sets;

/**
 * A literal implementation that evaluates to either {@code true} or {@code false}. The wrapped
 * object is coerced with BooleanExpression.evaluateObject, so any non-null, non-falsy object
 * behaves as TRUE.
 */
public class BooleanLiteral extends Literal implements BooleanExpression, Predicate {

  public BooleanLiteral() {}

  public BooleanLiteral(final Object literal) {
    super(literal);
  }

  // Literals need no preparation; they are independent of adapter and index.
  @Override
  public void prepare(
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {}

  // A literal references no fields.
  @Override
  public void addReferencedFields(final Set fields) {}

  @Override
  public Boolean evaluateValue(final Map fieldValues) {
    return BooleanExpression.evaluateObject(literal);
  }

  @Override
  public Boolean evaluateValue(final DataTypeAdapter adapter, final T entry) {
    return BooleanExpression.evaluateObject(literal);
  }

  // A literal never depends on any field, so field removal never eliminates it.
  @Override
  public Filter removePredicatesForFields(Set fields) {
    return this;
  }

  @Override
  public Set getConstrainableFields() {
    return Sets.newHashSet();
  }

  @Override
  public String toString() {
    return BooleanExpression.evaluateObject(literal) ? "TRUE" : "FALSE";
  }

  /** Convenience factory. */
  public static BooleanLiteral of(final Object object) {
    return new BooleanLiteral(object);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/ComparableExpression.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

/**
 * An extension of the expression interface for comparable expression types.
 *
 * NOTE(review): generic type parameters appear stripped by extraction (the declaration below reads
 * "ComparableExpression extends Expression") — compare against upstream before compiling.
 *
 * @param the comparable class
 */
public interface ComparableExpression extends Expression {

  /**
   * Create a predicate that tests to see if this expression is less than the provided object. The
   * operand can be either another expression or should evaluate to a literal of the same type.
   *
   * @param other the object to test against
   * @return the less than predicate
   */
  Predicate isLessThan(final Object other);

  /**
   * Create a predicate that tests to see if this expression is less than or equal to the provided
   * object. The operand can be either another expression or should evaluate to a literal of the
   * same type.
   *
   * @param other the object to test against
   * @return the less than or equal to predicate
   */
  Predicate isLessThanOrEqualTo(final Object other);

  /**
   * Create a predicate that tests to see if this expression is greater than the provided object.
   * The operand can be either another expression or should evaluate to a literal of the same type.
   *
   * @param other the object to test against
   * @return the greater than predicate
   */
  Predicate isGreaterThan(final Object other);

  /**
   * Create a predicate that tests to see if this expression is greater than or equal to the
   * provided object. The operand can be either another expression or should evaluate to a literal
   * of the same type.
   *
   * @param other the object to test against
   * @return the greater than or equal to predicate
   */
  Predicate isGreaterThanOrEqualTo(final Object other);

  /**
   * Create a predicate that tests to see if this expression is between the provided lower and upper
   * bounds. The operands can be either other expressions or should evaluate to literals of the same
   * type.
   *
   * @param lowerBound the lower bound to test against
   * @param upperBound the upper bound to test against
   * @return the between predicate
   */
  Predicate isBetween(final Object lowerBound, final Object upperBound);
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/ComparisonOperator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.nio.ByteBuffer; import java.util.List; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import com.google.common.collect.Lists; import com.google.common.collect.Sets; /** * Abstract implementation for comparing two expressions that evaluate to comparable objects. 
* * @param the expression class * @param the comparable class */ public abstract class ComparisonOperator, C extends Comparable> extends BinaryPredicate { public enum CompareOp { LESS_THAN, LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL, EQUAL_TO, NOT_EQUAL_TO } protected CompareOp compareOperator; public ComparisonOperator() {} public ComparisonOperator( final E expression1, final E expression2, final CompareOp compareOperator) { super(expression1, expression2); this.compareOperator = compareOperator; } public CompareOp getCompareOp() { return compareOperator; } @Override public boolean evaluate(final Map fieldValues) { final C value1 = expression1.evaluateValue(fieldValues); final C value2 = expression2.evaluateValue(fieldValues); return evaluateValues(value1, value2); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { final C value1 = expression1.evaluateValue(adapter, entry); final C value2 = expression2.evaluateValue(adapter, entry); return evaluateValues(value1, value2); } private boolean evaluateValues(final C value1, final C value2) { if (value1 == null) { if (compareOperator.equals(CompareOp.EQUAL_TO)) { return value2 == null; } if (compareOperator.equals(CompareOp.NOT_EQUAL_TO)) { return value2 != null; } return false; } if (value2 == null) { if (compareOperator.equals(CompareOp.EQUAL_TO)) { return false; } if (compareOperator.equals(CompareOp.NOT_EQUAL_TO)) { return true; } return false; } switch (compareOperator) { case EQUAL_TO: return equalTo(value1, value2); case NOT_EQUAL_TO: return notEqualTo(value1, value2); case LESS_THAN: return lessThan(value1, value2); case LESS_THAN_OR_EQUAL: return lessThanOrEqual(value1, value2); case GREATER_THAN: return greaterThan(value1, value2); case GREATER_THAN_OR_EQUAL: return greaterThanOrEqual(value1, value2); } return false; } protected abstract boolean equalTo(final C value1, final C value2); protected abstract boolean notEqualTo(final C value1, final C value2); 
protected abstract boolean lessThan(final C value1, final C value2); protected abstract boolean lessThanOrEqual(final C value1, final C value2); protected abstract boolean greaterThan(final C value1, final C value2); protected abstract boolean greaterThanOrEqual(final C value1, final C value2); protected abstract boolean indexSupported(final Index index); protected FilterRange toFilterRange( final C start, final C end, final boolean startInclusve, final boolean endInclusive) { return FilterRange.of(start, end, startInclusve, endInclusive, isExact()); } protected boolean isExact() { return true; } protected abstract IndexFieldConstraints toFieldConstraints(final List> ranges); @Override public Set getConstrainableFields() { if ((expression1 instanceof FieldValue) && expression2.isLiteral()) { return Sets.newHashSet(((FieldValue) expression1).getFieldName()); } else if ((expression2 instanceof FieldValue) && expression1.isLiteral()) { return Sets.newHashSet(((FieldValue) expression2).getFieldName()); } return Sets.newHashSet(); } @SuppressWarnings("unchecked") @Override public > FilterConstraints getConstraints( final Class constraintClass, final DataStatisticsStore statsStore, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final Set indexedFields) { final List> ranges = Lists.newArrayList(); if (!indexSupported(index)) { return FilterConstraints.empty(); } C literal = null; String fieldName = null; CompareOp compareOp = this.compareOperator; if ((expression1 instanceof FieldValue) && indexedFields.contains(((FieldValue) expression1).getFieldName()) && expression2.isLiteral()) { literal = expression2.evaluateValue(null, null); fieldName = ((FieldValue) expression1).getFieldName(); } else if ((expression2 instanceof FieldValue) && indexedFields.contains(((FieldValue) expression2).getFieldName()) && expression1.isLiteral()) { literal = expression1.evaluateValue(null, null); fieldName = ((FieldValue) 
expression2).getFieldName(); switch (compareOperator) { case LESS_THAN: compareOp = CompareOp.GREATER_THAN; break; case LESS_THAN_OR_EQUAL: compareOp = CompareOp.GREATER_THAN_OR_EQUAL; break; case GREATER_THAN: compareOp = CompareOp.LESS_THAN; break; case GREATER_THAN_OR_EQUAL: compareOp = CompareOp.LESS_THAN_OR_EQUAL; break; default: break; } } else { return FilterConstraints.empty(); } if (literal != null) { if (!constraintClass.isAssignableFrom(literal.getClass())) { return FilterConstraints.empty(); } switch (compareOp) { case LESS_THAN: ranges.add(toFilterRange(null, literal, true, false)); break; case LESS_THAN_OR_EQUAL: ranges.add(toFilterRange(null, literal, true, true)); break; case GREATER_THAN: ranges.add(toFilterRange(literal, null, false, true)); break; case GREATER_THAN_OR_EQUAL: ranges.add(toFilterRange(literal, null, true, true)); break; case EQUAL_TO: ranges.add(toFilterRange(literal, literal, true, true)); break; case NOT_EQUAL_TO: ranges.add(toFilterRange(null, literal, true, false)); ranges.add(toFilterRange(literal, null, false, true)); break; } } return FilterConstraints.of( adapter, indexMapping, index, fieldName, (IndexFieldConstraints) toFieldConstraints(ranges)); } @Override public String toString() { StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(' '); switch (compareOperator) { case LESS_THAN: sb.append("<"); break; case LESS_THAN_OR_EQUAL: sb.append("<="); break; case GREATER_THAN: sb.append(">"); break; case GREATER_THAN_OR_EQUAL: sb.append(">="); break; case EQUAL_TO: sb.append("="); break; case NOT_EQUAL_TO: sb.append("<>"); break; } sb.append(' '); sb.append(expression2.toString()); return sb.toString(); } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final ByteBuffer buffer = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(compareOperator.ordinal()) + superBinary.length); VarintUtils.writeUnsignedInt(compareOperator.ordinal(), buffer); buffer.put(superBinary); 
return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); compareOperator = CompareOp.values()[VarintUtils.readUnsignedInt(buffer)]; final byte[] superBinary = new byte[buffer.remaining()]; buffer.get(superBinary); super.fromBinary(superBinary); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Exclude.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import com.google.common.collect.Sets; /** * A filter that implementation always evaluates to {@code false}. */ public class Exclude implements Filter { public Exclude() {} @Override public void addReferencedFields(final Set fields) {} @Override public boolean evaluate(final Map fieldValues) { return false; } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { return false; } @Override public Filter removePredicatesForFields(Set fields) { return this; } @Override public Set getConstrainableFields() { return Sets.newHashSet(); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public String toString() { return "EXCLUDE"; } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Expression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * Base interface for any expression that evaluates to some value to be used by a predicate. * * @param the evaluated value class */ public interface Expression extends Persistable { /** * Evaluate the expression using the provided field values. * * @param fieldValues the field values to use * @return the evaluated expression value */ V evaluateValue(Map fieldValues); /** * Evaluate the expression using the provided adapter and entry. * * @param the data type of the adapter * @param adapter the data type adapter * @param entry the entry * @return the evaluated expression value */ V evaluateValue(DataTypeAdapter adapter, T entry); /** * @return {@code true} if this expression does not require any adapter field values to compute */ boolean isLiteral(); /** * Adds any fields referenced by this expression to the provided set. * * @param fields the set to add any referenced fields to */ void addReferencedFields(final Set fields); /** * Create a predicate that tests to see if this expression is equal ton the provided object. The * operand can be either another expression or should evaluate to a literal of the same type. * * @param other the object to test against * @return the equals predicate */ Predicate isEqualTo(final Object other); /** * Create a predicate that tests to see if this expression is not equal ton the provided object. 
* The operand can be either another expression or should evaluate to a literal of the same type. * * @param other the object to test against * @return the not equals predicate */ Predicate isNotEqualTo(final Object other); /** * Create a predicate that tests to see if this expression is null. * * @return the is null predicate */ default Predicate isNull() { return new IsNull(this); } /** * Create a predicate that tests to see if this expression is not null. * * @return the not null predicate */ default Predicate isNotNull() { return new IsNotNull(this); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/FieldValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * An abstract expression implementation for turning fields values from an adapter entry into an
 * object to be used by the expression.
 *
 * @param <V> the class of the resolved field value
 */
public abstract class FieldValue<V> implements Expression<V> {

  // Name of the adapter field this expression reads.
  protected String fieldName;

  public FieldValue() {}

  public FieldValue(final String fieldName) {
    this.fieldName = fieldName;
  }

  public String getFieldName() {
    return fieldName;
  }

  /** The only field referenced by this expression is its own field name. */
  @Override
  public void addReferencedFields(final Set<String> fields) {
    fields.add(fieldName);
  }

  /** A field value always depends on the entry, so it is never a literal. */
  @Override
  public boolean isLiteral() {
    return false;
  }

  @Override
  public V evaluateValue(final Map<String, Object> fieldValues) {
    final Object raw = fieldValues.get(fieldName);
    // Missing or null field values propagate as null rather than being converted.
    return raw == null ? null : evaluateValueInternal(raw);
  }

  @Override
  public <T> V evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {
    final Object raw = adapter.getFieldValue(entry, fieldName);
    return raw == null ? null : evaluateValueInternal(raw);
  }

  /**
   * Convert the raw (non-null) field value into the expression's value type.
   *
   * @param value the raw field value
   * @return the converted value
   */
  protected abstract V evaluateValueInternal(final Object value);

  @Override
  public String toString() {
    return fieldName;
  }

  @Override
  public byte[] toBinary() {
    return StringUtils.stringToBinary(fieldName);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    fieldName = StringUtils.stringFromBinary(bytes);
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Filter.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * Base interface for GeoWave filter objects. These filters can be used to generate constraints and
 * test entries of a data type adapter to see if they match a set of conditions.
 */
public interface Filter extends Persistable {

  /**
   * Evaluate this filter using a map of field values.
   *
   * @param fieldValues the field values to evaluate the expression with, the key represents the
   *        field name, and the value represents the field value
   * @return {@code true} if the filter passes
   */
  boolean evaluate(Map<String, Object> fieldValues);

  /**
   * Evaluate this filter using the given adapter and entry.
   *
   * @param <T> the class of the adapter entries
   * @param adapter the data type adapter
   * @param entry the entry to test
   * @return {@code true} if the filter passes
   */
  <T> boolean evaluate(DataTypeAdapter<T> adapter, T entry);

  /**
   * Prepare this filter for efficient testing using the provided adapter and index.
   *
   * @param adapter the data type adapter
   * @param indexMapping the adapter to index mapping
   * @param index the index
   */
  void prepare(DataTypeAdapter<?> adapter, AdapterToIndexMapping indexMapping, Index index);

  /**
   * Adds all adapter fields referenced by this filter to the provided set.
   *
   * @param fields the set to populate with the referenced fields
   */
  void addReferencedFields(Set<String> fields);

  /**
   * @return a set of all fields that can potentially be constrained by the filter
   */
  Set<String> getConstrainableFields();

  /**
   * Remove any exact and constrained predicates that reference fields in the provided set.
   *
   * @param fields the fields to remove
   * @return an updated filter with the predicates removed
   */
  Filter removePredicatesForFields(Set<String> fields);

  /**
   * Generate constraints for the given index based on this filter. The default implementation
   * provides no constraints, which results in a full scan.
   *
   * @param constraintClass the class that the index expects for constraints
   * @param statsStore the data statistics store
   * @param adapter the data type adapter
   * @param indexMapping the adapter to index mapping
   * @param index the index
   * @param indexedFields a set of all adapter fields used by the index mapping
   * @return the constraints for the index that this filter represents
   */
  default <V extends Comparable<V>> FilterConstraints<V> getConstraints(
      final Class<V> constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter<?> adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set<String> indexedFields) {
    return FilterConstraints.empty();
  }

  /**
   * Combine this filter with one or more other filters using an AND operator.
   *
   * @param other the other filters to combine this one with
   * @return the combined filter
   */
  default Filter and(final Filter... other) {
    final Filter[] combined = new Filter[other.length + 1];
    combined[0] = this;
    System.arraycopy(other, 0, combined, 1, other.length);
    return new And(combined);
  }

  /**
   * Combine this filter with one or more other filters using an OR operator.
   *
   * @param other the other filters to combine this one with
   * @return the combined filter
   */
  default Filter or(final Filter... other) {
    final Filter[] combined = new Filter[other.length + 1];
    combined[0] = this;
    System.arraycopy(other, 0, combined, 1, other.length);
    return new Or(combined);
  }

  /**
   * Create the inverse filter for the provided filter.
   *
   * @param filter the filter to invert
   * @return the inverted filter
   */
  public static Filter not(final Filter filter) {
    return new Not(filter);
  }

  /**
   * Create a filter that always evaluates to {@code true}
   *
   * @return the include filter
   */
  public static Filter include() {
    return new Include();
  }

  /**
   * Create a filter that always evaluates to {@code false}
   *
   * @return the exclude filter
   */
  public static Filter exclude() {
    return new Exclude();
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/FilterConstraints.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.locationtech.geowave.core.index.IndexMetaData;
import org.locationtech.geowave.core.index.MultiDimensionalIndexData;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.text.MultiDimensionalTextData;
import org.locationtech.geowave.core.index.text.TextIndexStrategy;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.IndexFieldMapper;
import
org.locationtech.geowave.core.store.base.BaseQueryOptions;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.geowave.core.store.index.CustomIndex;
import org.locationtech.geowave.core.store.index.TextAttributeIndexProvider.AdapterFieldTextIndexEntryConverter;
import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints.DimensionConstraints;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldConstraints;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;
import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;
import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Provides constraints for an adapter/index based on a GeoWave filter expression.
 */
public class FilterConstraints<V extends Comparable<V>> {
  private DataTypeAdapter<?> adapter;
  private AdapterToIndexMapping indexMapping;
  private Index index;
  // Constraints keyed by adapter field name.
  private Map<String, IndexFieldConstraints<V>> fieldConstraints;
  // Lazily computed by getIndexData(); null until first request.
  private List<MultiDimensionalIndexData<V>> cachedIndexData = null;

  public FilterConstraints(
      final DataTypeAdapter<?> adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Map<String, IndexFieldConstraints<V>> fieldConstraints) {
    this.adapter = adapter;
    this.indexMapping = indexMapping;
    this.index = index;
    this.fieldConstraints = fieldConstraints;
  }

  /**
   * Get the constraints for the given field.
   *
   * @param fieldName the field to get constraints for
   * @return the field constraints, or {@code null} if there weren't any
   */
  public IndexFieldConstraints<V> getFieldConstraints(final String fieldName) {
    return fieldConstraints.get(fieldName);
  }

  /**
   * @return the number of constrained fields
   */
  public int getFieldCount() {
    return fieldConstraints.size();
  }

  /**
   * Determines whether or not all of the provided fields are constrained.
   *
   * @param fields the fields to check
   * @return {@code true} if all of the fields are constrained
   */
  public boolean constrainsAllFields(final Set<String> fields) {
    return fields.stream().allMatch(f -> fieldConstraints.containsKey(f));
  }

  /**
   * @return a set of fields that are exactly constrained, i.e. the ranges represent the predicate
   *         exactly
   */
  public Set<String> getExactConstrainedFields() {
    return fieldConstraints.entrySet().stream().filter(e -> e.getValue().isExact()).map(
        e -> e.getKey()).collect(Collectors.toSet());
  }

  // True when exactly one index dimension uses the given index field name.
  private boolean isSingleDimension(
      final String indexFieldName,
      final NumericDimensionField<?>[] dimensions) {
    return Arrays.stream(dimensions).filter(
        dim -> dim.getFieldName().equals(indexFieldName)).count() == 1;
  }

  /**
   * Get the multi-dimensional index data from these constraints.
   *
   * @return the multi-dimensional index data
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  public List<MultiDimensionalIndexData<V>> getIndexData() {
    if (cachedIndexData == null) {
      if ((adapter == null) || (index == null) || (indexMapping == null)) {
        // Empty constraints (see empty()) have no adapter/index context; nothing to build.
        return Lists.newArrayList();
      }
      if (index instanceof CustomIndex) {
        // Custom indices are assumed to be text indices backed by a single adapter field.
        final TextIndexStrategy indexStrategy =
            (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy();
        if (!(indexStrategy.getEntryConverter() instanceof AdapterFieldTextIndexEntryConverter)) {
          throw new RuntimeException("Unable to determine adapter field used by text index.");
        }
        final String fieldName =
            ((AdapterFieldTextIndexEntryConverter) indexStrategy.getEntryConverter()).getFieldName();
        final IndexFieldConstraints<?> fieldConstraint = fieldConstraints.get(fieldName);
        final List<DimensionConstraints<String>> dimensionConstraints = Lists.newArrayList();
        if (fieldConstraint == null) {
          // Unconstrained field: use a full (open-ended) range.
          dimensionConstraints.add(
              DimensionConstraints.of(
                  Lists.newArrayList(
                      FilterRange.of((String) null, (String) null, true, true, true))));
        } else if (fieldConstraint instanceof TextFieldConstraints) {
          final DimensionConstraints<String> dimensionConstraint =
              ((TextFieldConstraints) fieldConstraint).getDimensionRanges(0);
          if (dimensionConstraint == null) {
            dimensionConstraints.add(
                DimensionConstraints.of(
                    Lists.newArrayList(
                        FilterRange.of((String) null, (String) null, true, true, true))));
          } else {
            dimensionConstraints.add(dimensionConstraint);
          }
        } else {
          throw new RuntimeException("Non-text field constraints cannot be used for a text index.");
        }
        cachedIndexData = (List) TextFieldConstraints.toIndexData(dimensionConstraints);
      } else {
        // Right now all index strategies that aren't custom are numeric
        final CommonIndexModel indexModel = index.getIndexModel();
        final int numStrategyDimensions =
            index.getIndexStrategy().getOrderedDimensionDefinitions().length;
        final List<DimensionConstraints<Double>> dimensionConstraints =
            Lists.newArrayListWithCapacity(numStrategyDimensions);
        // Tracks, per index field name, which dimension of that field is being processed next;
        // a single index field (e.g. a composite) can span multiple index dimensions.
        final Map<String, Integer> indexFieldDimensions = Maps.newHashMap();
        final NumericDimensionField<?>[] dimensions = indexModel.getDimensions();
        int dimensionIndex = 0;
        for (final NumericDimensionField<?> indexField : dimensions) {
          if (dimensionIndex >= numStrategyDimensions) {
            // Only build constraints for dimensions used by the index strategy.
            break;
          }
          dimensionIndex++;
          final String indexFieldName = indexField.getFieldName();
          if (!indexFieldDimensions.containsKey(indexFieldName)) {
            indexFieldDimensions.put(indexFieldName, 0);
          }
          final int indexFieldDimension = indexFieldDimensions.get(indexFieldName);
          final IndexFieldMapper<?, ?> mapper =
              indexMapping.getMapperForIndexField(indexFieldName);
          final String[] adapterFields = mapper.getIndexOrderedAdapterFields();
          IndexFieldConstraints<?> fieldConstraint = null;
          if (adapterFields.length > 1 && isSingleDimension(indexFieldName, dimensions)) {
            // If multiple fields are mapped to the same index dimension, combine all of their
            // constraints
            for (int i = 0; i < adapterFields.length; i++) {
              final IndexFieldConstraints<?> constraint = fieldConstraints.get(adapterFields[i]);
              if (fieldConstraint == null) {
                fieldConstraint = constraint;
              } else {
                fieldConstraint.and((IndexFieldConstraints) constraint);
              }
            }
          } else {
            // Otherwise pick the adapter field corresponding to this dimension of the index field.
            fieldConstraint =
                fieldConstraints.get(adapterFields[indexFieldDimension % adapterFields.length]);
          }
          if (fieldConstraint == null) {
            // Unconstrained dimension: full (open-ended) range.
            dimensionConstraints.add(
                DimensionConstraints.of(
                    Lists.newArrayList(
                        FilterRange.of((Double) null, (Double) null, true, true, true))));
          } else if (fieldConstraint instanceof NumericFieldConstraints) {
            final DimensionConstraints<Double> dimensionConstraint =
                ((NumericFieldConstraints) fieldConstraint).getDimensionRanges(
                    indexFieldDimension % fieldConstraint.getDimensionCount());
            if (dimensionConstraint == null) {
              dimensionConstraints.add(
                  DimensionConstraints.of(
                      Lists.newArrayList(
                          FilterRange.of((Double) null, (Double) null, true, true, true))));
            } else {
              dimensionConstraints.add(dimensionConstraint);
            }
            // Advance this field's dimension counter. NOTE(review): this is only done on the
            // constrained branch, not the null branch — confirm that is intentional.
            indexFieldDimensions.put(indexFieldName, indexFieldDimension + 1);
          } else {
            throw new RuntimeException(
                "Non-numeric field constraints cannot be used for a numeric index.");
          }
        }
        cachedIndexData = (List) NumericFieldConstraints.toIndexData(dimensionConstraints);
      }
    }
    return cachedIndexData;
  }

  /**
   * Combine these constraints with another set of constraints using the OR operator.
   *
   * @param other the constraints to combine
   */
  public void or(final FilterConstraints<V> other) {
    if (adapter == null) {
      // This side is empty; adopt the other side's context.
      adapter = other.adapter;
      index = other.index;
      indexMapping = other.indexMapping;
    }
    final Set<String> constrainedFields = getCombinedFields(other);
    for (final String field : constrainedFields) {
      final IndexFieldConstraints<V> fieldRanges1 = fieldConstraints.get(field);
      final IndexFieldConstraints<V> fieldRanges2 = other.fieldConstraints.get(field);
      if ((fieldRanges1 == null) || (fieldRanges2 == null)) {
        // A field unconstrained on either side is unconstrained in the union.
        fieldConstraints.remove(field);
      } else {
        fieldRanges1.or(fieldRanges2);
      }
    }
  }

  /**
   * Combine these constraints with another set of constraints using the AND operator.
   *
   * @param other the constraints to combine
   */
  public void and(final FilterConstraints<V> other) {
    if (adapter == null) {
      // This side is empty; the intersection is just the other side.
      adapter = other.adapter;
      index = other.index;
      indexMapping = other.indexMapping;
      fieldConstraints = other.fieldConstraints;
    } else {
      final Set<String> constrainedFields = getCombinedFields(other);
      for (final String field : constrainedFields) {
        final IndexFieldConstraints<V> fieldRanges1 = fieldConstraints.get(field);
        final IndexFieldConstraints<V> fieldRanges2 = other.fieldConstraints.get(field);
        if (fieldRanges1 == null) {
          fieldConstraints.put(field, fieldRanges2);
        } else if (fieldRanges2 != null) {
          fieldRanges1.and(fieldRanges2);
        }
      }
    }
  }

  /**
   * Get the inverse of these constraints. Only 1-dimensional field constraints can be accurately
   * inverted, anything else will result in no constraints.
   */
  public void invert() {
    for (final IndexFieldConstraints<V> fieldConstraint : fieldConstraints.values()) {
      // Only invert if there is one constrained dimension, see Not#getConstraints for why this is.
      if (fieldConstraint.getDimensionCount() == 1) {
        fieldConstraint.invert();
      } else {
        fieldConstraints.clear();
        break;
      }
    }
  }

  /**
   * Get the raw query ranges represented by this filter's index data.
   *
   * @param baseOptions the base query options
   * @param statisticsStore the data statistics store
   * @return the query ranges
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  public QueryRanges getQueryRanges(
      final BaseQueryOptions baseOptions,
      final DataStatisticsStore statisticsStore) {
    if ((index instanceof CustomIndex)
        && (((CustomIndex) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) {
      final List<MultiDimensionalTextData> indexData = (List) getIndexData();
      if (indexData.size() > 0) {
        final TextIndexStrategy indexStrategy =
            (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy();
        final List<QueryRanges> ranges =
            indexData.stream().map(data -> indexStrategy.getQueryRanges(data)).collect(
                Collectors.toList());
        if (ranges.size() == 1) {
          return ranges.get(0);
        }
        return new QueryRanges(ranges);
      }
    } else if (!(index instanceof CustomIndex)) {
      final List<MultiDimensionalNumericData> indexData = (List) getIndexData();
      if (indexData.size() > 0) {
        // Use the index metadata statistic (when available) as hints for range decomposition.
        final IndexMetaData[] hints;
        final IndexMetaDataSetValue value =
            InternalStatisticsHelper.getIndexStatistic(
                statisticsStore,
                IndexMetaDataSetStatistic.STATS_TYPE,
                index.getName(),
                adapter.getTypeName(),
                null,
                baseOptions.getAuthorizations());
        if (value != null) {
          hints = value.getValue().toArray(new IndexMetaData[value.getValue().size()]);
        } else {
          hints = new IndexMetaData[0];
        }
        // Fall back to a default maximum of 2000 decomposed ranges when unset.
        int maxRangeDecomposition =
            baseOptions.getMaxRangeDecomposition() != null
                ? baseOptions.getMaxRangeDecomposition()
                : 2000;
        return DataStoreUtils.constraintsToQueryRanges(
            indexData,
            index,
            baseOptions.getTargetResolutionPerDimensionForHierarchicalIndex(),
            maxRangeDecomposition,
            hints);
      }
    }
    // No usable index data: unconstrained query ranges (full scan).
    return new QueryRanges();
  }

  // Union of the field names constrained by either side.
  private Set<String> getCombinedFields(final FilterConstraints<V> other) {
    final Set<String> constrainedFields = Sets.newHashSet(fieldConstraints.keySet());
    constrainedFields.addAll(other.fieldConstraints.keySet());
    return constrainedFields;
  }

  /**
   * Create a filter constraint for a single field.
   *
   * @param <V> the constraint class
   * @param adapter the data type adapter
   * @param indexMapping the adapter to index mapping
   * @param index the index
   * @param fieldName the name of the constrained field
   * @param constraints the field constraints for the field
   * @return the constructed filter constraints
   */
  public static <V extends Comparable<V>> FilterConstraints<V> of(
      final DataTypeAdapter<?> adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final String fieldName,
      final IndexFieldConstraints<V> constraints) {
    final Map<String, IndexFieldConstraints<V>> fieldConstraints = Maps.newHashMap();
    fieldConstraints.put(fieldName, constraints);
    return new FilterConstraints<>(adapter, indexMapping, index, fieldConstraints);
  }

  /**
   * Create a set of empty filter constraints. Empty filter constraints result in a full table scan.
   *
   * @param <V> the constraint class
   * @return a set of empty filter constraints
   */
  public static <V extends Comparable<V>> FilterConstraints<V> empty() {
    return new FilterConstraints<>(null, null, null, Maps.newHashMap());
  }
}

================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/FilterRange.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Collections; import java.util.List; import javax.annotation.Nullable; import com.google.common.collect.Lists; /** * A range of data represented by a predicate. * * @param the class of the filtered data */ public class FilterRange> implements Comparable> { private final T start; private final T end; private boolean startInclusive = true; private boolean endInclusive = true; private boolean exact = false; /** * Create a new filter range with the given parameters. A {@code null} start indicates an open * ended start, while a {@code null} end indicates an open ended end. * * @param start the start of the range * @param end the end of the range * @param startInclusive whether or not the start is inclusive * @param endInclusive whether or not the end is inclusive * @param exact whether or not this range exactly represents the predicate */ public FilterRange( final @Nullable T start, final @Nullable T end, final boolean startInclusive, final boolean endInclusive, final boolean exact) { this.start = start; this.end = end; this.startInclusive = startInclusive; this.endInclusive = endInclusive; this.exact = exact; } public T getStart() { return start; } public T getEnd() { return end; } public boolean isStartInclusive() { return startInclusive; } public boolean isEndInclusive() { return endInclusive; } /** * @return {@code true} if this range exactly represents the predicate */ public boolean isExact() { return exact; } /** * @return {@code true} if this range represents all data */ public boolean isFullRange() { return (start == null) && (end == null) && startInclusive && endInclusive; } protected boolean isAfter(final FilterRange other, final boolean startPoint) { if 
(getStart() == null) { return false; } final T point = startPoint ? other.start : other.end; if (point == null) { return startPoint; } return start.compareTo(point) > 0; } protected boolean isBefore(final FilterRange other, final boolean startPoint) { if (getEnd() == null) { return false; } final T point = startPoint ? other.start : other.end; if (point == null) { return !startPoint; } return end.compareTo(point) < 0; } protected boolean overlaps(final FilterRange other) { return !isAfter(other, false) && !isBefore(other, true); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + (start == null ? -1 : start.hashCode()); result = (prime * result) + (end == null ? -1 : end.hashCode()); result = (prime * result) + (startInclusive ? 1 : 0); result = (prime * result) + (endInclusive ? 1 : 0); return result; } @Override public boolean equals(final Object other) { if (other == null) { return false; } if (!(other instanceof FilterRange)) { return false; } final FilterRange otherRange = (FilterRange) other; final boolean startMatches = start == null ? otherRange.start == null : start.equals(otherRange.start); final boolean endMatches = end == null ? 
otherRange.end == null : end.equals(otherRange.end); return startMatches && endMatches && (startInclusive == otherRange.startInclusive) && (endInclusive == otherRange.endInclusive); } @Override public int compareTo(final FilterRange o) { int compare; if (getStart() == null) { if (o.getStart() == null) { compare = 0; } else { compare = -1; } } else if (o.getStart() == null) { compare = 1; } else { compare = getStart().compareTo(o.getStart()); if (compare == 0) { compare = Boolean.compare(o.startInclusive, startInclusive); } } if (compare == 0) { if (getEnd() == null) { if (o.getEnd() == null) { compare = 0; } else { compare = -1; } } else if (o.getEnd() == null) { compare = 1; } else { compare = getEnd().compareTo(o.getEnd()); if (compare == 0) { compare = Boolean.compare(o.endInclusive, endInclusive); } } } return compare; } private FilterRange intersectRange(final FilterRange other) { final T intersectStart; final T intersectEnd; final boolean startInc; final boolean endInc; if (start == null) { if (other.start == null) { intersectStart = null; startInc = startInclusive && other.startInclusive; } else { intersectStart = other.start; startInc = other.startInclusive; } } else if (other.start == null) { intersectStart = start; startInc = startInclusive; } else { final int compare = start.compareTo(other.start); if (compare > 0) { intersectStart = start; startInc = startInclusive; } else if (compare == 0) { intersectStart = start; startInc = startInclusive && other.startInclusive; } else { intersectStart = other.start; startInc = other.startInclusive; } } if (end == null) { if (other.end == null) { intersectEnd = null; endInc = endInclusive && other.endInclusive; } else { intersectEnd = other.end; endInc = other.endInclusive; } } else if (other.end == null) { intersectEnd = end; endInc = endInclusive; } else { final int compare = end.compareTo(other.end); if (compare < 0) { intersectEnd = end; endInc = endInclusive; } else if (compare == 0) { intersectEnd = end; 
endInc = endInclusive && other.endInclusive; } else { intersectEnd = other.end; endInc = other.endInclusive; } } return FilterRange.of(intersectStart, intersectEnd, startInc, endInc, exact && other.exact); } /** * Create a new filter range with the given parameters. A {@code null} start indicates an open * ended start, while a {@code null} end indicates an open ended end. * * @param T the class of the filter range * @param start the start of the range * @param end the end of the range * @param startInclusive whether or not the start is inclusive * @param endInclusive whether or not the end is inclusive * @param exact whether or not this range exactly represents the predicate * @return the filter range */ public static > FilterRange of( final T start, final T end, final boolean startInclusive, final boolean endInclusive, final boolean exact) { return new FilterRange<>(start, end, startInclusive, endInclusive, exact); } /** * Creates a new filter range that represents all data. * * @param the class of the filter range * @return the filter range */ public static > FilterRange include() { return FilterRange.of(null, null, true, true, true); } /** * Merges a list of filter ranges into their most simple form. Overlapping ranges will be merged * together. 
* * @param the class of the filter range * @param ranges the ranges to merge * @return the merged ranges */ public static > List> mergeRanges( final List> ranges) { if (ranges.size() <= 1) { return ranges; } Collections.sort(ranges); final List> mergedRanges = Lists.newArrayList(); FilterRange currentRange = null; for (final FilterRange range : ranges) { if (currentRange == null) { currentRange = range; continue; } if (currentRange.isBefore(range, true)) { mergedRanges.add(currentRange); currentRange = range; } if (currentRange.isBefore(range, false)) { currentRange = FilterRange.of( currentRange.start, range.end, currentRange.startInclusive, range.endInclusive, currentRange.exact && range.exact); } } if (currentRange != null) { mergedRanges.add(currentRange); } return mergedRanges; } /** * Intersects a list of filter ranges with another list of filter ranges. It is assumed that both * lists represent merged (non-overlapping) data. * * @param the class of the filter range * @param ranges1 the first set of ranges * @param ranges2 the second set of ranges * @return a list of filter ranges that represents the data that is represented by both lists */ public static > List> intersectRanges( final List> ranges1, final List> ranges2) { Collections.sort(ranges1); Collections.sort(ranges2); final List> intersections = Lists.newLinkedList(); int i = 0, j = 0; while ((i < ranges1.size()) && (j < ranges2.size())) { final FilterRange range1 = ranges1.get(i); final FilterRange range2 = ranges2.get(j); if (range1.isBefore(range2, false)) { i++; } else { j++; } if (range1.overlaps(range2)) { intersections.add(range1.intersectRange(range2)); } } return intersections; } /** * Inverts a list of filter ranges. It is a assumed that the ranges in the list do not overlap. 
* * @param the class of the filter range * @param ranges the ranges to invert * @return a list of ranges that represents the inverse of the provided ranges */ public static > List> invertRanges( final List> ranges) { Collections.sort(ranges); if (ranges.size() == 0) { return Lists.newArrayList(FilterRange.include()); } final List> newRanges = Lists.newArrayList(); T start = null; boolean startInclusive = true; boolean exact = true; for (int i = 0; i < ranges.size(); i++) { final FilterRange nextRange = ranges.get(i); if ((start != null) || (nextRange.getStart() != null)) { newRanges.add( FilterRange.of( start, nextRange.getStart(), startInclusive, !nextRange.startInclusive, exact && nextRange.exact)); } start = nextRange.getEnd(); startInclusive = !nextRange.endInclusive; exact = nextRange.exact; } if (start != null) { newRanges.add(FilterRange.of(start, null, startInclusive, true, exact)); } return newRanges; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericEqualTo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import com.google.common.collect.Sets; /** * A generic predicate to compare two expressions using {@code Object.equals}. 
*/ public class GenericEqualTo extends BinaryPredicate> { public GenericEqualTo() {} public GenericEqualTo( final Expression expression1, final Expression expression2) { super(expression1, expression2); } @Override public boolean evaluate(final Map fieldValues) { final Object value1 = expression1.evaluateValue(fieldValues); final Object value2 = expression2.evaluateValue(fieldValues); if (value1 == null) { return value2 == null; } if (value2 == null) { return false; } return value1.equals(value2); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { final Object value1 = expression1.evaluateValue(adapter, entry); final Object value2 = expression2.evaluateValue(adapter, entry); if (value1 == null) { return value2 == null; } if (value2 == null) { return false; } return value1.equals(value2); } @Override public Set getConstrainableFields() { return Sets.newHashSet(); } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) {} @Override public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" = "); sb.append(expression2.toString()); return sb.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericExpression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; /** * A generic expression for doing basic comparisons of field values and literals that are not * represented by other expression implementations. */ public interface GenericExpression extends Expression { @Override default Predicate isEqualTo(final Object other) { return new GenericEqualTo(this, toExpression(other)); } @Override default Predicate isNotEqualTo(final Object other) { return new GenericNotEqualTo(this, toExpression(other)); } @SuppressWarnings("unchecked") public static Expression toExpression(final Object object) { if (object instanceof Expression) { return (Expression) object; } return GenericLiteral.of(object); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericFieldValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; /** * A field value implementation for any field value. 
*/ public class GenericFieldValue extends FieldValue implements GenericExpression { public GenericFieldValue() {} public GenericFieldValue(final String fieldName) { super(fieldName); } @Override protected Object evaluateValueInternal(final Object value) { return value; } public static GenericFieldValue of(final String fieldName) { return new GenericFieldValue(fieldName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericLiteral.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; /** * A generic implementation of literal, representing any object that can be serialized and * deserialized. */ public class GenericLiteral extends Literal implements GenericExpression { public GenericLiteral() {} public GenericLiteral(final Object literal) { super(literal); } public static GenericLiteral of(final Object literal) { return new GenericLiteral(literal); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/GenericNotEqualTo.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import com.google.common.collect.Sets; /** * A generic predicate to compare two expressions using the inverse of {@code Object.equals}. 
*/ public class GenericNotEqualTo extends BinaryPredicate> { public GenericNotEqualTo() {} public GenericNotEqualTo( final Expression expression1, final Expression expression2) { super(expression1, expression2); } @Override public boolean evaluate(final Map fieldValues) { final Object value1 = expression1.evaluateValue(fieldValues); final Object value2 = expression2.evaluateValue(fieldValues); if (value1 == null) { return value2 == null; } if (value2 == null) { return false; } return !value1.equals(value2); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { final Object value1 = expression1.evaluateValue(adapter, entry); final Object value2 = expression2.evaluateValue(adapter, entry); if (value1 == null) { return value2 != null; } if (value2 == null) { return true; } return !value1.equals(value2); } @Override public Set getConstrainableFields() { return Sets.newHashSet(); } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) {} @Override public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" <> "); sb.append(expression2.toString()); return sb.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Include.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import com.google.common.collect.Sets; /** * A filter that implementation always evaluates to {@code true}. */ public class Include implements Filter { public Include() {} @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) {} @Override public void addReferencedFields(final Set fields) {} @Override public boolean evaluate(final Map fieldValues) { return true; } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { return true; } @Override public Filter removePredicatesForFields(Set fields) { return this; } @Override public Set getConstrainableFields() { return Sets.newHashSet(); } @Override public String toString() { return "INCLUDE"; } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/IndexFieldConstraints.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import com.google.common.collect.Lists; import com.google.common.collect.Sets; /** * Provides a set of multi-dimensional constraints for a single indexed field. */ public abstract class IndexFieldConstraints> { protected final Map> dimensionConstraints; public IndexFieldConstraints(final Map> dimensionConstraints) { this.dimensionConstraints = dimensionConstraints; } /** * Get the constraints for the given dimension. * * @param dimension the dimension to get constraints of * @return the dimension constraints, or {@code null} if there weren't any */ public DimensionConstraints getDimensionRanges(final int dimension) { return dimensionConstraints.get(dimension); } /** * @return the number of dimensions constrained for this field */ public int getDimensionCount() { return dimensionConstraints.size(); } /** * @return {@code true} if these constraints exactly represent the predicates for this field */ public boolean isExact() { return dimensionConstraints.values().stream().allMatch(d -> d.isExact()); } /** * Combine the constraints of this field with another set of constraints using an OR operator. 
* * @param other the constraints to combine */ public void or(final IndexFieldConstraints other) { final Set toRemove = Sets.newHashSet(); for (final Entry> dimension : dimensionConstraints.entrySet()) { final DimensionConstraints dimension1 = dimension.getValue(); final DimensionConstraints dimension2 = other.dimensionConstraints.get(dimension.getKey()); if ((dimension1 == null) || (dimension2 == null)) { toRemove.add(dimension.getKey()); } else { dimension1.or(dimension2); } } toRemove.stream().forEach(i -> dimensionConstraints.remove(i)); } /** * Combine the constraints of this field with another set of constraints using an AND operator. * * @param other the constraints to combine */ public void and(final IndexFieldConstraints other) { final Set dimensions = Sets.newHashSet(dimensionConstraints.keySet()); dimensions.addAll(other.dimensionConstraints.keySet()); for (final Integer dimension : dimensions) { final DimensionConstraints dimension1 = dimensionConstraints.get(dimension); final DimensionConstraints dimension2 = other.dimensionConstraints.get(dimension); if (dimension1 == null) { dimensionConstraints.put(dimension, dimension2); } else if (dimension2 != null) { dimension1.and(dimension2); } } } /** * Invert the constraints of each dimension. */ public void invert() { for (final Entry> dimension : dimensionConstraints.entrySet()) { dimension.getValue().invert(); } } /** * A class representing the constraints of a single dimension of the field. 
* * @param the constraint class */ public static class DimensionConstraints> { private List> dimensionRanges = Lists.newArrayList(); public DimensionConstraints(final List> dimensionRanges) { this.dimensionRanges = dimensionRanges; } /** * @return a list of ranges that are constrained for this dimension */ public List> getRanges() { return dimensionRanges; } /** * @return {@code true} if this dimension constraints exactly represent the predicates for the * dimension */ public boolean isExact() { return dimensionRanges.stream().allMatch(r -> r.isExact()); } /** * Combine the constraints of this dimension with another set of constraints using an OR * operator. * * @param other the constraints to combine */ public void or(final DimensionConstraints other) { dimensionRanges.addAll(other.dimensionRanges); dimensionRanges = FilterRange.mergeRanges(dimensionRanges); } /** * Combine the constraints of this dimension with another set of constraints using an AND * operator. * * @param other the constraints to combine */ public void and(final DimensionConstraints other) { dimensionRanges = FilterRange.intersectRanges(dimensionRanges, other.dimensionRanges); } /** * Invert the ranges of this dimension. */ public void invert() { dimensionRanges = FilterRange.invertRanges(dimensionRanges); } /** * Create new dimension constraints from the specified set of filter ranges. * * @param the constraint class * @param ranges the constrained ranges * @return the constructed dimension constraints */ public static > DimensionConstraints of( final List> ranges) { return new DimensionConstraints<>(ranges); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/InvalidFilterException.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; /** * Thrown when an invalid filter is made, such as creating a literal with an incompatible object. */ public class InvalidFilterException extends RuntimeException { private static final long serialVersionUID = -2922956287189544264L; public InvalidFilterException(final String message) { super(message); } public InvalidFilterException(final String message, final Throwable cause) { super(message, cause); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/IsNotNull.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import com.beust.jcommander.internal.Sets;

/**
 * Predicate that passes when the underlying expression does not evaluate to {@code null}.
 */
public class IsNotNull implements Predicate {

  private Expression<?> expression;

  public IsNotNull() {}

  public IsNotNull(final Expression<?> expression) {
    this.expression = expression;
  }

  public Expression<?> getExpression() {
    return expression;
  }

  @Override
  public void prepare(
      final DataTypeAdapter<?> adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {}

  @Override
  public void addReferencedFields(final Set<String> fields) {
    expression.addReferencedFields(fields);
  }

  @Override
  public boolean evaluate(final Map<String, Object> fieldValues) {
    return expression.evaluateValue(fieldValues) != null;
  }

  @Override
  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {
    return expression.evaluateValue(adapter, entry) != null;
  }

  @Override
  public Filter removePredicatesForFields(final Set<String> fields) {
    final Set<String> referencedFields = Sets.newHashSet();
    expression.addReferencedFields(referencedFields);
    if (fields.containsAll(referencedFields)) {
      // every field this predicate touches is being removed, so drop the predicate
      return null;
    }
    return this;
  }

  @Override
  public Set<String> getConstrainableFields() {
    return Sets.newHashSet();
  }

  @Override
  public String toString() {
    return expression.toString() + " IS NOT NULL";
  }

  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(expression);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    expression = (Expression<?>) PersistenceUtils.fromBinary(bytes);
  }
}



================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/IsNull.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import com.beust.jcommander.internal.Sets;

/**
 * Predicate that passes when the underlying expression evaluates to {@code null}.
 */
public class IsNull implements Predicate {

  private Expression<?> expression;

  public IsNull() {}

  public IsNull(final Expression<?> expression) {
    this.expression = expression;
  }

  public Expression<?> getExpression() {
    return expression;
  }

  @Override
  public void prepare(
      final DataTypeAdapter<?> adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {}

  @Override
  public void addReferencedFields(final Set<String> fields) {
    expression.addReferencedFields(fields);
  }

  @Override
  public boolean evaluate(final Map<String, Object> fieldValues) {
    return expression.evaluateValue(fieldValues) == null;
  }

  @Override
  public <T> boolean evaluate(final DataTypeAdapter<T> adapter, final T entry) {
    return expression.evaluateValue(adapter, entry) == null;
  }

  @Override
  public Filter removePredicatesForFields(final Set<String> fields) {
    final Set<String> referencedFields = Sets.newHashSet();
    expression.addReferencedFields(referencedFields);
    if (fields.containsAll(referencedFields)) {
      // every field this predicate touches is being removed, so drop the predicate
      return null;
    }
    return this;
  }

  @Override
  public Set<String> getConstrainableFields() {
    return Sets.newHashSet();
  }

  @Override
  public String toString() {
    return expression.toString() + " IS NULL";
  }

  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(expression);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    expression = (Expression<?>) PersistenceUtils.fromBinary(bytes);
  }
}



================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Literal.java ================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.nio.ByteBuffer; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; /** * An expression representing a raw value, not derived from an adapter entry. * * @param the class that the expression evaluates to */ public abstract class Literal implements Expression { protected V literal; public Literal() {} public Literal(final V literal) { this.literal = literal; } public V getValue() { return literal; } @Override public void addReferencedFields(final Set fields) {} @Override public boolean isLiteral() { return true; } @Override public V evaluateValue(final Map fieldValues) { return literal; } @Override public V evaluateValue(final DataTypeAdapter adapter, final T entry) { return literal; } @Override public String toString() { return literal == null ? 
"null" : literal.toString(); } @SuppressWarnings("unchecked") @Override public byte[] toBinary() { if (literal == null) { return new byte[] {(byte) 0}; } final byte[] classBytes = StringUtils.stringToBinary(literal.getClass().getName()); final FieldWriter writer = (FieldWriter) FieldUtils.getDefaultWriterForClass(literal.getClass()); final byte[] valueBytes = writer.writeField(literal); final ByteBuffer buffer = ByteBuffer.allocate( 1 + VarintUtils.unsignedIntByteLength(classBytes.length) + VarintUtils.unsignedIntByteLength(valueBytes.length) + classBytes.length + valueBytes.length); buffer.put((byte) 1); VarintUtils.writeUnsignedInt(classBytes.length, buffer); buffer.put(classBytes); VarintUtils.writeUnsignedInt(valueBytes.length, buffer); buffer.put(valueBytes); return buffer.array(); } @SuppressWarnings("unchecked") @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); final byte nullByte = buffer.get(); if (nullByte == 0) { literal = null; return; } final byte[] classBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(classBytes); final byte[] valueBytes = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(valueBytes); final String className = StringUtils.stringFromBinary(classBytes); try { final Class valueClass = Class.forName(className); final FieldReader reader = (FieldReader) FieldUtils.getDefaultReaderForClass(valueClass); literal = (V) reader.readField(valueBytes); } catch (final ClassNotFoundException e) { throw new RuntimeException("Unable to find class for literal: " + className); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/MultiFilterOperator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Arrays; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; /** * An abstract filter that is composed of two or more other filters. */ public abstract class MultiFilterOperator implements Filter { private Filter[] children; public MultiFilterOperator(final Filter... children) { this.children = children; } public Filter[] getChildren() { return children; } @Override public void addReferencedFields(final Set fields) { Arrays.stream(getChildren()).forEach(f -> f.addReferencedFields(fields)); } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { Arrays.stream(children).forEach(f -> f.prepare(adapter, indexMapping, index)); } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(children); } @Override public void fromBinary(final byte[] bytes) { final List childrenList = PersistenceUtils.fromBinaryAsList(bytes); children = childrenList.toArray(new Filter[childrenList.size()]); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Not.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * A filter that inverts the result of another filter.
 */
public class Not implements Filter {

  // the filter whose result this filter negates
  private Filter baseCondition;

  /** No-arg constructor used during deserialization via {@link #fromBinary(byte[])}. */
  public Not() {}

  public Not(final Filter baseCondition) {
    this.baseCondition = baseCondition;
  }

  /** @return the filter being negated */
  public Filter getFilter() {
    return baseCondition;
  }

  @Override
  public void prepare(
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    baseCondition.prepare(adapter, indexMapping, index);
  }

  @Override
  public void addReferencedFields(final Set fields) {
    baseCondition.addReferencedFields(fields);
  }

  @Override
  public Set getConstrainableFields() {
    return baseCondition.getConstrainableFields();
  }

  @Override
  public > FilterConstraints getConstraints(
      final Class constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set indexedFields) {
    final FilterConstraints constraints =
        baseCondition.getConstraints(
            constraintClass,
            statsStore,
            adapter,
            indexMapping,
            index,
            indexedFields);
    // TODO: There is room for improvement here in the future. To properly handle the constraints
    // for Not, all operators need to be inverted, not just the ranges. For example, if you
    // had A > 5 && B < 5, inverting just the ranges would result in a filter of A <= 5 && B >=5
    // which is incorrect, it should really be A <= 5 || B >=5 which becomes unconstrainable on
    // either A or B attribute indexes. On the other hand if the underlying filter was using ||
    // instead of &&, then the filter would become constrainable where it previously wasn't. For now
    // we can say that if only one field and one dimension are being constrained, inverting the
    // ranges produces an accurate constraint.
    if (constraints.getFieldCount() == 1) {
      constraints.invert();
      return constraints;
    }
    // more than one field constrained: safest to report no constraints at all
    return FilterConstraints.empty();
  }

  @Override
  public boolean evaluate(final Map fieldValues) {
    return !baseCondition.evaluate(fieldValues);
  }

  @Override
  public boolean evaluate(final DataTypeAdapter adapter, final T entry) {
    return !baseCondition.evaluate(adapter, entry);
  }

  @Override
  public Filter removePredicatesForFields(Set fields) {
    final Filter updated = baseCondition.removePredicatesForFields(fields);
    if (updated == null) {
      // the entire underlying predicate was removed, so the negation disappears with it
      return null;
    }
    return new Not(updated);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("NOT(");
    sb.append(baseCondition.toString());
    sb.append(")");
    return sb.toString();
  }

  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(baseCondition);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    baseCondition = (Filter) PersistenceUtils.fromBinary(bytes);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Or.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import java.util.Arrays; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import com.google.common.collect.Sets; import com.google.common.base.Predicates; /** * Combines multiple filters using the OR operator. The expression will evaluate to true if ANY of * the child filters resolve to true. */ public class Or extends MultiFilterOperator { public Or() {} public Or(final Filter... 
filters) { super(filters); } @Override public > FilterConstraints getConstraints( final Class constraintClass, final DataStatisticsStore statsStore, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final Set indexedFields) { final Filter[] children = getChildren(); if (children.length == 0) { return FilterConstraints.empty(); } final FilterConstraints finalConstraints = children[0].getConstraints( constraintClass, statsStore, adapter, indexMapping, index, indexedFields); for (int i = 1; i < children.length; i++) { finalConstraints.or( children[i].getConstraints( constraintClass, statsStore, adapter, indexMapping, index, indexedFields)); } return finalConstraints; } @Override public boolean evaluate(final Map fieldValues) { return Arrays.stream(getChildren()).anyMatch(f -> f.evaluate(fieldValues)); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { return Arrays.stream(getChildren()).anyMatch(f -> f.evaluate(adapter, entry)); } @Override public Set getConstrainableFields() { final Filter[] children = getChildren(); Set constrainableFields = null; for (final Filter filter : children) { if (constrainableFields == null) { constrainableFields = filter.getConstrainableFields(); } else { constrainableFields.retainAll(filter.getConstrainableFields()); } } if (constrainableFields == null) { return Sets.newHashSet(); } return constrainableFields; } @Override public Filter removePredicatesForFields(Set fields) { // We can only remove predicates for fields that are on both sides of the final Set removableFields = Arrays.stream(getChildren()).map(Filter::getConstrainableFields).collect( () -> new HashSet<>(fields), Set::retainAll, Set::retainAll); if (removableFields.size() == 0) { return this; } Filter[] updatedChildren = Arrays.stream(getChildren()).map(f -> f.removePredicatesForFields(removableFields)).filter( Predicates.notNull()).toArray(Filter[]::new); if (updatedChildren.length == 0) { return 
null; } else if (updatedChildren.length == 1) { return updatedChildren[0]; } return new Or(updatedChildren); } @Override public String toString() { return Arrays.stream(getChildren()).map( f -> f instanceof MultiFilterOperator ? "(" + f.toString() + ")" : f.toString()).collect( Collectors.joining(" OR ")); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/Predicate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression;

/**
 * A predicate is really just a filter, but predicate is a more appropriate name in many cases.
 */
// Marker interface: adds no members beyond Filter, only a more descriptive name.
public interface Predicate extends Filter {
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Abs.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.numeric; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; /** * An expression that takes the absolute value of the evaluated value of another numeric expression. */ public class Abs implements NumericExpression { private NumericExpression baseExpression; public Abs() {} public Abs(final NumericExpression baseExpression) { this.baseExpression = baseExpression; } public NumericExpression getExpression() { return baseExpression; } @Override public void addReferencedFields(final Set fields) { baseExpression.addReferencedFields(fields); } @Override public boolean isLiteral() { return baseExpression.isLiteral(); } @Override public Double evaluateValue(final Map fieldValues) { final Double value = baseExpression.evaluateValue(fieldValues); if (value == null) { return null; } return Math.abs(value); } @Override public Double evaluateValue(final DataTypeAdapter adapter, final T entry) { final Double value = baseExpression.evaluateValue(adapter, entry); if (value == null) { return null; } return Math.abs(value); } @Override public String toString() { return "abs(" + baseExpression.toString() + ")"; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(baseExpression); } @Override public void fromBinary(final byte[] bytes) { baseExpression = (NumericExpression) PersistenceUtils.fromBinary(bytes); } } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Add.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.numeric; /** * An expression that adds the values of two numeric expressions. */ public class Add extends MathExpression { public Add() {} public Add(final NumericExpression expr1, final NumericExpression expr2) { super(expr1, expr2); } @Override protected double doOperation(final double value1, final double value2) { return value1 + value2; } @Override protected String getOperatorString() { return "+"; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Divide.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.numeric; /** * An expression that divides the values of two numeric expressions. */ public class Divide extends MathExpression { public Divide() {} public Divide(final NumericExpression expr1, final NumericExpression expr2) { super(expr1, expr2); } @Override protected double doOperation(final double value1, final double value2) { return value1 / value2; } @Override protected String getOperatorString() { return "/"; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/MathExpression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.numeric;

import java.util.List;
import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * Abstract implementation for performing math operations on two numeric expressions.
 */
public abstract class MathExpression implements NumericExpression {

  // left-hand operand
  private NumericExpression expression1;
  // right-hand operand
  private NumericExpression expression2;

  /** No-arg constructor used during deserialization via {@link #fromBinary(byte[])}. */
  public MathExpression() {}

  public MathExpression(final NumericExpression expr1, final NumericExpression expr2) {
    expression1 = expr1;
    expression2 = expr2;
  }

  /** @return the left-hand operand expression */
  public NumericExpression getExpression1() {
    return expression1;
  }

  /** @return the right-hand operand expression */
  public NumericExpression getExpression2() {
    return expression2;
  }

  @Override
  public void addReferencedFields(final Set fields) {
    expression1.addReferencedFields(fields);
    expression2.addReferencedFields(fields);
  }

  @Override
  public boolean isLiteral() {
    // the operation is a literal only when both operands are literals
    return expression1.isLiteral() && expression2.isLiteral();
  }

  @Override
  public Double evaluateValue(final Map fieldValues) {
    final Double value1 = expression1.evaluateValue(fieldValues);
    final Double value2 = expression2.evaluateValue(fieldValues);
    // null (missing) on either side makes the whole result null
    if ((value1 == null) || (value2 == null)) {
      return null;
    }
    return doOperation(value1, value2);
  }

  @Override
  public Double evaluateValue(final DataTypeAdapter adapter, final T entry) {
    final Double value1 = expression1.evaluateValue(adapter, entry);
    final Double value2 = expression2.evaluateValue(adapter, entry);
    if ((value1 == null) || (value2 == null)) {
      return null;
    }
    return doOperation(value1, value2);
  }

  /**
   * Applies the concrete math operation to the two (non-null) operand values.
   */
  protected abstract double doOperation(final double value1, final double value2);

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder();
    // parenthesize nested math expressions so operator precedence reads correctly
    if (expression1 instanceof MathExpression) {
      sb.append("(");
      sb.append(expression1.toString());
      sb.append(")");
    } else {
      sb.append(expression1.toString());
    }
    sb.append(" ");
    sb.append(getOperatorString());
    sb.append(" ");
    if (expression2 instanceof MathExpression) {
      sb.append("(");
      sb.append(expression2.toString());
      sb.append(")");
    } else {
      sb.append(expression2.toString());
    }
    return sb.toString();
  }

  /** @return the textual symbol for the concrete operation (e.g. "+", "/") */
  protected abstract String getOperatorString();

  @Override
  public byte[] toBinary() {
    // serialized as a two-element list; order matters and matches fromBinary
    return PersistenceUtils.toBinary(new Persistable[] {expression1, expression2});
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final List expressions = PersistenceUtils.fromBinaryAsList(bytes);
    expression1 = (NumericExpression) expressions.get(0);
    expression2 = (NumericExpression) expressions.get(1);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Multiply.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.numeric; /** * An expression that multiplies the values of two numeric expressions. */ public class Multiply extends MathExpression { public Multiply() {} public Multiply(final NumericExpression expr1, final NumericExpression expr2) { super(expr1, expr2); } @Override protected double doOperation(final double value1, final double value2) { return value1 * value2; } @Override protected String getOperatorString() { return "*"; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericBetween.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
 * This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.numeric;

import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.CustomIndex;
import org.locationtech.geowave.core.store.query.filter.expression.Between;
import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;

/**
 * Implementation of between for numeric data.
 */
public class NumericBetween extends Between {

  /** No-arg constructor used during deserialization. */
  public NumericBetween() {}

  public NumericBetween(
      final NumericExpression valueExpr,
      final NumericExpression lowerBoundExpr,
      final NumericExpression upperBoundExpr) {
    super(valueExpr, lowerBoundExpr, upperBoundExpr);
  }

  @Override
  protected boolean evaluateInternal(
      final Double value,
      final Double lowerBound,
      final Double upperBound) {
    // inclusive on both ends
    return (value >= lowerBound) && (value <= upperBound);
  }

  @Override
  protected IndexFieldConstraints toConstraints(final Double lowerBound, final Double upperBound) {
    // inclusive bounds; last flag marks the range as an exact representation of the predicate
    return NumericFieldConstraints.of(lowerBound, upperBound, true, true, true);
  }

  @Override
  protected boolean indexSupported(final Index index) {
    return !(index instanceof CustomIndex);
  }

  @Override
  public void prepare(
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    // pre-evaluate any literal sub-expressions down to NumericLiteral so evaluation is cheap
    if (valueExpr.isLiteral() && !(valueExpr instanceof NumericLiteral)) {
      valueExpr = NumericLiteral.of(valueExpr.evaluateValue(null));
    }
    if (lowerBoundExpr.isLiteral() && !(lowerBoundExpr instanceof NumericLiteral)) {
      lowerBoundExpr = NumericLiteral.of(lowerBoundExpr.evaluateValue(null));
    }
    if (upperBoundExpr.isLiteral() && !(upperBoundExpr instanceof NumericLiteral)) {
      upperBoundExpr = NumericLiteral.of(upperBoundExpr.evaluateValue(null));
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericComparisonOperator.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.numeric;

import java.util.List;
import org.locationtech.geowave.core.index.FloatCompareUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.CustomIndex;
import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator;
import org.locationtech.geowave.core.store.query.filter.expression.FilterRange;
import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;

/**
 * Implementation of comparison operators for numeric data.
 */
public class NumericComparisonOperator extends ComparisonOperator {

  /** No-arg constructor used during deserialization. */
  public NumericComparisonOperator() {}

  public NumericComparisonOperator(
      final NumericExpression expression1,
      final NumericExpression expression2,
      final CompareOp compareOperator) {
    super(expression1, expression2, compareOperator);
  }

  @Override
  public void prepare(
      final DataTypeAdapter adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    // pre-evaluate literal sub-expressions down to NumericLiteral so evaluation is cheap
    if (expression1.isLiteral() && !(expression1 instanceof NumericLiteral)) {
      expression1 = NumericLiteral.of(expression1.evaluateValue(null));
    }
    if (expression2.isLiteral() && !(expression2 instanceof NumericLiteral)) {
      expression2 = NumericLiteral.of(expression2.evaluateValue(null));
    }
  }

  @Override
  protected boolean equalTo(final Double value1, final Double value2) {
    // epsilon-tolerant equality rather than exact double comparison
    return FloatCompareUtils.checkDoublesEqual(value1, value2);
  }

  @Override
  protected boolean notEqualTo(final Double value1, final Double value2) {
    return !FloatCompareUtils.checkDoublesEqual(value1, value2);
  }

  @Override
  protected boolean lessThan(final Double value1, final Double value2) {
    return value1 < value2;
  }

  @Override
  protected boolean lessThanOrEqual(final Double value1, final Double value2) {
    return value1 <= value2;
  }

  @Override
  protected boolean greaterThan(final Double value1, final Double value2) {
    return value1 > value2;
  }

  @Override
  protected boolean greaterThanOrEqual(final Double value1, final Double value2) {
    return value1 >= value2;
  }

  @Override
  protected boolean indexSupported(final Index index) {
    return !(index instanceof CustomIndex);
  }

  @Override
  protected IndexFieldConstraints toFieldConstraints(final List> ranges) {
    return NumericFieldConstraints.of(ranges);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericExpression.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.numeric;

import org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;
import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;
import org.locationtech.geowave.core.store.query.filter.expression.Predicate;

/**
 * An expression that evaluates to a numeric (double) value.
 */
public interface NumericExpression extends ComparableExpression {

  /**
   * Create a new expression by adding the given operand to this expression. The operand can be
   * either another numeric expression or should evaluate to a numeric literal.
   *
   * @param other the object to add
   * @return an expression representing the added values
   */
  default NumericExpression add(final Object other) {
    return new Add(this, toNumericExpression(other));
  }

  /**
   * Create a new expression by subtracting the given operand from this expression. The operand can
   * be either another numeric expression or should evaluate to a numeric literal.
   *
   * @param other the object to subtract
   * @return an expression representing the subtracted values
   */
  default NumericExpression subtract(final Object other) {
    return new Subtract(this, toNumericExpression(other));
  }

  /**
   * Create a new expression by multiplying this expression by the given operand. The operand can be
   * either another numeric expression or should evaluate to a numeric literal.
   *
   * @param other the object to multiply by
   * @return an expression representing the multiplied values
   */
  default NumericExpression multiplyBy(final Object other) {
    return new Multiply(this, toNumericExpression(other));
  }

  /**
   * Create a new expression by dividing this expression by the given operand. The operand can be
   * either another numeric expression or should evaluate to a numeric literal.
   *
   * @param other the object to divide by
   * @return an expression representing the divided values
   */
  default NumericExpression divideBy(final Object other) {
    return new Divide(this, toNumericExpression(other));
  }

  /**
   * Create a new expression by taking the absolute value of this expression.
   *
   * @return an expression representing the absolute value of this expression
   */
  default NumericExpression abs() {
    return new Abs(this);
  }

  /**
   * Create a predicate that tests to see if this expression is less than the provided object. The
   * operand can be either another numeric expression, or any object that can be converted to a
   * numeric literal.
   *
   * @param other the numeric object to test against
   * @return the less than predicate
   */
  @Override
  default Predicate isLessThan(final Object other) {
    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.LESS_THAN);
  }

  /**
   * Create a predicate that tests to see if this expression is less than or equal to the provided
   * object. The operand can be either another numeric expression, or any object that can be
   * converted to a numeric literal.
   *
   * @param other the numeric object to test against
   * @return the less than or equal to predicate
   */
  @Override
  default Predicate isLessThanOrEqualTo(final Object other) {
    return new NumericComparisonOperator(
        this,
        toNumericExpression(other),
        CompareOp.LESS_THAN_OR_EQUAL);
  }

  /**
   * Create a predicate that tests to see if this expression is greater than the provided object.
   * The operand can be either another numeric expression, or any object that can be converted to a
   * numeric literal.
   *
   * @param other the numeric object to test against
   * @return the greater than predicate
   */
  @Override
  default Predicate isGreaterThan(final Object other) {
    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.GREATER_THAN);
  }

  /**
   * Create a predicate that tests to see if this expression is greater than or equal to the
   * provided object. The operand can be either another numeric expression, or any object that can
   * be converted to a numeric literal.
   *
   * @param other the numeric object to test against
   * @return the greater than or equal to predicate
   */
  @Override
  default Predicate isGreaterThanOrEqualTo(final Object other) {
    return new NumericComparisonOperator(
        this,
        toNumericExpression(other),
        CompareOp.GREATER_THAN_OR_EQUAL);
  }

  /**
   * Create a predicate that tests to see if this expression is equal to the provided object. The
   * operand can be either another numeric expression, or any object that can be converted to a
   * numeric literal.
   *
   * @param other the numeric object to test against
   * @return the equals predicate
   */
  @Override
  default Predicate isEqualTo(final Object other) {
    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.EQUAL_TO);
  }

  /**
   * Create a predicate that tests to see if this expression is not equal to the provided object.
   * The operand can be either another numeric expression, or any object that can be converted to a
   * numeric literal.
   *
   * @param other the numeric object to test against
   * @return the not equals predicate
   */
  @Override
  default Predicate isNotEqualTo(final Object other) {
    return new NumericComparisonOperator(this, toNumericExpression(other), CompareOp.NOT_EQUAL_TO);
  }

  /**
   * Create a predicate that tests to see if this expression is between the provided lower and upper
   * bounds. The operands can be either numeric expressions, or any objects that can be converted to
   * numeric literals.
   *
   * @param lowerBound the lower bound to test against
   * @param upperBound the upper bound to test against
   * @return the between predicate
   */
  @Override
  default Predicate isBetween(final Object lowerBound, final Object upperBound) {
    return new NumericBetween(
        this,
        toNumericExpression(lowerBound),
        toNumericExpression(upperBound));
  }

  /**
   * Convert the given object to a numeric expression, if it isn't one already.
   *
   * @param obj the object to convert
   * @return the numeric expression
   */
  default NumericExpression toNumericExpression(final Object obj) {
    if (obj instanceof NumericExpression) {
      return (NumericExpression) obj;
    }
    return NumericLiteral.of(obj);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericFieldConstraints.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.numeric; import java.util.Arrays; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.index.MultiDimensionalIndexData; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.numeric.NumericValue; import org.locationtech.geowave.core.store.query.filter.expression.FilterRange; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Represents field constraints for numeric index data. */ public class NumericFieldConstraints extends IndexFieldConstraints { public NumericFieldConstraints( final Map> dimensionConstraints) { super(dimensionConstraints); } /** * Converts the list of dimension constraints into multi-dimensional numeric data. 
* * @param dimensionConstraints the list of dimension constraints * @return the index data from the constrained dimensions */ public static List> toIndexData( final List> dimensionConstraints) { final List> results = Lists.newLinkedList(); generateNumericData(results, 0, dimensionConstraints, new NumericData[0]); return results; } private static void generateNumericData( final List> results, final int currentDimension, final List> dimensions, final NumericData[] current) { if (currentDimension == dimensions.size()) { results.add(new BasicNumericDataset(current)); return; } final DimensionConstraints dimension = dimensions.get(currentDimension); final List> ranges = dimension.getRanges(); for (int i = 0; i < ranges.size(); i++) { final NumericData[] copy = Arrays.copyOf(current, current.length + 1); final FilterRange range = ranges.get(i); final Double start = toStartRangeValue(range.getStart()); final Double end = toEndRangeValue(range.getEnd()); if (start.equals(end) && range.isStartInclusive() && range.isEndInclusive()) { copy[copy.length - 1] = new NumericValue(start); } else { copy[copy.length - 1] = new NumericRange( toStartRangeValue(range.getStart()), toEndRangeValue(range.getEnd()), range.isStartInclusive(), range.isEndInclusive()); } generateNumericData(results, currentDimension + 1, dimensions, copy); } } private static double toStartRangeValue(final Double value) { if (value == null) { return Double.NEGATIVE_INFINITY; } return value; } private static double toEndRangeValue(final Double value) { if (value == null) { return Double.POSITIVE_INFINITY; } return value; } /** * Create a set of numeric field constraints from the given filter ranges. 
* * @param ranges the constrained ranges * @return the numeric field constraints */ public static NumericFieldConstraints of(final List> ranges) { final Map> constraints = Maps.newHashMap(); constraints.put(0, DimensionConstraints.of(ranges)); return new NumericFieldConstraints(constraints); } /** * Create a set of numeric field constraints from the given dimension constraints. * * @param dimensionConstraints a map of constraints for each dimension * @return the numeric field constraints */ public static NumericFieldConstraints of( final Map> dimensionConstraints) { return new NumericFieldConstraints(dimensionConstraints); } /** * Create a set of numeric field constraints from the given single range. * * @param start the start of the range * @param end the end of the range * @param startInclusive whether or not the start of the range is inclusive * @param endInclusive whether or not the end of the range is inclusive * @param exact whether or not this range exactly represents the predicate * @return the numeric field constraints */ public static NumericFieldConstraints of( final Double start, final Double end, final boolean startInclusive, final boolean endInclusive, final boolean exact) { return of(0, start, end, startInclusive, endInclusive, exact); } /** * Create a set of numeric field constraints from the given single range for a dimension. 
* * @param dimension the dimension that this range is on * @param start the start of the range * @param end the end of the range * @param startInclusive whether or not the start of the range is inclusive * @param endInclusive whether or not the end of the range is inclusive * @param exact whether or not this range exactly represents the predicate * @return the numeric field constraints */ public static NumericFieldConstraints of( final Integer dimension, final Double start, final Double end, final boolean startInclusive, final boolean endInclusive, final boolean exact) { final Map> constraints = Maps.newHashMap(); constraints.put( dimension, DimensionConstraints.of( Lists.newArrayList(FilterRange.of(start, end, startInclusive, endInclusive, exact)))); return new NumericFieldConstraints(constraints); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericFieldValue.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.numeric; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; /** * A field value implementation for numeric adapter fields. */ public class NumericFieldValue extends FieldValue implements NumericExpression { public NumericFieldValue() {} public NumericFieldValue(final String fieldName) { super(fieldName); } @Override protected Double evaluateValueInternal(final Object value) { if (value instanceof Number) { return ((Number) value).doubleValue(); } throw new RuntimeException( "Field value did not evaluate to a number: " + value.getClass().toString()); } public static NumericFieldValue of(final String fieldName) { return new NumericFieldValue(fieldName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/NumericLiteral.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.numeric;

import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException;
import org.locationtech.geowave.core.store.query.filter.expression.Literal;

/**
 * A numeric implementation of literal, representing numeric literal objects.
 */
public class NumericLiteral extends Literal<Double> implements NumericExpression {

  public NumericLiteral() {}

  public NumericLiteral(final Number literal) {
    // Normalize any Number subtype to a Double so downstream comparisons are uniform
    super(literal == null ? null : literal.doubleValue());
  }

  @Override
  public <T> Double evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {
    final Number value = super.evaluateValue(adapter, entry);
    if (value == null) {
      return null;
    }
    return value.doubleValue();
  }

  /**
   * Create a numeric literal from an arbitrary object. Accepts numbers, parseable strings, other
   * numeric literals, and literal expressions that evaluate to one of those.
   *
   * @param literal the object to resolve to a numeric literal
   * @return the numeric literal
   * @throws InvalidFilterException if the object cannot be resolved to a number
   * @throws NumberFormatException if a string literal is not a parseable double
   */
  public static NumericLiteral of(Object literal) {
    if (literal == null) {
      return new NumericLiteral(null);
    }
    if (literal instanceof NumericLiteral) {
      return (NumericLiteral) literal;
    }
    if ((literal instanceof Expression) && ((Expression<?>) literal).isLiteral()) {
      // Constant expressions are collapsed to their value before conversion
      literal = ((Expression<?>) literal).evaluateValue(null);
    }
    final Number number;
    if (literal instanceof Number) {
      number = (Number) literal;
    } else if (literal instanceof String) {
      number = Double.parseDouble((String) literal);
    } else {
      throw new InvalidFilterException("Unable to resolve numeric literal.");
    }
    return new NumericLiteral(number);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/numeric/Subtract.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.numeric;

/**
 * An expression that subtracts the values of a numeric expression from the value of another numeric
 * expression.
 */
public class Subtract extends MathExpression {

  public Subtract() {}

  /**
   * @param expr1 the expression to subtract from (minuend)
   * @param expr2 the expression to subtract (subtrahend)
   */
  public Subtract(final NumericExpression expr1, final NumericExpression expr2) {
    super(expr1, expr2);
  }

  // Performs the subtraction for a single pair of evaluated operand values
  @Override
  protected double doOperation(final double value1, final double value2) {
    return value1 - value2;
  }

  // Used when rendering this expression as a string, e.g. "a - b"
  @Override
  protected String getOperatorString() {
    return "-";
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/Concat.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import java.util.List;
import java.util.Map;
import java.util.Set;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;

/**
 * An expression that concatenates two text expressions into a single text expression.
 */
public class Concat implements TextExpression {

  private TextExpression expression1;
  private TextExpression expression2;

  public Concat() {}

  public Concat(final TextExpression expr1, final TextExpression expr2) {
    expression1 = expr1;
    expression2 = expr2;
  }

  public TextExpression getExpression1() {
    return expression1;
  }

  public TextExpression getExpression2() {
    return expression2;
  }

  @Override
  public void addReferencedFields(final Set<String> fields) {
    expression1.addReferencedFields(fields);
    expression2.addReferencedFields(fields);
  }

  @Override
  public boolean isLiteral() {
    // The concatenation is constant only when both operands are constant
    return expression1.isLiteral() && expression2.isLiteral();
  }

  @Override
  public String evaluateValue(final Map<String, Object> fieldValues) {
    return concatValues(
        expression1.evaluateValue(fieldValues),
        expression2.evaluateValue(fieldValues));
  }

  @Override
  public <T> String evaluateValue(final DataTypeAdapter<T> adapter, final T entry) {
    return concatValues(
        expression1.evaluateValue(adapter, entry),
        expression2.evaluateValue(adapter, entry));
  }

  /**
   * Null-tolerant concatenation shared by both evaluate paths: a {@code null} operand contributes
   * nothing, and two {@code null}s yield {@code null}.
   */
  private static String concatValues(final String value1, final String value2) {
    if (value1 == null) {
      return value2;
    }
    if (value2 == null) {
      return value1;
    }
    return value1.concat(value2);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("concat(");
    sb.append(expression1.toString());
    sb.append(",");
    sb.append(expression2.toString());
    sb.append(")");
    return sb.toString();
  }

  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(new Persistable[] {expression1, expression2});
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final List<Persistable> expressions = PersistenceUtils.fromBinaryAsList(bytes);
    expression1 = (TextExpression) expressions.get(0);
    expression2 = (TextExpression) expressions.get(1);
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/Contains.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.text; import java.util.Set; import com.google.common.collect.Sets; /** * Predicate that passes when the first operand contains the text of the second operand. */ public class Contains extends TextBinaryPredicate { public Contains() {} public Contains(final TextExpression expression1, final TextExpression expression2) { super(expression1, expression2); } public Contains( final TextExpression expression1, final TextExpression expression2, final boolean ignoreCase) { super(expression1, expression2, ignoreCase); } @Override public boolean evaluateInternal(final String value1, final String value2) { return value1.contains(value2); } @Override public Set getConstrainableFields() { return Sets.newHashSet(); } @Override public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" CONTAINS "); sb.append(expression2.toString()); return sb.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/EndsWith.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

* See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import java.util.Set;
import org.locationtech.geowave.core.index.text.CaseSensitivity;
import org.locationtech.geowave.core.index.text.TextIndexStrategy;
import org.locationtech.geowave.core.index.text.TextSearchType;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.index.CustomIndex;
import org.locationtech.geowave.core.store.query.filter.expression.FieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints;
import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import com.google.common.collect.Sets;

/**
 * Predicate that passes when the first operand ends with the second operand.
 */
public class EndsWith extends TextBinaryPredicate {

  public EndsWith() {}

  public EndsWith(final TextExpression expression1, final TextExpression expression2) {
    super(expression1, expression2);
  }

  public EndsWith(
      final TextExpression expression1,
      final TextExpression expression2,
      final boolean ignoreCase) {
    super(expression1, expression2, ignoreCase);
  }

  // Case handling is applied by the superclass before this is called
  @Override
  public boolean evaluateInternal(final String value1, final String value2) {
    return value1.endsWith(value2);
  }

  @Override
  public Set<String> getConstrainableFields() {
    // Only a (field ENDS WITH literal) form can constrain an index
    if ((expression1 instanceof FieldValue) && expression2.isLiteral()) {
      return Sets.newHashSet(((FieldValue<?>) expression1).getFieldName());
    }
    return Sets.newHashSet();
  }

  /**
   * Produces index constraints when the predicate compares an indexed field against a literal and
   * the index is a text index supporting ENDS_WITH searches with the required case sensitivity.
   */
  @SuppressWarnings("unchecked")
  @Override
  public <V extends Comparable<V>> FilterConstraints<V> getConstraints(
      final Class<V> constraintClass,
      final DataStatisticsStore statsStore,
      final DataTypeAdapter<?> adapter,
      final AdapterToIndexMapping indexMapping,
      final Index index,
      final Set<String> indexedFields) {
    if ((expression1 instanceof FieldValue)
        && indexedFields.contains(((FieldValue<?>) expression1).getFieldName())
        && expression2.isLiteral()
        && (index instanceof CustomIndex)
        && (((CustomIndex<?, ?>) index).getCustomIndexStrategy() instanceof TextIndexStrategy)
        && constraintClass.isAssignableFrom(String.class)) {
      final TextIndexStrategy<?> indexStrategy =
          (TextIndexStrategy<?>) ((CustomIndex<?, ?>) index).getCustomIndexStrategy();
      final String value = expression2.evaluateValue(null, null);
      if (value != null) {
        // Both branches of the original condition required ENDS_WITH support; only the case
        // sensitivity check depends on ignoreCase
        if (indexStrategy.isSupported(
            ignoreCase ? CaseSensitivity.CASE_INSENSITIVE : CaseSensitivity.CASE_SENSITIVE)
            && indexStrategy.isSupported(TextSearchType.ENDS_WITH)) {
          return FilterConstraints.of(
              adapter,
              indexMapping,
              index,
              ((FieldValue<?>) expression1).getFieldName(),
              (IndexFieldConstraints<V>) TextFieldConstraints.of(
                  value,
                  value,
                  true,
                  true,
                  true,
                  !ignoreCase,
                  true));
        }
      }
    }
    return FilterConstraints.empty();
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder(expression1.toString());
    sb.append(" ENDS WITH ");
    sb.append(expression2.toString());
    return sb.toString();
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/StartsWith.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.text; import java.util.Set; import org.locationtech.geowave.core.index.text.CaseSensitivity; import org.locationtech.geowave.core.index.text.TextIndexStrategy; import org.locationtech.geowave.core.index.text.TextSearchType; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.FilterConstraints; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import com.google.common.collect.Sets; /** * Predicate that passes when the first operand starts with the second operand. 
*/ public class StartsWith extends TextBinaryPredicate { public StartsWith() {} public StartsWith(final TextExpression expression1, final TextExpression expression2) { super(expression1, expression2); } public StartsWith( final TextExpression expression1, final TextExpression expression2, final boolean ignoreCase) { super(expression1, expression2, ignoreCase); } @Override public boolean evaluateInternal(final String value1, final String value2) { return value1.startsWith(value2); } @Override public Set getConstrainableFields() { if ((expression1 instanceof FieldValue) && expression2.isLiteral()) { return Sets.newHashSet(((FieldValue) expression1).getFieldName()); } return Sets.newHashSet(); } @SuppressWarnings("unchecked") @Override public > FilterConstraints getConstraints( final Class constraintClass, final DataStatisticsStore statsStore, final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index, final Set indexedFields) { if ((expression1 instanceof FieldValue) && indexedFields.contains(((FieldValue) expression1).getFieldName()) && expression2.isLiteral() && (index instanceof CustomIndex) && (((CustomIndex) index).getCustomIndexStrategy() instanceof TextIndexStrategy) && constraintClass.isAssignableFrom(String.class)) { final TextIndexStrategy indexStrategy = (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy(); final String value = expression2.evaluateValue(null, null); if (value != null) { if ((ignoreCase && indexStrategy.isSupported(CaseSensitivity.CASE_INSENSITIVE) && indexStrategy.isSupported(TextSearchType.BEGINS_WITH)) || (!ignoreCase && indexStrategy.isSupported(CaseSensitivity.CASE_SENSITIVE) && indexStrategy.isSupported(TextSearchType.BEGINS_WITH))) { return FilterConstraints.of( adapter, indexMapping, index, ((FieldValue) expression1).getFieldName(), (IndexFieldConstraints) TextFieldConstraints.of( value, value, true, true, true, !ignoreCase, false)); } } } return FilterConstraints.empty(); } @Override 
public String toString() { final StringBuilder sb = new StringBuilder(expression1.toString()); sb.append(" STARTS WITH "); sb.append(expression2.toString()); return sb.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextBetween.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.text; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.text.CaseSensitivity; import org.locationtech.geowave.core.index.text.TextIndexStrategy; import org.locationtech.geowave.core.index.text.TextSearchType; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.query.filter.expression.Between; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; /** * Implementation of between for text data. 
*/ public class TextBetween extends Between { private boolean ignoreCase; public TextBetween() {} public TextBetween( final TextExpression valueExpr, final TextExpression lowerBoundExpr, final TextExpression upperBoundExpr) { this(valueExpr, lowerBoundExpr, upperBoundExpr, false); } public TextBetween( final TextExpression valueExpr, final TextExpression lowerBoundExpr, final TextExpression upperBoundExpr, final boolean ignoreCase) { super(valueExpr, lowerBoundExpr, upperBoundExpr); this.ignoreCase = ignoreCase; } @Override protected boolean indexSupported(final Index index) { if ((index instanceof CustomIndex) && (((CustomIndex) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) { final TextIndexStrategy indexStrategy = (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy(); return (indexStrategy.isSupported(TextSearchType.BEGINS_WITH) && indexStrategy.isSupported( ignoreCase ? CaseSensitivity.CASE_INSENSITIVE : CaseSensitivity.CASE_SENSITIVE)); } return false; } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { if (valueExpr.isLiteral() && !(valueExpr instanceof TextLiteral)) { valueExpr = TextLiteral.of(valueExpr.evaluateValue(null)); } if (lowerBoundExpr.isLiteral() && !(lowerBoundExpr instanceof TextLiteral)) { lowerBoundExpr = TextLiteral.of(lowerBoundExpr.evaluateValue(null)); } if (upperBoundExpr.isLiteral() && !(upperBoundExpr instanceof TextLiteral)) { upperBoundExpr = TextLiteral.of(upperBoundExpr.evaluateValue(null)); } } @Override protected boolean evaluateInternal( final String value, final String lowerBound, final String upperBound) { if (ignoreCase) { final String valueLower = value.toLowerCase(); return (valueLower.compareTo(lowerBound.toLowerCase()) >= 0) && (valueLower.compareTo(upperBound.toLowerCase()) <= 0); } return (value.compareTo(lowerBound) >= 0) && (value.compareTo(upperBound) <= 0); } @Override public byte[] toBinary() { final byte[] 
superBinary = super.toBinary(); final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length); buffer.put(ignoreCase ? (byte) 1 : (byte) 0); buffer.put(superBinary); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); ignoreCase = buffer.get() == (byte) 1; final byte[] superBinary = new byte[buffer.remaining()]; buffer.get(superBinary); super.fromBinary(superBinary); } @Override protected IndexFieldConstraints toConstraints( final String lowerBound, final String upperBound) { // It's not exact because strings with the upper bound prefix may be greater than the upper // bound return TextFieldConstraints.of(lowerBound, upperBound, true, true, false, !ignoreCase, false); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextBinaryPredicate.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.text; import java.nio.ByteBuffer; import java.util.Map; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.filter.expression.BinaryPredicate; /** * Abstract class for comparing two text expressions. */ public abstract class TextBinaryPredicate extends BinaryPredicate { protected boolean ignoreCase; public TextBinaryPredicate() {} public TextBinaryPredicate(final TextExpression expression1, final TextExpression expression2) { this(expression1, expression2, false); } public TextBinaryPredicate( final TextExpression expression1, final TextExpression expression2, final boolean ignoreCase) { super(expression1, expression2); this.ignoreCase = ignoreCase; } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { if (expression1.isLiteral() && !(expression1 instanceof TextLiteral)) { expression1 = TextLiteral.of(expression1.evaluateValue(null)); } if (expression2.isLiteral() && !(expression2 instanceof TextLiteral)) { expression2 = TextLiteral.of(expression2.evaluateValue(null)); } } @Override public boolean evaluate(final Map fieldValues) { final Object value1 = expression1.evaluateValue(fieldValues); final Object value2 = expression2.evaluateValue(fieldValues); return evaluateValues(value1, value2); } @Override public boolean evaluate(final DataTypeAdapter adapter, final T entry) { final Object value1 = 
expression1.evaluateValue(adapter, entry); final Object value2 = expression2.evaluateValue(adapter, entry); return evaluateValues(value1, value2); } private boolean evaluateValues(final Object value1, final Object value2) { if ((value1 == null) || (value2 == null)) { return false; } if (ignoreCase) { return evaluateInternal(value1.toString().toLowerCase(), value2.toString().toLowerCase()); } return evaluateInternal(value1.toString(), value2.toString()); } protected abstract boolean evaluateInternal(String value1, String value2); public boolean isIgnoreCase() { return ignoreCase; } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length); buffer.put(ignoreCase ? (byte) 1 : (byte) 0); buffer.put(superBinary); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); ignoreCase = buffer.get() == (byte) 1; final byte[] superBinary = new byte[buffer.remaining()]; buffer.get(superBinary); super.fromBinary(superBinary); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextComparisonOperator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression.text; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.text.CaseSensitivity; import org.locationtech.geowave.core.index.text.TextIndexStrategy; import org.locationtech.geowave.core.index.text.TextSearchType; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.FilterRange; import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints; /** * Implementation of comparison operators for text data. 
*/ public class TextComparisonOperator extends ComparisonOperator { private boolean ignoreCase; public TextComparisonOperator() {} public TextComparisonOperator( final TextExpression expression1, final TextExpression expression2, final CompareOp compareOperator) { this(expression1, expression2, compareOperator, false); } public TextComparisonOperator( final TextExpression expression1, final TextExpression expression2, final CompareOp compareOperator, final boolean ignoreCase) { super(expression1, expression2, compareOperator); this.ignoreCase = ignoreCase; } @Override public void prepare( final DataTypeAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { if (expression1.isLiteral() && !(expression1 instanceof TextLiteral)) { expression1 = TextLiteral.of(expression1.evaluateValue(null)); } if (expression2.isLiteral() && !(expression2 instanceof TextLiteral)) { expression2 = TextLiteral.of(expression2.evaluateValue(null)); } } @Override public boolean isExact() { // TODO: This should really be dependent on the index strategy, but for now, the text index // strategy will only be exact if the operator is >= or < due to the way the prefix range scans // work switch (compareOperator) { case GREATER_THAN_OR_EQUAL: case LESS_THAN: return true; default: return false; } } @Override protected boolean equalTo(final String value1, final String value2) { if (ignoreCase) { return value1.equalsIgnoreCase(value2); } return value1.equals(value2); } @Override protected boolean notEqualTo(final String value1, final String value2) { if (ignoreCase) { return !value1.equalsIgnoreCase(value2); } return !value1.equals(value2); } @Override protected boolean lessThan(final String value1, final String value2) { if (ignoreCase) { return value1.toLowerCase().compareTo(value2.toLowerCase()) < 0; } return value1.compareTo(value2) < 0; } @Override protected boolean lessThanOrEqual(final String value1, final String value2) { if (ignoreCase) { return 
value1.toLowerCase().compareTo(value2.toLowerCase()) <= 0; } return value1.compareTo(value2) <= 0; } @Override protected boolean greaterThan(final String value1, final String value2) { if (ignoreCase) { return value1.toLowerCase().compareTo(value2.toLowerCase()) > 0; } return value1.compareTo(value2) > 0; } @Override protected boolean greaterThanOrEqual(final String value1, final String value2) { if (ignoreCase) { return value1.toLowerCase().compareTo(value2.toLowerCase()) >= 0; } return value1.compareTo(value2) >= 0; } @Override public byte[] toBinary() { final byte[] superBinary = super.toBinary(); final ByteBuffer buffer = ByteBuffer.allocate(1 + superBinary.length); buffer.put(ignoreCase ? (byte) 1 : (byte) 0); buffer.put(superBinary); return buffer.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); ignoreCase = buffer.get() == (byte) 1; final byte[] superBinary = new byte[buffer.remaining()]; buffer.get(superBinary); super.fromBinary(superBinary); } @Override protected boolean indexSupported(final Index index) { if ((index instanceof CustomIndex) && (((CustomIndex) index).getCustomIndexStrategy() instanceof TextIndexStrategy)) { final TextIndexStrategy indexStrategy = (TextIndexStrategy) ((CustomIndex) index).getCustomIndexStrategy(); return (indexStrategy.isSupported(TextSearchType.BEGINS_WITH) && indexStrategy.isSupported( ignoreCase ? 
CaseSensitivity.CASE_INSENSITIVE : CaseSensitivity.CASE_SENSITIVE)); } return false; } @Override protected IndexFieldConstraints toFieldConstraints( final List> ranges) { return TextFieldConstraints.of(ranges); } @Override protected FilterRange toFilterRange( final String start, final String end, boolean startInclusive, final boolean endInclusive) { // Entries with the same prefix may be greater than the prefix or not equal to it, so these // operators need to include those prefixes in the scan switch (compareOperator) { case GREATER_THAN: case NOT_EQUAL_TO: startInclusive = true; break; default: break; } return TextFilterRange.of( start, end, startInclusive, endInclusive, isExact(), !ignoreCase, false); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextExpression.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression;
import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp;
import org.locationtech.geowave.core.store.query.filter.expression.FieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.Predicate;

/**
 * An expression that evaluates to a text (string) value.
 */
public interface TextExpression extends ComparableExpression {

  /**
   * Create a new expression by concatenating this expression and a given operand. The operand can
   * be either another text expression or should evaluate to a text literal.
   *
   * @param other the object to concatenate
   * @return an expression representing the concatenated values
   */
  default TextExpression concat(final Object other) {
    return new Concat(this, toTextExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression ends with the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @return the ends with predicate
   */
  default Predicate endsWith(final Object other) {
    return new EndsWith(this, toTextExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression ends with the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the ends with predicate
   */
  default Predicate endsWith(final Object other, final boolean ignoreCase) {
    return new EndsWith(this, toTextExpression(other), ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression starts with the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @return the starts with predicate
   */
  default Predicate startsWith(final Object other) {
    return new StartsWith(this, toTextExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression starts with the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the starts with predicate
   */
  default Predicate startsWith(final Object other, final boolean ignoreCase) {
    return new StartsWith(this, toTextExpression(other), ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression contains the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @return the contains predicate
   */
  default Predicate contains(final Object other) {
    return new Contains(this, toTextExpression(other));
  }

  /**
   * Create a predicate that tests to see if this expression contains the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the contains predicate
   */
  default Predicate contains(final Object other, final boolean ignoreCase) {
    return new Contains(this, toTextExpression(other), ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is less than the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @return the less than predicate
   */
  @Override
  default Predicate isLessThan(final Object other) {
    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.LESS_THAN);
  }

  /**
   * Create a predicate that tests to see if this expression is less than the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the less than predicate
   */
  default Predicate isLessThan(final Object other, final boolean ignoreCase) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.LESS_THAN,
        ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is less than or equal to the provided
   * object. The operand can be either another text expression, or any object that can be converted
   * to a text literal.
   *
   * @param other the text object to test against
   * @return the less than or equal to predicate
   */
  @Override
  default Predicate isLessThanOrEqualTo(final Object other) {
    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.LESS_THAN_OR_EQUAL);
  }

  /**
   * Create a predicate that tests to see if this expression is less than or equal to the provided
   * object. The operand can be either another text expression, or any object that can be converted
   * to a text literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the less than or equal to predicate
   */
  default Predicate isLessThanOrEqualTo(final Object other, final boolean ignoreCase) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.LESS_THAN_OR_EQUAL,
        ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is greater than the provided object.
   * The operand can be either another text expression, or any object that can be converted to a
   * text literal.
   *
   * @param other the text object to test against
   * @return the greater than predicate
   */
  @Override
  default Predicate isGreaterThan(final Object other) {
    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.GREATER_THAN);
  }

  /**
   * Create a predicate that tests to see if this expression is greater than the provided object.
   * The operand can be either another text expression, or any object that can be converted to a
   * text literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the greater than predicate
   */
  default Predicate isGreaterThan(final Object other, final boolean ignoreCase) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.GREATER_THAN,
        ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is greater than or equal to the
   * provided object. The operand can be either another text expression, or any object that can be
   * converted to a text literal.
   *
   * @param other the text object to test against
   * @return the greater than or equal to predicate
   */
  @Override
  default Predicate isGreaterThanOrEqualTo(final Object other) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.GREATER_THAN_OR_EQUAL);
  }

  /**
   * Create a predicate that tests to see if this expression is greater than or equal to the
   * provided object. The operand can be either another text expression, or any object that can be
   * converted to a text literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the greater than or equal to predicate
   */
  default Predicate isGreaterThanOrEqualTo(final Object other, final boolean ignoreCase) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.GREATER_THAN_OR_EQUAL,
        ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is between the provided lower and upper
   * bounds. The operands can be either other text expressions, or any objects that can be converted
   * to text literals.
   *
   * @param lowerBound the lower bound text object to test against
   * @param upperBound the upper bound text object to test against
   * @return the between predicate
   */
  @Override
  default Predicate isBetween(final Object lowerBound, final Object upperBound) {
    return new TextBetween(this, toTextExpression(lowerBound), toTextExpression(upperBound));
  }

  /**
   * Create a predicate that tests to see if this expression is between the provided lower and upper
   * bounds. The operands can be either other text expressions, or any objects that can be converted
   * to text literals.
   *
   * @param lowerBound the lower bound text object to test against
   * @param upperBound the upper bound text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the between predicate
   */
  default Predicate isBetween(
      final Object lowerBound,
      final Object upperBound,
      final boolean ignoreCase) {
    return new TextBetween(
        this,
        toTextExpression(lowerBound),
        toTextExpression(upperBound),
        ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is equal to the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @return the equals predicate
   */
  @Override
  default Predicate isEqualTo(final Object other) {
    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.EQUAL_TO);
  }

  /**
   * Create a predicate that tests to see if this expression is equal to the provided object. The
   * operand can be either another text expression, or any object that can be converted to a text
   * literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the equals predicate
   */
  default Predicate isEqualTo(final Object other, final boolean ignoreCase) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.EQUAL_TO,
        ignoreCase);
  }

  /**
   * Create a predicate that tests to see if this expression is not equal to the provided object.
   * The operand can be either another text expression, or any object that can be converted to a
   * text literal.
   *
   * @param other the text object to test against
   * @return the not equals predicate
   */
  @Override
  default Predicate isNotEqualTo(final Object other) {
    return new TextComparisonOperator(this, toTextExpression(other), CompareOp.NOT_EQUAL_TO);
  }

  /**
   * Create a predicate that tests to see if this expression is not equal to the provided object.
   * The operand can be either another text expression, or any object that can be converted to a
   * text literal.
   *
   * @param other the text object to test against
   * @param ignoreCase whether or not to ignore the casing of the expressions
   * @return the not equals predicate
   */
  default Predicate isNotEqualTo(final Object other, final boolean ignoreCase) {
    return new TextComparisonOperator(
        this,
        toTextExpression(other),
        CompareOp.NOT_EQUAL_TO,
        ignoreCase);
  }

  /**
   * Convert the given object to a text expression, if it isn't one already.
   *
   * @param obj the object to convert
   * @return the text expression
   */
  default TextExpression toTextExpression(final Object obj) {
    if (obj instanceof TextExpression) {
      return (TextExpression) obj;
    } else if (obj instanceof FieldValue) {
      // Re-wrap generic field values as text field values so the referenced field participates
      // in text-typed comparisons.
      return TextFieldValue.of(((FieldValue) obj).getFieldName());
    }
    // Anything else (String, Number, etc.) is treated as a constant text literal.
    return TextLiteral.of(obj);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextFieldConstraints.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.locationtech.geowave.core.index.MultiDimensionalIndexData;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.text.BasicTextDataset;
import org.locationtech.geowave.core.index.text.TextData;
import org.locationtech.geowave.core.index.text.TextRange;
import org.locationtech.geowave.core.index.text.TextValue;
import org.locationtech.geowave.core.store.query.filter.expression.FilterRange;
import org.locationtech.geowave.core.store.query.filter.expression.IndexFieldConstraints;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

/**
 * Represents field constraints for text index data.
 */
public class TextFieldConstraints extends IndexFieldConstraints {

  public TextFieldConstraints(
      final Map> dimensionConstraints) {
    super(dimensionConstraints);
  }

  /**
   * Converts the list of dimension constraints into multi-dimensional text data.
   *
   * @param dimensionConstraints the list of dimension constraints
   * @return the index data from the constrained dimensions
   */
  public static List> toIndexData(
      final List> dimensionConstraints) {
    final List> results = Lists.newLinkedList();
    generateTextData(results, 0, dimensionConstraints, new TextData[0]);
    return results;
  }

  // Recursively expands the cartesian product of per-dimension ranges: each complete path through
  // the dimensions produces one BasicTextDataset holding one TextData per dimension.
  private static void generateTextData(
      final List> results,
      final int currentDimension,
      final List> dimensions,
      final TextData[] current) {
    if (currentDimension == dimensions.size()) {
      // Base case: one TextData accumulated for every dimension.
      results.add(new BasicTextDataset(current));
      return;
    }
    final DimensionConstraints dimension = dimensions.get(currentDimension);
    final List> ranges = dimension.getRanges();
    for (int i = 0; i < ranges.size(); i++) {
      final TextData[] copy = Arrays.copyOf(current, current.length + 1);
      final TextFilterRange range = (TextFilterRange) ranges.get(i);
      final String start = toStartRangeValue(range.getStart());
      final String end = toEndRangeValue(range.getEnd());
      if (start.equals(end) && range.isStartInclusive() && range.isEndInclusive()) {
        // Degenerate fully-inclusive range collapses to an exact value.
        copy[copy.length - 1] = new TextValue(start, range.isCaseSensitive(), range.isReversed());
      } else {
        copy[copy.length - 1] =
            new TextRange(
                toStartRangeValue(range.getStart()),
                toEndRangeValue(range.getEnd()),
                range.isStartInclusive(),
                range.isEndInclusive(),
                range.isCaseSensitive(),
                range.isReversed());
      }
      generateTextData(results, currentDimension + 1, dimensions, copy);
    }
  }

  // A null start bound is treated as the empty string (minimal text value).
  private static String toStartRangeValue(final String value) {
    if (value == null) {
      return "";
    }
    return value;
  }

  // A null end bound is replaced by a sentinel string built from seven 0xFF bytes, intended as an
  // effectively-maximal value. NOTE(review): 0xFF is not valid UTF-8 — presumably
  // StringUtils.stringFromBinary tolerates raw bytes here; confirm.
  private static String toEndRangeValue(final String value) {
    if (value == null) {
      return StringUtils.stringFromBinary(
          new byte[] {
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF,
              (byte) 0xFF});
    }
    return value;
  }

  /**
   * Create a set of text field constraints from the given filter ranges.
   *
   * @param ranges the constrained ranges
   * @return the text field constraints
   */
  public static TextFieldConstraints of(final List> ranges) {
    final Map> constraints = Maps.newHashMap();
    constraints.put(0, DimensionConstraints.of(ranges));
    return new TextFieldConstraints(constraints);
  }

  /**
   * Create a set of text field constraints from the given single range.
   *
   * @param start the start of the range
   * @param end the end of the range
   * @param startInclusive whether or not the start of the range is inclusive
   * @param endInclusive whether or not the end of the range is inclusive
   * @param exact whether or not this range exactly represents the predicate
   * @param caseSensitive whether or not this range is case sensitive
   * @param reversed whether or not this range is for a reversed text index
   * @return the text field constraints
   */
  public static TextFieldConstraints of(
      final String start,
      final String end,
      final boolean startInclusive,
      final boolean endInclusive,
      final boolean exact,
      final boolean caseSensitive,
      final boolean reversed) {
    // Single-range convenience overload; dimension 0 is the default text dimension.
    return of(0, start, end, startInclusive, endInclusive, exact, caseSensitive, reversed);
  }

  /**
   * Create a set of text field constraints for a specific dimension from the given single range.
   *
   * @param dimension the dimension for the constraints
   * @param start the start of the range
   * @param end the end of the range
   * @param startInclusive whether or not the start of the range is inclusive
   * @param endInclusive whether or not the end of the range is inclusive
   * @param exact whether or not this range exactly represents the predicate
   * @param caseSensitive whether or not this range is case sensitive
   * @param reversed whether or not this range is for a reversed text index
   * @return the text field constraints
   */
  public static TextFieldConstraints of(
      final Integer dimension,
      final String start,
      final String end,
      final boolean startInclusive,
      final boolean endInclusive,
      final boolean exact,
      final boolean caseSensitive,
      final boolean reversed) {
    final Map> constraints = Maps.newHashMap();
    constraints.put(
        dimension,
        DimensionConstraints.of(
            Lists.newArrayList(
                TextFilterRange.of(
                    start,
                    end,
                    startInclusive,
                    endInclusive,
                    exact,
                    caseSensitive,
                    reversed))));
    return new TextFieldConstraints(constraints);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextFieldValue.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import org.locationtech.geowave.core.store.query.filter.expression.FieldValue;

/**
 * A field value implementation for string adapter fields.
 */
public class TextFieldValue extends FieldValue implements TextExpression {

  // No-arg constructor required for serialization/Persistable instantiation.
  public TextFieldValue() {}

  public TextFieldValue(final String fieldName) {
    super(fieldName);
  }

  /**
   * @param fieldName the name of the adapter field to reference
   * @return a text field value for the given field name
   */
  public static TextFieldValue of(final String fieldName) {
    return new TextFieldValue(fieldName);
  }

  @Override
  protected String evaluateValueInternal(final Object value) {
    // Any non-string field value is converted via toString so it can be compared as text.
    // NOTE(review): a null value would NPE here — presumably the caller guards against null;
    // confirm against FieldValue.evaluateValue.
    return value.toString();
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextFilterRange.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import org.locationtech.geowave.core.store.query.filter.expression.FilterRange;

/**
 * Overrides much of the logic for filter ranges to prevent constraints with different casing
 * parameters from being merged together.
*/ public class TextFilterRange extends FilterRange { private final boolean caseSensitive; private final boolean reversed; public TextFilterRange( final String start, final String end, final boolean startInclusive, final boolean endInclusive, final boolean exact, final boolean caseSensitive, final boolean reversed) { super(start, end, startInclusive, endInclusive, exact); this.caseSensitive = caseSensitive; this.reversed = reversed; } public boolean isCaseSensitive() { return caseSensitive; } public boolean isReversed() { return reversed; } @Override protected boolean isAfter(final FilterRange other, final boolean startPoint) { final TextFilterRange textRange = (TextFilterRange) other; if ((caseSensitive == textRange.caseSensitive) && (reversed == textRange.reversed)) { return super.isAfter(other, startPoint); } final int caseCompare = Boolean.compare(caseSensitive, textRange.caseSensitive); if (caseCompare < 0) { return false; } if (caseCompare > 0) { return true; } final int reverseCompare = Boolean.compare(reversed, textRange.reversed); if (reverseCompare < 0) { return false; } return true; } @Override protected boolean isBefore(final FilterRange other, final boolean startPoint) { final TextFilterRange textRange = (TextFilterRange) other; if ((caseSensitive == textRange.caseSensitive) && (reversed == textRange.reversed)) { return super.isAfter(other, startPoint); } final int caseCompare = Boolean.compare(caseSensitive, textRange.caseSensitive); if (caseCompare < 0) { return true; } if (caseCompare > 0) { return false; } final int reverseCompare = Boolean.compare(reversed, textRange.reversed); if (reverseCompare < 0) { return true; } return false; } @Override protected boolean overlaps(final FilterRange other) { if ((caseSensitive == ((TextFilterRange) other).caseSensitive) && (reversed == ((TextFilterRange) other).reversed)) { return super.overlaps(other); } return false; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); 
result = (prime * result) + (caseSensitive ? 1 : 0); result = (prime * result) + (reversed ? 1 : 0); return result; } @Override public boolean equals(final Object other) { if (super.equals(other) && (other instanceof TextFilterRange)) { final TextFilterRange otherRange = (TextFilterRange) other; return (caseSensitive == otherRange.caseSensitive) && (reversed == otherRange.reversed); } return false; } @Override public int compareTo(final FilterRange o) { if (!(o instanceof TextFilterRange)) { return -1; } final TextFilterRange other = (TextFilterRange) o; int compare = Boolean.compare(caseSensitive, other.caseSensitive); if (compare == 0) { compare = Boolean.compare(reversed, other.reversed); } if (compare == 0) { return super.compareTo(other); } return compare; } public static TextFilterRange of( final String start, final String end, final boolean startInclusive, final boolean endInclusive, final boolean exact, final boolean caseSensitive, final boolean reversed) { return new TextFilterRange( start, end, startInclusive, endInclusive, exact, caseSensitive, reversed); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/filter/expression/text/TextLiteral.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter.expression.text;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.query.filter.expression.Expression;
import org.locationtech.geowave.core.store.query.filter.expression.Literal;

/**
 * A text implementation of literal, representing text literal objects.
 */
public class TextLiteral extends Literal implements TextExpression {

  // No-arg constructor required for serialization/Persistable instantiation.
  public TextLiteral() {}

  public TextLiteral(final String literal) {
    super(literal);
  }

  /**
   * Convert the given object into a text literal. Literal expressions are evaluated first; any
   * other object is converted via toString.
   *
   * @param literal the object to convert
   * @return the text literal
   */
  public static TextLiteral of(Object literal) {
    if (literal == null) {
      return new TextLiteral(null);
    }
    if (literal instanceof TextLiteral) {
      return (TextLiteral) literal;
    }
    if (literal instanceof Expression && ((Expression) literal).isLiteral()) {
      literal = ((Expression) literal).evaluateValue(null);
      // BUG FIX: a literal expression may still evaluate to null (e.g. a wrapped null literal);
      // previously this fell through to literal.toString() and threw a NullPointerException.
      if (literal == null) {
        return new TextLiteral(null);
      }
    }
    return new TextLiteral(literal.toString());
  }

  @Override
  public String toString() {
    // Quote the value for display; a null literal renders as the bare word "null".
    return literal == null ? "null" : "'" + literal + "'";
  }

  @Override
  public byte[] toBinary() {
    // Layout: 1 marker byte (0 = null literal, 1 = present) followed by the UTF-8 string bytes.
    if (literal == null) {
      return new byte[] {(byte) 0};
    }
    final byte[] literalBytes = StringUtils.stringToBinary(literal);
    final ByteBuffer buffer = ByteBuffer.allocate(1 + literalBytes.length);
    buffer.put((byte) 1);
    buffer.put(literalBytes);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final byte nullByte = buffer.get();
    if (nullByte == 0) {
      literal = null;
      return;
    }
    final byte[] literalBytes = new byte[buffer.remaining()];
    buffer.get(literalBytes);
    literal = StringUtils.stringFromBinary(literalBytes);
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/AdapterEntryResultSet.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.gwql;

import java.util.List;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import com.google.common.collect.Lists;

/**
 * A result set that wraps adapter entries using a given set of column selectors.
 */
public class AdapterEntryResultSet implements ResultSet {
  private final List selectors;
  private final DataTypeAdapter adapter;
  private final CloseableIterator entries;

  /**
   * @param selectors the columns to select from the entries
   * @param adapter the data type adapter
   * @param entries the query results
   */
  public AdapterEntryResultSet(
      final List selectors,
      final DataTypeAdapter adapter,
      final CloseableIterator entries) {
    this.selectors = selectors;
    this.adapter = adapter;
    this.entries = entries;
  }

  @Override
  public void close() {
    // Closes the underlying entry iterator (and any data store resources it holds).
    entries.close();
  }

  @Override
  public boolean hasNext() {
    return entries.hasNext();
  }

  @Override
  public Result next() {
    T entry = entries.next();
    List values = Lists.newArrayListWithCapacity(selectors.size());
    for (Selector column : selectors) {
      if (column instanceof ColumnSelector) {
        values.add(adapter.getFieldValue(entry, ((ColumnSelector) column).columnName()));
      }
      // NOTE(review): non-column selectors (e.g. aggregations) contribute no value here, so
      // the values list may be shorter than selectors.size(); presumably this result set is
      // only used with simple column selectors — confirm against callers.
    }
    return new Result(values);
  }

  @Override
  public int columnCount() {
    return selectors.size();
  }

  @Override
  public String columnName(final int index) {
    return selectors.get(index).name();
  }

  @Override
  public int columnIndex(final String columnName) {
    // Linear scan; returns -1 when no selector matches the given name.
    for (int i = 0; i < selectors.size(); i++) {
      if (selectors.get(i).name().equals(columnName)) {
        return i;
      }
    }
    return -1;
  }

  @Override
  public Class columnType(final int index) {
    // Assumes every selector is a ColumnSelector (see note in next()).
    ColumnSelector column = (ColumnSelector) selectors.get(index);
    return adapter.getFieldDescriptor(column.columnName()).bindingClass();
  }

  /**
   * @return the adapter
   */
  public DataTypeAdapter getAdapter() {
    return adapter;
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/AggregationSelector.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; /** * Selector that applies an aggregation function to the query. */ public class AggregationSelector extends Selector { private final String functionName; private final String[] functionArgs; private final String name; /** * @param functionName the name of the function * @param functionArgs the function arguments */ public AggregationSelector(final String functionName, final String[] functionArgs) { this(functionName, functionArgs, null); } /** * @param functionName the name of the function * @param functionArgs the funciton arguments * @param alias the column alias of this selector */ public AggregationSelector( final String functionName, final String[] functionArgs, final String alias) { super(SelectorType.AGGREGATION, alias); this.functionName = functionName; this.functionArgs = functionArgs; name = functionName.toUpperCase() + "(" + String.join(",", functionArgs) + ")"; } /** * @return the function name */ public String functionName() { return functionName; } /** * @return the function arguments */ public String[] functionArgs() { return functionArgs; } /** * @return the display name of this selector */ @Override public String selectorName() { return name; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/CastableType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.gwql;

import org.locationtech.geowave.core.store.query.filter.expression.Expression;

/**
 * A named type that objects and expressions can be cast to within the GeoWave query language
 * (presumably via a CAST-style function — verify against the GWQL grammar).
 */
public interface CastableType {
  /**
   * @return the name of this castable type
   */
  String getName();

  /**
   * Cast the given object or expression to an expression of this type.
   *
   * @param objectOrExpression the object or expression to cast
   * @return the resulting expression
   */
  Expression cast(final Object objectOrExpression);
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/ColumnSelector.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; /** * Selector that pulls a value from a single column of the results set. */ public class ColumnSelector extends Selector { private final String columnName; /** * @param columnName the column to select */ public ColumnSelector(final String columnName) { this(columnName, null); } /** * @param columnName the column to select * @param alias the alias of the column */ public ColumnSelector(final String columnName, final String alias) { super(SelectorType.SIMPLE, alias); this.columnName = columnName; } /** * @return the selected column name */ public String columnName() { return columnName; } /** * @return the display name of this selector */ @Override public String selectorName() { return columnName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/ErrorListener.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.Recognizer; /** * Error listener that wraps ANTLR syntax errors in our own exception class. */ public class ErrorListener extends BaseErrorListener { @Override public void syntaxError( Recognizer recognizer, Object offendingSymbol, int line, int position, String message, RecognitionException e) throws GWQLParseException { throw new GWQLParseException(line, position, message.replace(" K_", " ")); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLCoreExtensions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.gwql;

import org.locationtech.geowave.core.store.query.filter.expression.BooleanFieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue;
import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue;
import org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;
import org.locationtech.geowave.core.store.query.gwql.function.aggregation.CountFunction;
import org.locationtech.geowave.core.store.query.gwql.function.aggregation.MaxFunction;
import org.locationtech.geowave.core.store.query.gwql.function.aggregation.MinFunction;
import org.locationtech.geowave.core.store.query.gwql.function.aggregation.SumFunction;
import org.locationtech.geowave.core.store.query.gwql.function.expression.AbsFunction;
import org.locationtech.geowave.core.store.query.gwql.function.expression.ConcatFunction;
import org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;
import org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;
import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;
import org.locationtech.geowave.core.store.query.gwql.function.predicate.TextPredicates;
import org.locationtech.geowave.core.store.query.gwql.type.NumberCastableType;
import org.locationtech.geowave.core.store.query.gwql.type.TextCastableType;
import com.google.common.collect.Lists;

/**
 * The built-in set of functions used by the GeoWave query language.
 */
public class GWQLCoreExtensions implements GWQLExtensionRegistrySpi {

  @Override
  public AggregationFunction[] getAggregationFunctions() {
    // COUNT/MIN/MAX/SUM are the core aggregations shipped with the language.
    return new AggregationFunction[] {
        new CountFunction(),
        new MinFunction(),
        new MaxFunction(),
        new SumFunction()};
  }

  @Override
  public PredicateFunction[] getPredicateFunctions() {
    return new PredicateFunction[] {
        new TextPredicates.StrStartsWithFunction(),
        new TextPredicates.StrEndsWithFunction(),
        new TextPredicates.StrContainsFunction()};
  }

  @Override
  public ExpressionFunction[] getExpressionFunctions() {
    return new ExpressionFunction[] {new AbsFunction(), new ConcatFunction()};
  }

  @Override
  public OperatorFunction[] getOperatorFunctions() {
    // The core language defines no extra operators; the registry treats null as "none".
    return null;
  }

  @Override
  public CastableType[] getCastableTypes() {
    return new CastableType[] {new TextCastableType(), new NumberCastableType()};
  }

  @Override
  public FieldValueBuilder[] getFieldValueBuilders() {
    // Map common binding classes to their typed field value expressions.
    return new FieldValueBuilder[] {
        new FieldValueBuilder(Lists.newArrayList(Number.class), NumericFieldValue::of),
        new FieldValueBuilder(Lists.newArrayList(String.class), TextFieldValue::of),
        new FieldValueBuilder(Lists.newArrayList(Boolean.class), BooleanFieldValue::of)};
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLExtensionRegistry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi.FieldValueBuilder; import org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction; import org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction; import org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction; import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** * Singleton registry for all GWQL extensions. Functionality can be added to the language using * {@link GWQLExtensionRegistrySpi}. 
*/ public class GWQLExtensionRegistry { private static GWQLExtensionRegistry INSTANCE = null; private List fieldValueBuilders = Lists.newArrayList(); private Map> aggregationFunctions = Maps.newHashMap(); private Map predicateFunctions = Maps.newHashMap(); private Map operatorFunctions = Maps.newHashMap(); private Map> expressionFunctions = Maps.newHashMap(); private Map> castableTypes = Maps.newHashMap(); private GWQLExtensionRegistry() { final Iterator spiIter = new SPIServiceRegistry(GWQLExtensionRegistry.class).load(GWQLExtensionRegistrySpi.class); while (spiIter.hasNext()) { final GWQLExtensionRegistrySpi functionSet = spiIter.next(); final AggregationFunction[] aggregations = functionSet.getAggregationFunctions(); if (aggregations != null) { Arrays.stream(aggregations).forEach(f -> registerFunction(f, aggregationFunctions)); } final PredicateFunction[] predicates = functionSet.getPredicateFunctions(); if (predicates != null) { Arrays.stream(predicates).forEach(f -> registerFunction(f, predicateFunctions)); } final OperatorFunction[] operators = functionSet.getOperatorFunctions(); if (operators != null) { Arrays.stream(operators).forEach(f -> registerFunction(f, operatorFunctions)); } final ExpressionFunction[] expressions = functionSet.getExpressionFunctions(); if (expressions != null) { Arrays.stream(expressions).forEach(f -> registerFunction(f, expressionFunctions)); } final CastableType[] types = functionSet.getCastableTypes(); if (types != null) { Arrays.stream(types).forEach(t -> registerCastableType(t)); } final FieldValueBuilder[] fieldValues = functionSet.getFieldValueBuilders(); if (fieldValues != null) { Arrays.stream(fieldValues).forEach(f -> fieldValueBuilders.add(f)); } } } public static GWQLExtensionRegistry instance() { if (INSTANCE == null) { INSTANCE = new GWQLExtensionRegistry(); } return INSTANCE; } private > void registerFunction( final T function, final Map registeredFunctions) { if (registeredFunctions.containsKey(function.getName())) { 
throw new RuntimeException( "A function with the name " + function.getName() + " is already registered."); } registeredFunctions.put(function.getName(), function); } private void registerCastableType(final CastableType type) { if (castableTypes.containsKey(type.getName())) { throw new RuntimeException( "A type with the name " + type.getName() + " is already registered."); } castableTypes.put(type.getName(), type); } /** * Retrieves the aggregation function with the given name. * * @param functionName the function name * @return the function that matches the given name, or {@code null} if it could not be found */ public AggregationFunction getAggregationFunction(final String functionName) { return aggregationFunctions.get(functionName.toUpperCase()); } /** * Retrieves the predicate function with the given name. * * @param functionName the function name * @return the function that matches the given name, or {@code null} if it could not be found */ public PredicateFunction getPredicateFunction(final String functionName) { return predicateFunctions.get(functionName.toUpperCase()); } /** * Retrieves the operator function with the given operator. * * @param operator the operator * @return the function that matches the given operator, or {@code null} if it could not be found */ public OperatorFunction getOperatorFunction(final String operator) { return operatorFunctions.get(operator.toUpperCase()); } /** * Retrieves the expression function with the given name. * * @param functionName the function name * @return the function that matches the given name, or {@code null} if it could not be found */ public ExpressionFunction getExpressionFunction(final String functionName) { return expressionFunctions.get(functionName.toUpperCase()); } /** * Get a castable type with the given name. 
* * @param typeName the castable type name * @return the castable type, or {@code null} if it could not befound */ public CastableType getCastableType(final String typeName) { return castableTypes.get(typeName.toLowerCase()); } /** * Create a field value expression for the given field name and class. * * @param fieldClass the class of the field * @param fieldName the name of the field * @return an appropriate field value expression for the field, or {@code null} if a matching * field value builder could not be found */ public FieldValue createFieldValue(final Class fieldClass, final String fieldName) { for (final FieldValueBuilder builder : fieldValueBuilders) { if (builder.isSupported(fieldClass)) { return builder.createFieldValue(fieldName); } } return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLExtensionRegistrySpi.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.gwql;

import java.util.List;
import java.util.function.Function;
import org.locationtech.geowave.core.store.query.filter.expression.FieldValue;
import org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction;
import org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction;
import org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction;
import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction;

/**
 * Class for adding functionality to the GeoWave query language. Implementations are discovered via
 * SPI by the extension registry; any method may return {@code null} to contribute nothing of that
 * kind.
 */
public interface GWQLExtensionRegistrySpi {
  /**
   * @return a list of field value builders to add
   */
  FieldValueBuilder[] getFieldValueBuilders();

  /**
   * @return a list of castable types
   */
  CastableType[] getCastableTypes();

  /**
   * @return the aggregation functions to add
   */
  AggregationFunction[] getAggregationFunctions();

  /**
   * @return the predicate functions to add
   */
  PredicateFunction[] getPredicateFunctions();

  /**
   * @return the expression functions to add
   */
  ExpressionFunction[] getExpressionFunctions();

  /**
   * @return the operator functions to add
   */
  OperatorFunction[] getOperatorFunctions();

  /**
   * Maps a set of field binding classes to a factory that builds the appropriate {@link FieldValue}
   * expression for a field of one of those classes.
   */
  public static class FieldValueBuilder {
    // NOTE(review): the generic parameters of these declarations were lost during extraction
    // ("List>", "Function>"); FieldValue is left raw here — restore the exact
    // parameterization from upstream when available.
    private final List<Class<?>> supportedClasses;
    private final Function<String, FieldValue> buildFunction;

    /**
     * @param supportedClasses the field classes this builder can handle
     * @param buildFunction factory that creates a field value expression from a field name
     */
    public FieldValueBuilder(
        final List<Class<?>> supportedClasses,
        final Function<String, FieldValue> buildFunction) {
      this.supportedClasses = supportedClasses;
      this.buildFunction = buildFunction;
    }

    /**
     * @param fieldClass the class to test
     * @return {@code true} if any supported class is assignable from the given class
     */
    public boolean isSupported(final Class<?> fieldClass) {
      return supportedClasses.stream().anyMatch(c -> c.isAssignableFrom(fieldClass));
    }

    /**
     * @param fieldName the name of the field
     * @return the field value expression for the field
     */
    public FieldValue createFieldValue(final String fieldName) {
      return buildFunction.apply(fieldName);
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLParseException.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import org.antlr.v4.runtime.misc.ParseCancellationException; /** * Exception class for syntax errors in the query language. */ public class GWQLParseException extends ParseCancellationException { private static final long serialVersionUID = 1L; public GWQLParseException(final String message) { super(message); } public GWQLParseException(final String message, final Throwable cause) { super(message, cause); } public GWQLParseException(int line, int position, String message) { super("Invalid Syntax: " + message + " at [" + line + ":" + position + "]"); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/GWQLParseHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import java.util.List; import org.apache.commons.text.StringEscapeUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.filter.expression.ComparableExpression; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.GenericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException; import org.locationtech.geowave.core.store.query.filter.expression.Literal; import org.locationtech.geowave.core.store.query.filter.expression.Predicate; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import org.locationtech.geowave.core.store.query.gwql.function.expression.ExpressionFunction; import org.locationtech.geowave.core.store.query.gwql.function.operator.OperatorFunction; import org.locationtech.geowave.core.store.query.gwql.function.predicate.PredicateFunction; /** * Helper functions for transforming GWQL into GeoWave objects. */ public class GWQLParseHelper { /** * Convert a GWQL text literal to a {@link TextLiteral} expression. 
* * @param literal the GWQL literal * @return a {@code TextLiteral} that contains the literal string */ public static TextLiteral evaluateTextLiteral(final String literal) { final String text = literal.substring(1, literal.length() - 1).replace("''", "'").replace("\\'", "'"); return TextLiteral.of(StringEscapeUtils.unescapeJava(text)); } /** * Gets a {@link FieldValue} expression from an adapter for the given field name. * * @param adapter the data type adapter * @param fieldName the field name * @return the field value expression for the field */ public static FieldValue getFieldValue( final DataTypeAdapter adapter, final String fieldName) { final FieldDescriptor descriptor = adapter.getFieldDescriptor(fieldName); if (descriptor != null) { final FieldValue fieldValue = GWQLExtensionRegistry.instance().createFieldValue(descriptor.bindingClass(), fieldName); if (fieldValue == null) { return GenericFieldValue.of(fieldName); } return fieldValue; } throw new GWQLParseException("Field " + fieldName + " did not exist in the specified type."); } /** * Gets an expression representing the sum of two input expressions. * * @param expression1 the first expression * @param expression2 the expression to add * @return the added expressions */ public static Expression getAddExpression( final Expression expression1, final Expression expression2) { if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) { return ((NumericExpression) expression1).add(expression2); } throw new GWQLParseException("Math operations require numeric expressions."); } /** * Gets an expression that represents one expression subtracted from another expression. 
* * @param expression1 the first expression * @param expression2 the expression to subtract * @return the subtracted expressions */ public static Expression getSubtractExpression( final Expression expression1, final Expression expression2) { if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) { return ((NumericExpression) expression1).subtract(expression2); } throw new GWQLParseException("Math operations require numeric expressions."); } /** * Gets an expression that represents the one expression multiplied by another expression. * * @param expression1 the first expression * @param expression2 the expression to multiply by * @return the multiplied expressions */ public static Expression getMultiplyExpression( final Expression expression1, final Expression expression2) { if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) { return ((NumericExpression) expression1).multiplyBy(expression2); } throw new GWQLParseException("Math operations require numeric expressions."); } /** * Gets an expression that represents one expression divided by another expression. * * @param expression1 the first expression * @param expression2 the expression to divide by * @return the divided expressions */ public static Expression getDivideExpression( final Expression expression1, final Expression expression2) { if (expression1 instanceof NumericExpression && expression2 instanceof NumericExpression) { return ((NumericExpression) expression1).divideBy(expression2); } throw new GWQLParseException("Math operations require numeric expressions."); } /** * Gets a between predicate for the given comparable expression. 
* * @param value the expression to evaluate * @param lowerBound the lower bound * @param upperBound the upper bound * @return a between predicate */ public static Predicate getBetweenPredicate( final Expression value, final Expression lowerBound, final Expression upperBound) { try { if (value instanceof ComparableExpression && lowerBound instanceof ComparableExpression && upperBound instanceof ComparableExpression) { return ((ComparableExpression) value).isBetween(lowerBound, upperBound); } } catch (InvalidFilterException e) { // operands were incompatible } throw new GWQLParseException( "The BETWEEN operation is only supported for comparable expressions."); } /** * Gets an equals predicate for the given expressions. * * @param expression1 the first expression * @param expression2 the second expression * @return the equals predicate */ public static Predicate getEqualsPredicate( final Expression expression1, final Expression expression2) { return expression1.isEqualTo(expression2); } /** * Gets a not equals predicate for the given expressions. * * @param expression1 the first expression * @param expression2 the second expression * @return the not equals predicate */ public static Predicate getNotEqualsPredicate( final Expression expression1, final Expression expression2) { return expression1.isNotEqualTo(expression2); } /** * Gets a less than predicate for the given expressions. 
* * @param expression1 the first expression * @param expression2 the second expression * @return the less than predicate */ public static Predicate getLessThanPredicate( final Expression expression1, final Expression expression2) { try { if (expression1 instanceof ComparableExpression && expression2 instanceof ComparableExpression) { return ((ComparableExpression) expression1).isLessThan(expression2); } } catch (InvalidFilterException e) { // operand was incompatible } throw new GWQLParseException( "Comparison operators can only be used on comparable expressions."); } /** * Gets a less than or equals predicate for the given expressions. * * @param expression1 the first expression * @param expression2 the second expression * @return the less than or equals predicate */ public static Predicate getLessThanOrEqualsPredicate( final Expression expression1, final Expression expression2) { try { if (expression1 instanceof ComparableExpression && expression2 instanceof ComparableExpression) { return ((ComparableExpression) expression1).isLessThanOrEqualTo(expression2); } } catch (InvalidFilterException e) { // operand was incompatible } throw new GWQLParseException( "Comparison operators can only be used on comparable expressions."); } /** * Gets a greater than predicate for the given expressions. * * @param expression1 the first expression * @param expression2 the second expression * @return the greater than predicate */ public static Predicate getGreaterThanPredicate( final Expression expression1, final Expression expression2) { try { if (expression1 instanceof ComparableExpression && expression2 instanceof ComparableExpression) { return ((ComparableExpression) expression1).isGreaterThan(expression2); } } catch (InvalidFilterException e) { // operand was incompatible } throw new GWQLParseException( "Comparison operators can only be used on comparable expressions."); } /** * Gets a greater than or equals predicate for the given expressions. 
* * @param expression1 the first expression * @param expression2 the second expression * @return the greater than or equals predicate */ public static Predicate getGreaterThanOrEqualsPredicate( final Expression expression1, final Expression expression2) { try { if (expression1 instanceof ComparableExpression && expression2 instanceof ComparableExpression) { return ((ComparableExpression) expression1).isGreaterThanOrEqualTo(expression2); } } catch (InvalidFilterException e) { // operand was incompatible } throw new GWQLParseException( "Comparison operators can only be used on comparable expressions."); } /** * Gets an expression that matches the given function name and arguments. * * @param functionName the name of the expression function * @param arguments the arguments of the function * @return the expression function */ public static Expression getExpressionFunction( final String functionName, final List> arguments) { final ExpressionFunction function = GWQLExtensionRegistry.instance().getExpressionFunction(functionName); if (function != null) { return function.create(arguments); } throw new GWQLParseException("No expression function was found with the name: " + functionName); } /** * Gets a predicate that matches the given function name and arguments. * * @param functionName the name of the predicate function * @param arguments the arguments of the function * @return the predicate function */ public static Predicate getPredicateFunction( final String functionName, final List> arguments) { final PredicateFunction function = GWQLExtensionRegistry.instance().getPredicateFunction(functionName); if (function != null) { return function.create(arguments); } throw new GWQLParseException("No predicate function was found with the name: " + functionName); } /** * Gets the operator predicate that matches the given operator. 
* * @param operator the operator * @param expression1 the first operand * @param expression2 the second operand * @return the operator predicate */ public static Predicate getOperatorPredicate( final String operator, final Expression expression1, final Expression expression2) { final OperatorFunction function = GWQLExtensionRegistry.instance().getOperatorFunction(operator); if (function != null) { return function.create(expression1, expression2); } throw new GWQLParseException("No '" + operator + "' operator was found"); } /** * Casts the given expression to the target type. * * @param targetType the type to cast to * @param expression the base expression * @return the casted expression */ public static Expression castExpression( final String targetType, final Expression expression) { final CastableType type = GWQLExtensionRegistry.instance().getCastableType(targetType); if (type != null) { return type.cast( expression.isLiteral() ? ((Literal) expression).evaluateValue(null) : expression); } throw new GWQLParseException("Type '" + targetType + "' is undefined"); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/QLFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.gwql;

/**
 * Base interface for all functions in the query language.
 */
public interface QLFunction {
  /**
   * @return the name of the function; the extension registry stores functions under this key, so
   *         it must be unique among registered functions of the same kind
   */
  String getName();

  /**
   * @return the Java class of the values this function produces
   */
  Class getReturnType();
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/Result.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import java.util.List; /** * A single immutable query result. */ public class Result { private final List values; /** * @param values the column values of this result */ public Result(List values) { this.values = values; } /** * @param index the column index to get * @return the value of the column at the given index for this result */ public Object columnValue(final int index) { return values.get(index); } public List values() { return values; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/ResultSet.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.gwql;

import org.locationtech.geowave.core.store.CloseableIterator;

/**
 * Interface for a set of results from a GeoWave query. Iterates over {@code Result} rows and
 * exposes column metadata; must be closed when done (it is a {@link CloseableIterator}).
 */
public interface ResultSet extends CloseableIterator {
  /**
   * @return the number of columns that each result contains
   */
  public int columnCount();

  /**
   * @param index the index of the column
   * @return the display name of the column at the given index
   */
  public String columnName(final int index);

  /**
   * @param columnName the name of the column to find
   * @return the index of the column with the given display name
   */
  public int columnIndex(final String columnName);

  /**
   * @param index the index of the column
   * @return the Class of the objects that can be found in the given column
   */
  public Class columnType(final int index);
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/Selector.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; /** * Abstract class for selecting data from a GeoWave query. */ public abstract class Selector { private final String alias; private final SelectorType type; public enum SelectorType { AGGREGATION, SIMPLE } /** * @param type the type of this selector */ public Selector(final SelectorType type) { this(type, null); } /** * @param type the type of this selector * @param alias an alternate display name for the selector */ public Selector(final SelectorType type, final String alias) { this.alias = alias; this.type = type; } /** * @return the alias of the selector */ public String alias() { return alias; } /** * @return the type of this selector */ public SelectorType type() { return type; } /** * @return the display name of the selector */ public String name() { return alias != null ? alias : selectorName(); } /** * @return the non-aliased display name of the selector */ protected abstract String selectorName(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/SingletonResultSet.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import java.util.List; import java.util.NoSuchElementException; /** * A result set that wraps a single result. */ public class SingletonResultSet implements ResultSet { private Result next; private final List columnNames; private final List> columnTypes; /** * @param columnNames the display name of each column * @param columnTypes the type of each column * @param values the values of each column */ public SingletonResultSet( final List columnNames, final List> columnTypes, final List values) { this.columnNames = columnNames; this.columnTypes = columnTypes; next = new Result(values); } @Override public void close() {} @Override public boolean hasNext() { return next != null; } @Override public Result next() { if (next != null) { Result retVal = next; next = null; return retVal; } throw new NoSuchElementException(); } @Override public int columnCount() { return columnNames.size(); } @Override public String columnName(final int index) { return columnNames.get(index); } @Override public int columnIndex(final String columnName) { return columnNames.indexOf(columnName); } @Override public Class columnType(int index) { return columnTypes.get(index); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/AggregationFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * A GWQL function that maps to a GeoWave {@link Aggregation}, allowing it to be executed as an
 * aggregation query against a data store rather than by iterating over raw results.
 */
public interface AggregationFunction extends QLFunction {
  /**
   * Gets the {@link Aggregation} associated with this function.
   *
   * @param adapter the adapter to perform the aggregation on
   * @param functionArgs the function arguments
   * @return the raw aggregation for this function
   */
  public Aggregation getAggregation(
      final DataTypeAdapter adapter,
      final String[] functionArgs);
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.aggregation; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.locationtech.geowave.core.store.query.aggregate.OptimalCountAggregation.FieldCountAggregation; /** * Count aggregation function that accepts a single argument. If `*` is passed to the function, all * simple features will be counted. Otherwise, all non-null values of the given column will be * counted. */ public class CountFunction implements AggregationFunction { @Override public String getName() { return "COUNT"; } @Override public Class getReturnType() { return Long.class; } @Override public Aggregation getAggregation( final DataTypeAdapter adapter, final String[] functionArgs) { if (functionArgs == null || functionArgs.length != 1) { throw new RuntimeException("COUNT takes exactly 1 parameter"); } final FieldNameParam columnName = functionArgs[0].equals("*") ? 
null : new FieldNameParam(functionArgs[0]); if (columnName != null && adapter.getFieldDescriptor(columnName.getFieldName()) == null) { throw new RuntimeException( "No attribute called '" + columnName.getFieldName() + "' was found in the given type."); } return new FieldCountAggregation<>(columnName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/MathAggregationFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.aggregation; import java.math.BigDecimal; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; /** * Base aggregation function for performing math aggregations on numeric columns. */ public abstract class MathAggregationFunction implements AggregationFunction { @Override public Class getReturnType() { return BigDecimal.class; } @Override public Aggregation getAggregation( final DataTypeAdapter adapter, final String[] functionArgs) { if (functionArgs == null || functionArgs.length != 1) { throw new RuntimeException(getName() + " takes exactly 1 parameter"); } if (functionArgs[0].equals("*")) { throw new RuntimeException(getName() + " expects a numeric column."); } final FieldNameParam columnName = new FieldNameParam(functionArgs[0]); FieldDescriptor descriptor = adapter.getFieldDescriptor(columnName.getFieldName()); if (descriptor == null) { throw new RuntimeException( "No attribute called '" + columnName.getFieldName() + "' was found in the given type."); } if (!Number.class.isAssignableFrom(descriptor.bindingClass())) { throw new RuntimeException( getName() + " aggregation only works on numeric fields, given field was of type " + descriptor.bindingClass().getName() + "."); } return aggregation(columnName); } protected abstract Aggregation aggregation(final FieldNameParam columnName); } ================================================ FILE: 
core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/MaxFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.aggregation; import java.math.BigDecimal; import org.locationtech.geowave.core.store.query.aggregate.FieldMaxAggregation; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; /** * Aggregation function that finds the maximum value of a given numeric column. */ public class MaxFunction extends MathAggregationFunction { @Override public String getName() { return "MAX"; } @Override protected Aggregation aggregation(FieldNameParam columnName) { return new FieldMaxAggregation<>(columnName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/MinFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.aggregation; import java.math.BigDecimal; import org.locationtech.geowave.core.store.query.aggregate.FieldMinAggregation; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; /** * Aggregation function that finds the minimum value of a given numeric column. */ public class MinFunction extends MathAggregationFunction { @Override public String getName() { return "MIN"; } @Override protected Aggregation aggregation(final FieldNameParam columnName) { return new FieldMinAggregation<>(columnName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/aggregation/SumFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.aggregation; import java.math.BigDecimal; import org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; /** * Aggregation function that sums all non-null numeric values of a given column. */ public class SumFunction extends MathAggregationFunction { @Override public String getName() { return "SUM"; } @Override protected Aggregation aggregation(FieldNameParam columnName) { return new FieldSumAggregation<>(columnName); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/expression/AbsFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.expression; import java.util.List; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; public class AbsFunction implements ExpressionFunction { @Override public String getName() { return "ABS"; } @Override public Class getReturnType() { return Double.class; } @Override public Expression create(List> arguments) { if (arguments.size() == 1 && arguments.get(0) instanceof NumericExpression) { return ((NumericExpression) arguments.get(0)).abs(); } throw new GWQLParseException("ABS expects exactly 1 numeric expression."); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/expression/ConcatFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.expression; import java.util.List; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; public class ConcatFunction implements ExpressionFunction { @Override public String getName() { return "CONCAT"; } @Override public Class getReturnType() { return String.class; } @Override public Expression create(List> arguments) { if (arguments.size() == 2 && arguments.stream().allMatch(a -> a instanceof TextExpression)) { return ((TextExpression) arguments.get(0)).concat(arguments.get(1)); } throw new GWQLParseException("CONCAT expects 2 text expressions."); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/expression/ExpressionFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.expression; import java.util.List; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.gwql.QLFunction; public interface ExpressionFunction extends QLFunction { Expression create(List> arguments); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/operator/OperatorFunction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * A GWQL function that combines two expressions into a boolean {@link Predicate}, e.g. a binary
 * comparison operator.
 */
public interface OperatorFunction extends QLFunction {
  // Operators always evaluate to a boolean result.
  @Override
  default Class getReturnType() {
    return Boolean.class;
  }

  /**
   * Creates the predicate for this operator from its two operand expressions.
   *
   * @param expression1 the left-hand operand
   * @param expression2 the right-hand operand
   * @return the resulting predicate
   */
  Predicate create(Expression expression1, Expression expression2);
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.predicate; import java.util.List; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.Predicate; import org.locationtech.geowave.core.store.query.gwql.QLFunction; public interface PredicateFunction extends QLFunction { @Override default Class getReturnType() { return Boolean.class; } Predicate create(List> arguments); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/function/predicate/TextPredicates.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.function.predicate; import java.util.List; import org.locationtech.geowave.core.store.query.filter.expression.BooleanLiteral; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.Predicate; import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; import org.locationtech.geowave.core.store.query.gwql.type.TextCastableType; public class TextPredicates { private static abstract class TextPredicateFunction implements PredicateFunction { @Override public Predicate create(List> arguments) { if (arguments.size() < 2 || arguments.size() > 3) { throw new GWQLParseException("Function expects 2 or 3 arguments, got " + arguments.size()); } final TextExpression expression1 = TextCastableType.toTextExpression(arguments.get(0)); final TextExpression expression2 = TextCastableType.toTextExpression(arguments.get(1)); final boolean ignoreCase; if (arguments.size() == 3) { if (arguments.get(2) instanceof BooleanLiteral) { ignoreCase = ((BooleanLiteral) arguments.get(2)).evaluateValue(null); } else { throw new GWQLParseException( "Function expects a boolean literal for the third argument."); } } else { ignoreCase = false; } return createInternal(expression1, expression2, ignoreCase); } protected abstract Predicate createInternal( final TextExpression expression1, final TextExpression expression2, final boolean ignoreCase); } public static class StrStartsWithFunction extends 
TextPredicateFunction { @Override public String getName() { return "STRSTARTSWITH"; } @Override protected Predicate createInternal( TextExpression expression1, TextExpression expression2, final boolean ignoreCase) { return expression1.startsWith(expression2, ignoreCase); } } public static class StrEndsWithFunction extends TextPredicateFunction { @Override public String getName() { return "STRENDSWITH"; } @Override protected Predicate createInternal( TextExpression expression1, TextExpression expression2, final boolean ignoreCase) { return expression1.endsWith(expression2, ignoreCase); } } public static class StrContainsFunction extends TextPredicateFunction { @Override public String getName() { return "STRCONTAINS"; } @Override protected Predicate createInternal( TextExpression expression1, TextExpression expression2, final boolean ignoreCase) { return expression1.contains(expression2, ignoreCase); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/statement/DeleteStatement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.statement; import javax.annotation.Nullable; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.locationtech.geowave.core.store.query.gwql.SingletonResultSet; import com.google.common.collect.Lists; /** * Deletes data from a GeoWave store. */ public class DeleteStatement implements Statement { private final DataStore dataStore; private final DataTypeAdapter adapter; private final Filter filter; /** * * @param typeName the type to delete data from * @param filter delete features that match this filter */ public DeleteStatement( final DataStore dataStore, final DataTypeAdapter adapter, final @Nullable Filter filter) { this.dataStore = dataStore; this.adapter = adapter; this.filter = filter; } @Override public ResultSet execute(final String... 
authorizations) { final QueryBuilder bldr = QueryBuilder.newBuilder(adapter.getDataClass()).addTypeName(adapter.getTypeName()); bldr.setAuthorizations(authorizations); if (filter != null) { bldr.filter(filter); } final Query query = bldr.build(); final boolean success = dataStore.delete(query); return new SingletonResultSet( Lists.newArrayList("SUCCESS"), Lists.newArrayList(Boolean.class), Lists.newArrayList(success)); } /** * @return the type that data will be deleted from */ public DataTypeAdapter getAdapter() { return adapter; } /** * @return the delete filter */ public Filter getFilter() { return filter; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/statement/SelectStatement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.statement; import java.util.Arrays; import java.util.List; import java.util.Set; import javax.annotation.Nullable; import org.locationtech.geowave.core.index.persist.PersistableList; import org.locationtech.geowave.core.store.api.AggregationQueryBuilder; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.query.aggregate.CompositeAggregation; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.gwql.AdapterEntryResultSet; import org.locationtech.geowave.core.store.query.gwql.AggregationSelector; import org.locationtech.geowave.core.store.query.gwql.ColumnSelector; import org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistry; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.locationtech.geowave.core.store.query.gwql.Selector; import org.locationtech.geowave.core.store.query.gwql.Selector.SelectorType; import org.locationtech.geowave.core.store.query.gwql.SingletonResultSet; import org.locationtech.geowave.core.store.query.gwql.function.aggregation.AggregationFunction; import com.google.common.collect.Lists; import com.google.common.collect.Sets; /** * Select data from a GeoWave type. This can be an aggregation or a plain query. 
*/ public class SelectStatement implements Statement { private final DataStore dataStore; private final DataTypeAdapter adapter; private List selectors; private final Filter filter; private final Integer limit; /** * @param adapter the adapter to select data from * @param selectors the selectors to use * @param filter the filter to use * @param limit the limit to use */ public SelectStatement( final DataStore dataStore, final DataTypeAdapter adapter, final List selectors, final @Nullable Filter filter, final @Nullable Integer limit) { this.dataStore = dataStore; this.adapter = adapter; this.selectors = selectors; this.filter = filter; this.limit = limit; } @Override public ResultSet execute(final String... authorizations) { final String typeName = adapter.getTypeName(); if (isAggregation()) { final AggregationQueryBuilder, T, ?> bldr = AggregationQueryBuilder.newBuilder(); bldr.setAuthorizations(authorizations); if (filter != null) { bldr.filter(filter); } if (limit != null) { bldr.limit(limit); } final CompositeAggregation composite = new CompositeAggregation<>(); final List columnNames = Lists.newArrayListWithCapacity(selectors.size()); final List> columnTypes = Lists.newArrayListWithCapacity(selectors.size()); for (final Selector selector : selectors) { final AggregationSelector aggregation = (AggregationSelector) selector; final AggregationFunction function = GWQLExtensionRegistry.instance().getAggregationFunction(aggregation.functionName()); if (function == null) { throw new RuntimeException( "No aggregation function called '" + aggregation.functionName() + "' was found."); } composite.add(function.getAggregation(adapter, aggregation.functionArgs())); columnNames.add(selector.name()); columnTypes.add(function.getReturnType()); } bldr.aggregate(typeName, composite); return new SingletonResultSet(columnNames, columnTypes, dataStore.aggregate(bldr.build())); } else { final QueryBuilder bldr = QueryBuilder.newBuilder(adapter.getDataClass()).addTypeName(typeName); 
bldr.setAuthorizations(authorizations); if (filter != null) { bldr.filter(filter); } if ((selectors != null) && !selectors.isEmpty()) { final Set usedAttributes = Sets.newHashSet(); selectors.forEach(s -> usedAttributes.add(((ColumnSelector) s).columnName())); if (filter != null) { filter.addReferencedFields(usedAttributes); } for (final String attribute : usedAttributes) { if (adapter.getFieldDescriptor(attribute) == null) { throw new RuntimeException( "No column named " + attribute + " was found in " + typeName); } } bldr.subsetFields(typeName, usedAttributes.toArray(new String[usedAttributes.size()])); } else { selectors = Lists.transform( Arrays.asList(adapter.getFieldDescriptors()), f -> new ColumnSelector(f.fieldName())); } if (limit != null) { bldr.limit(limit); } return new AdapterEntryResultSet<>(selectors, adapter, dataStore.query(bldr.build())); } } /** * @return {@code true} if this select statement represents an aggregation, {@code false} * otherwise */ public boolean isAggregation() { return (selectors != null) && !selectors.isEmpty() && (selectors.get(0).type() == SelectorType.AGGREGATION); } /** * @return the type to select data from */ public DataTypeAdapter getAdapter() { return adapter; } /** * @return the filter for the query */ public Filter getFilter() { return filter; } /** * @return the limit for the query */ public Integer getLimit() { return limit; } /** * @return the selectors for the query */ public List getSelectors() { return selectors; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/statement/Statement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * Interface for GeoWave query language statements, such as the SELECT and DELETE statements.
 */
public interface Statement {
  /**
   * Executes the statement with the provided authorizations.
   *
   * @param authorizations authorizations to use for the query
   * @return the results of the statement
   */
  public ResultSet execute(final String... authorizations);
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.type; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericExpression; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.gwql.CastableType; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; public class NumberCastableType implements CastableType { @Override public String getName() { return "number"; } @Override public NumericExpression cast(Object objectOrExpression) { return toNumericExpression(objectOrExpression); } public static NumericExpression toNumericExpression(Object objectOrExpression) { if (objectOrExpression instanceof NumericExpression) { return (NumericExpression) objectOrExpression; } if (objectOrExpression instanceof Expression && ((Expression) objectOrExpression).isLiteral()) { objectOrExpression = ((Expression) objectOrExpression).evaluateValue(null); } if (objectOrExpression instanceof Expression) { throw new GWQLParseException("Unable to cast expression to number"); } else { try { return NumericLiteral.of(objectOrExpression); } catch (InvalidFilterException e) { throw new GWQLParseException("Unable to cast literal to date", e); } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/gwql/type/TextCastableType.java ================================================ /** 
* Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql.type; import org.locationtech.geowave.core.store.query.filter.expression.Expression; import org.locationtech.geowave.core.store.query.filter.expression.FieldValue; import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException; import org.locationtech.geowave.core.store.query.filter.expression.text.TextExpression; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import org.locationtech.geowave.core.store.query.gwql.CastableType; import org.locationtech.geowave.core.store.query.gwql.GWQLParseException; public class TextCastableType implements CastableType { @Override public String getName() { return "text"; } @Override public TextExpression cast(Object objectOrExpression) { return toTextExpression(objectOrExpression); } public static TextExpression toTextExpression(Object objectOrExpression) { if (objectOrExpression instanceof TextExpression) { return (TextExpression) objectOrExpression; } if (objectOrExpression instanceof Expression && ((Expression) objectOrExpression).isLiteral()) { objectOrExpression = ((Expression) objectOrExpression).evaluateValue(null); } if (objectOrExpression instanceof Expression) { if (objectOrExpression instanceof FieldValue) { return new TextFieldValue(((FieldValue) objectOrExpression).getFieldName()); } else { throw new GWQLParseException("Unable to cast expression to text"); } } else { try { return TextLiteral.of(objectOrExpression.toString()); } catch (InvalidFilterException e) { throw 
new GWQLParseException("Unable to cast literal to text", e); } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/AggregateTypeQueryOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; import java.nio.ByteBuffer; import java.util.Arrays; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Aggregation; public class AggregateTypeQueryOptions

implements DataTypeQueryOptions { private String[] typeNames; private Aggregation aggregation; public AggregateTypeQueryOptions() {} public AggregateTypeQueryOptions( final Aggregation aggregation, final String... typeNames) { this.typeNames = typeNames; this.aggregation = aggregation; } @Override public String[] getTypeNames() { return typeNames; } public void setTypeNames(String[] typeNames) { this.typeNames = typeNames; } public Aggregation getAggregation() { return aggregation; } public void setAggregation(Aggregation aggregation) { this.aggregation = aggregation; } @Override public byte[] toBinary() { byte[] typeNamesBinary, aggregationBinary; if ((typeNames != null) && (typeNames.length > 0)) { typeNamesBinary = StringUtils.stringsToBinary(typeNames); } else { typeNamesBinary = new byte[0]; } if (aggregation != null) { aggregationBinary = PersistenceUtils.toBinary(aggregation); } else { aggregationBinary = new byte[0]; } final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(typeNamesBinary.length) + aggregationBinary.length + typeNamesBinary.length); VarintUtils.writeUnsignedInt(typeNamesBinary.length, buf); buf.put(typeNamesBinary); buf.put(aggregationBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int typeNamesBytesLength = VarintUtils.readUnsignedInt(buf); if (typeNamesBytesLength == 0) { typeNames = new String[0]; } else { final byte[] typeNamesBytes = ByteArrayUtils.safeRead(buf, typeNamesBytesLength); typeNames = StringUtils.stringsFromBinary(typeNamesBytes); } final byte[] aggregationBytes = new byte[buf.remaining()]; if (aggregationBytes.length == 0) { aggregation = null; } else { buf.get(aggregationBytes); aggregation = (Aggregation) PersistenceUtils.fromBinary(aggregationBytes); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((aggregation == null) ? 
0 : aggregation.hashCode()); result = (prime * result) + Arrays.hashCode(typeNames); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final AggregateTypeQueryOptions other = (AggregateTypeQueryOptions) obj; if (aggregation == null) { if (other.aggregation != null) { return false; } } else if (!aggregation.equals(other.aggregation)) { return false; } if (!Arrays.equals(typeNames, other.typeNames)) { return false; } return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/CommonQueryOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.function.Function; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.primitives.Bytes; public class CommonQueryOptions implements Persistable { private static final Logger LOGGER = LoggerFactory.getLogger(CommonQueryOptions.class); public static class HintKey implements Persistable { private Class cls; private Function reader; private Function writer; public HintKey() {} public HintKey(final Class cls) { this.cls = cls; init(cls); } private void init(final Class cls) { reader = FieldUtils.getDefaultReaderForClass(cls); writer = FieldUtils.getDefaultWriterForClass(cls); } @Override public byte[] toBinary() { return StringUtils.stringToBinary(cls.getName()); } @Override public void fromBinary(final byte[] bytes) { try { cls = (Class) Class.forName(StringUtils.stringFromBinary(bytes)); init(cls); } catch (final ClassNotFoundException e) { LOGGER.error("Class not found for hint", e); } } } private Map, Object> hints; private Integer limit; private String[] authorizations; public CommonQueryOptions(final String... 
authorizations) { this((Integer) null, authorizations); } public CommonQueryOptions(final Integer limit, final String... authorizations) { this(limit, new HashMap<>(), authorizations); } public CommonQueryOptions( final Integer limit, final Map, Object> hints, final String... authorizations) { super(); this.hints = hints; this.limit = limit; this.authorizations = authorizations; } public Map, Object> getHints() { return hints; } public Integer getLimit() { return limit; } public String[] getAuthorizations() { return authorizations; } @Override public byte[] toBinary() { Integer limitForBinary; if (limit == null) { limitForBinary = -1; } else { limitForBinary = limit; } final byte[][] hintsBinary = new byte[hints == null ? 0 : hints.size()][]; int hintsLength = 0; if (hints != null) { int i = 0; for (final Entry, Object> e : hints.entrySet()) { final byte[] keyBinary = e.getKey().toBinary(); final ByteBuffer lengthBytes = ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(keyBinary.length)); VarintUtils.writeUnsignedInt(keyBinary.length, lengthBytes); hintsBinary[i] = Bytes.concat( lengthBytes.array(), keyBinary, ((Function) e.getKey().writer).apply(e.getValue())); hintsLength += hintsBinary[i].length + VarintUtils.unsignedIntByteLength(hintsBinary[i].length); i++; } } byte[] authsBinary; if ((authorizations == null) || (authorizations.length == 0)) { authsBinary = new byte[0]; } else { authsBinary = StringUtils.stringsToBinary(authorizations); } final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(limitForBinary) + VarintUtils.unsignedIntByteLength(authsBinary.length) + VarintUtils.unsignedIntByteLength(hintsBinary.length) + authsBinary.length + hintsLength); VarintUtils.writeUnsignedInt(limitForBinary, buf); VarintUtils.writeUnsignedInt(authsBinary.length, buf); buf.put(authsBinary); VarintUtils.writeUnsignedInt(hintsBinary.length, buf); for (final byte[] h : hintsBinary) { VarintUtils.writeUnsignedInt(h.length, buf); buf.put(h); } 
return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int limit = VarintUtils.readUnsignedInt(buf); if (limit <= 0) { this.limit = null; } else { this.limit = limit; } final int authLength = VarintUtils.readUnsignedInt(buf); if (authLength > 0) { final byte[] authBytes = ByteArrayUtils.safeRead(buf, authLength); authorizations = StringUtils.stringsFromBinary(authBytes); } else { authorizations = new String[0]; } final int hintsLength = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, hintsLength); final Map, Object> hints = new HashMap<>(hintsLength); for (int i = 0; i < hintsLength; i++) { final int l = VarintUtils.readUnsignedInt(buf); final byte[] hBytes = ByteArrayUtils.safeRead(buf, l); final ByteBuffer hBuf = ByteBuffer.wrap(hBytes); final byte[] keyBytes = ByteArrayUtils.safeRead(hBuf, VarintUtils.readUnsignedInt(hBuf)); final HintKey key = new HintKey<>(); key.fromBinary(keyBytes); final byte[] vBytes = new byte[hBuf.remaining()]; hBuf.get(vBytes); hints.put(key, key.reader.apply(vBytes)); } this.hints = hints; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(authorizations); result = (prime * result) + ((hints == null) ? 0 : hints.hashCode()); result = (prime * result) + ((limit == null) ? 
0 : limit.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CommonQueryOptions other = (CommonQueryOptions) obj; if (!Arrays.equals(authorizations, other.authorizations)) { return false; } if (hints == null) { if (other.hints != null) { return false; } } else if (!hints.equals(other.hints)) { return false; } if (limit == null) { if (other.limit != null) { return false; } } else if (!limit.equals(other.limit)) { return false; } return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/DataTypeQueryOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; import org.locationtech.geowave.core.index.persist.Persistable; public interface DataTypeQueryOptions extends Persistable { public String[] getTypeNames(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/FilterByTypeQueryOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; import java.nio.ByteBuffer; import java.util.Arrays; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; public class FilterByTypeQueryOptions implements DataTypeQueryOptions { private String[] typeNames; private String[] fieldNames; public FilterByTypeQueryOptions() {} public FilterByTypeQueryOptions(final String[] typeNames) { this.typeNames = typeNames; } public FilterByTypeQueryOptions(final String typeName, final String... fieldNames) { super(); typeNames = new String[] {typeName}; this.fieldNames = ((fieldNames != null) && (fieldNames.length == 0)) ? 
null : fieldNames; } @Override public String[] getTypeNames() { return typeNames; } public String[] getFieldNames() { return fieldNames; } @Override public byte[] toBinary() { byte[] typeNamesBinary, fieldNamesBinary; if ((typeNames != null) && (typeNames.length > 0)) { typeNamesBinary = StringUtils.stringsToBinary(typeNames); } else { typeNamesBinary = new byte[0]; } if ((fieldNames != null) && (fieldNames.length > 0)) { fieldNamesBinary = StringUtils.stringsToBinary(fieldNames); } else { fieldNamesBinary = new byte[0]; } final ByteBuffer buf = ByteBuffer.allocate( VarintUtils.unsignedIntByteLength(typeNamesBinary.length) + fieldNamesBinary.length + typeNamesBinary.length); VarintUtils.writeUnsignedInt(typeNamesBinary.length, buf); buf.put(typeNamesBinary); buf.put(fieldNamesBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int typeNamesBytesLength = VarintUtils.readUnsignedInt(buf); if (typeNamesBytesLength <= 0) { typeNames = new String[0]; } else { final byte[] typeNamesBytes = ByteArrayUtils.safeRead(buf, typeNamesBytesLength); typeNames = StringUtils.stringsFromBinary(typeNamesBytes); } final byte[] fieldNamesBytes = new byte[buf.remaining()]; if (fieldNamesBytes.length == 0) { fieldNames = null; } else { buf.get(fieldNamesBytes); fieldNames = StringUtils.stringsFromBinary(fieldNamesBytes); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + Arrays.hashCode(fieldNames); result = (prime * result) + Arrays.hashCode(typeNames); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final FilterByTypeQueryOptions other = (FilterByTypeQueryOptions) obj; if (!Arrays.equals(fieldNames, other.fieldNames)) { return false; } if (!Arrays.equals(typeNames, other.typeNames)) { return false; } return 
true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/IndexQueryOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; import org.locationtech.geowave.core.index.persist.Persistable; public interface IndexQueryOptions extends Persistable { public String getIndexName(); public boolean isAllIndices(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/QueryAllIndices.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; public class QueryAllIndices extends QuerySingleIndex { public QueryAllIndices() { super(null); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public int hashCode() { return getClass().hashCode(); } @Override public boolean equals(final Object obj) { if (obj == null) { return false; } return getClass() == obj.getClass(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/QueryAllTypes.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; public class QueryAllTypes extends FilterByTypeQueryOptions { public QueryAllTypes() { super(null); } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} @Override public int hashCode() { return getClass().hashCode(); } @Override public boolean equals(final Object obj) { if (obj == null) { return false; } return getClass() == obj.getClass(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/query/options/QuerySingleIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.options; import org.locationtech.geowave.core.index.StringUtils; public class QuerySingleIndex implements IndexQueryOptions { private String indexName; public QuerySingleIndex() { this(null); } public QuerySingleIndex(final String indexName) { this.indexName = indexName; } @Override public String getIndexName() { return indexName; } @Override public byte[] toBinary() { if ((indexName == null) || indexName.isEmpty()) { return new byte[0]; } return StringUtils.stringToBinary(indexName); } @Override public void fromBinary(final byte[] bytes) { if (bytes.length == 0) { indexName = null; } else { indexName = StringUtils.stringFromBinary(bytes); } } @Override public boolean isAllIndices() { return indexName == null || indexName.isEmpty(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((indexName == null) ? 0 : indexName.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final QuerySingleIndex other = (QuerySingleIndex) obj; if (indexName == null) { if (other.indexName != null) { return false; } } else if (!indexName.equals(other.indexName)) { return false; } return true; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/server/BasicOptionProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.server; import java.util.Map; import org.locationtech.geowave.core.store.server.ServerOpConfig.OptionProvider; public class BasicOptionProvider implements OptionProvider { private final Map options; public BasicOptionProvider(final Map options) { this.options = options; } @Override public Map getOptions(final Map existingOptions) { return options; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/server/RowMergingAdapterOptionProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.server; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.server.ServerOpConfig.OptionProvider; public class RowMergingAdapterOptionProvider implements OptionProvider { public static final String ROW_TRANSFORM_KEY = "ROW_TRANSFORM"; public static final String ROW_MERGING_ADAPTER_CACHE_ID = "ROW_MERGING_ADAPTER"; public static final String ADAPTER_IDS_OPTION = "adapters"; private final RowMergingDataAdapter adapter; private final short internalAdapterId; public RowMergingAdapterOptionProvider( final short internalAdapterId, final RowMergingDataAdapter adapter) { this.internalAdapterId = internalAdapterId; this.adapter = adapter; } @Override public Map getOptions(final Map existingOptions) { final Map newOptions = adapter.getOptions(internalAdapterId, existingOptions); String nextAdapterIdsValue = ByteArrayUtils.shortToString(internalAdapterId); if ((existingOptions != null) && existingOptions.containsKey(ADAPTER_IDS_OPTION)) { final String existingAdapterIds = existingOptions.get(ADAPTER_IDS_OPTION); final Set nextAdapters = new HashSet<>(); for (final String id : nextAdapterIdsValue.split(",")) { nextAdapters.add(id); } final StringBuffer str = new StringBuffer(nextAdapterIdsValue); for (final String id : existingAdapterIds.split(",")) { if (!nextAdapters.contains(id)) { str.append(","); str.append(id); } } 
nextAdapterIdsValue = str.toString(); } newOptions.put(ADAPTER_IDS_OPTION, nextAdapterIdsValue); newOptions.put( ROW_TRANSFORM_KEY, ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(adapter.getTransform()))); return newOptions; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/server/ServerOpConfig.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.server; import java.util.EnumSet; import java.util.Map; public class ServerOpConfig { private final EnumSet scopes; private final int serverOpPriority; private final String serverOpName; private final String serverOpClass; private final OptionProvider optionProvider; public ServerOpConfig( final EnumSet scopes, final int serverOpPriority, final String serverOpName, final String serverOpClass, final OptionProvider optionProvider) { this.scopes = scopes; this.serverOpPriority = serverOpPriority; this.serverOpName = serverOpName; this.serverOpClass = serverOpClass; this.optionProvider = optionProvider; } public EnumSet getScopes() { return scopes; } public int getServerOpPriority() { return serverOpPriority; } public String getServerOpName() { return serverOpName; } public String getServerOpClass() { return serverOpClass; } public Map getOptions(final Map existingOptions) { return optionProvider.getOptions(existingOptions); } public static interface OptionProvider { public Map getOptions(Map existingOptions); } public static enum ServerOpScope { MAJOR_COMPACTION, MINOR_COMPACTION, SCAN } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/server/ServerOpHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.server; import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform; import org.locationtech.geowave.core.store.server.ServerOpConfig.OptionProvider; import org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; public class ServerOpHelper { private static final Logger LOGGER = LoggerFactory.getLogger(ServerOpHelper.class); private static final String ROW_MERGING_SUFFIX = "_COMBINER"; public static final String ROW_MERGING_VISIBILITY_SUFFIX = "_VISIBILITY_COMBINER"; public static boolean updateServerOps( final ServerSideOperations operations, final String index, final ServerOpConfig... 
configs) { if ((configs != null) && (configs.length > 0)) { final Map> iteratorScopes = operations.listServerOps(index); for (final ServerOpConfig config : configs) { boolean mustDelete = false; boolean exists = false; final ImmutableSet existingScopes = iteratorScopes.get(config.getServerOpName()); ImmutableSet configuredScopes; if (config.getScopes() == null) { configuredScopes = Sets.immutableEnumSet(EnumSet.allOf(ServerOpScope.class)); } else { configuredScopes = Sets.immutableEnumSet(config.getScopes()); } Map configuredOptions = null; if (existingScopes != null) { if (existingScopes.size() == configuredScopes.size()) { exists = true; for (final ServerOpScope s : existingScopes) { if (!configuredScopes.contains(s)) { // this iterator exists with the wrong // scope, we will assume we want to remove // it and add the new configuration LOGGER.warn( "found iterator '" + config.getServerOpName() + "' missing scope '" + s.name() + "', removing it and re-attaching"); mustDelete = true; break; } } } if (existingScopes.size() > 0) { // see if the options are the same, if they are not // the same, apply a merge with the existing options // and the configured options final Iterator it = existingScopes.iterator(); while (it.hasNext()) { final ServerOpScope scope = it.next(); final Map existingOptions = operations.getServerOpOptions(index, config.getServerOpName(), scope); configuredOptions = config.getOptions(existingOptions); if (existingOptions == null) { mustDelete = (configuredOptions == null); } else if (configuredOptions == null) { mustDelete = true; } else { // neither are null, compare the size of // the entry sets and check that they // are equivalent final Set> existingEntries = existingOptions.entrySet(); final Set> configuredEntries = configuredOptions.entrySet(); if (existingEntries.size() != configuredEntries.size()) { mustDelete = true; } else { mustDelete = (!existingEntries.containsAll(configuredEntries)); } } // we found the setting existing in one // 
scope, assume the options are the same // for each scope break; } } } if (configuredOptions == null) { configuredOptions = config.getOptions(new HashMap()); } if (mustDelete) { operations.updateServerOp( index, config.getServerOpPriority(), config.getServerOpName(), config.getServerOpClass(), configuredOptions, existingScopes, configuredScopes); } else if (!exists) { operations.addServerOp( index, config.getServerOpPriority(), config.getServerOpName(), config.getServerOpClass(), configuredOptions, configuredScopes); } } } return true; } public static void addServerSideRowMerging( final RowMergingDataAdapter adapter, final short internalAdapterId, final ServerSideOperations operations, final String serverOpClassName, final String serverOpVisiblityClassName, final String tableName) { final RowTransform rowTransform = adapter.getTransform(); if (rowTransform != null) { final OptionProvider optionProvider = new RowMergingAdapterOptionProvider(internalAdapterId, adapter); final ServerOpConfig rowMergingCombinerConfig = new ServerOpConfig( EnumSet.allOf(ServerOpScope.class), rowTransform.getBaseTransformPriority(), rowTransform.getTransformName() + ROW_MERGING_SUFFIX, serverOpClassName, optionProvider); final ServerOpConfig rowMergingVisibilityCombinerConfig = new ServerOpConfig( EnumSet.of(ServerOpScope.SCAN), rowTransform.getBaseTransformPriority() + 1, rowTransform.getTransformName() + ROW_MERGING_VISIBILITY_SUFFIX, serverOpVisiblityClassName, optionProvider); updateServerOps( operations, tableName, rowMergingCombinerConfig, rowMergingVisibilityCombinerConfig); } } public static void addServerSideMerging( final ServerSideOperations operations, final String mergingOpBaseName, final int mergingOpBasePriority, final String serverOpClassName, final String serverOpVisiblityClassName, final OptionProvider optionProvider, final String tableName) { final ServerOpConfig rowMergingCombinerConfig = new ServerOpConfig( EnumSet.allOf(ServerOpScope.class), mergingOpBasePriority, 
mergingOpBaseName + ROW_MERGING_SUFFIX, serverOpClassName, optionProvider); final ServerOpConfig rowMergingVisibilityCombinerConfig = new ServerOpConfig( EnumSet.of(ServerOpScope.SCAN), mergingOpBasePriority + 1, mergingOpBaseName + ROW_MERGING_VISIBILITY_SUFFIX, serverOpVisiblityClassName, optionProvider); updateServerOps( operations, tableName, rowMergingCombinerConfig, rowMergingVisibilityCombinerConfig); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/server/ServerSideOperations.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.server;

import java.util.Map;
import org.locationtech.geowave.core.store.operations.DataStoreOperations;
import org.locationtech.geowave.core.store.server.ServerOpConfig.ServerOpScope;
import com.google.common.collect.ImmutableSet;

/**
 * Data store operations for back-ends that support registering server-side operations (such as
 * Accumulo iterators) per index/table.
 *
 * <p>NOTE(review): the generic type parameters in this interface were lost during extraction and
 * have been reconstructed from the javadoc and parameter names; confirm against upstream.
 */
public interface ServerSideOperations extends DataStoreOperations {
  /**
   * Returns a mapping of existing registered server-side operations with serverop name as the key
   * and the registered scopes as the value
   *
   * @param index the index/table
   * @return the mapping
   */
  public Map<String, ImmutableSet<ServerOpScope>> listServerOps(String index);

  /**
   * get the particular existing configured options for this server op at this scope
   *
   * @param index the index/table
   * @param serverOpName the operation name
   * @param scope the scope
   * @return the options
   */
  public Map<String, String> getServerOpOptions(
      String index,
      String serverOpName,
      ServerOpScope scope);

  /**
   * remove this server operation - because accumulo requires scopes as a parameter it is passed
   * into this method, but the server op will be removed entirely regardless of scopes
   *
   * @param index the index/table
   * @param serverOpName the operation name
   * @param scopes the existing scopes
   */
  public void removeServerOp(String index, String serverOpName, ImmutableSet<ServerOpScope> scopes);

  /**
   * add this server operation
   *
   * @param index the index/table
   * @param priority the operation priority (this is merely relative, it defines how to order
   *        multiple operations, from low to high)
   * @param name the operation name
   * @param operationClass the operation class
   * @param properties the operation options
   * @param configuredScopes the scopes
   */
  public void addServerOp(
      String index,
      int priority,
      String name,
      String operationClass,
      Map<String, String> properties,
      ImmutableSet<ServerOpScope> configuredScopes);

  /**
   * update this server operation, the current scopes are passed in because accumulo requires
   * iteratorscope as a parameter to remove the iterator. This will update the server op to the new
   * scope.
   *
   * @param index the index/table
   * @param priority the operation priority (this is merely relative, it defines how to order
   *        multiple operations, from low to high)
   * @param name the operation name
   * @param operationClass the operation class
   * @param properties the operation options
   * @param currentScopes the existing scopes
   * @param newScopes the new configured scopes
   */
  public void updateServerOp(
      String index,
      int priority,
      String name,
      String operationClass,
      Map<String, String> properties,
      ImmutableSet<ServerOpScope> currentScopes,
      ImmutableSet<ServerOpScope> newScopes);

  /** Method to lookup the version of a remote datastore */
  public String getVersion();
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/spi/ClassLoaderTransformerSpi.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.spi;

/**
 * SPI extension point for transforming a class loader before it is used — presumably to wrap or
 * augment the loader GeoWave uses for dynamic class loading (TODO confirm against callers).
 */
public interface ClassLoaderTransformerSpi {
  /**
   * Transform the given class loader.
   *
   * @param classLoader the class loader to transform
   * @return the transformed class loader
   */
  public ClassLoader transform(ClassLoader classLoader);
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/spi/DimensionalityTypeOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.spi;

/** This is an interface that all dimensionality types must implement for their options object. */
// marker interface: declares no members; implementations carry their own option fields
public interface DimensionalityTypeOptions {
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/spi/DimensionalityTypeProviderSpi.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.spi; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; /** * This interface can be injected using SPI to determine which supported index for an ingest type * will be used. */ public interface DimensionalityTypeProviderSpi { /** * This will represent the name for the dimensionality type that is registered with the ingest * framework and presented as a dimensionality type option via the commandline. For consistency, * this name is preferably lower-case and without spaces, and should uniquely identify the * dimensionality type as much as possible. * * @return the name of this dimensionality type */ String getDimensionalityTypeName(); /** * if the registered dimensionality types are listed by a user, this can provide a user-friendly * description for each * * @return the user-friendly description */ String getDimensionalityTypeDescription(); /** * This will return the primary index that match the options * * @return the primary index */ Index createIndex(DataStore dataStore, T options); /** * These are options specific to the type of index being exposed by this SPI plugin. * * @return the options for the dimensionality type provider */ T createOptions(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/spi/DimensionalityTypeRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.spi; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** These are the plugin index types that can be registered and used within Geowave. */ public class DimensionalityTypeRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(DimensionalityTypeRegistry.class); private static Map registeredDimensionalityTypes = null; private static synchronized void initDimensionalityTypeRegistry() { registeredDimensionalityTypes = new HashMap<>(); final Iterator dimensionalityTypesProviders = new SPIServiceRegistry(DimensionalityTypeRegistry.class).load( DimensionalityTypeProviderSpi.class); while (dimensionalityTypesProviders.hasNext()) { final DimensionalityTypeProviderSpi dimensionalityTypeProvider = dimensionalityTypesProviders.next(); if (registeredDimensionalityTypes.containsKey( dimensionalityTypeProvider.getDimensionalityTypeName())) { LOGGER.warn( "Dimensionality type '" + dimensionalityTypeProvider.getDimensionalityTypeName() + "' already registered. 
Unable to register type provided by " + dimensionalityTypeProvider.getClass().getName()); } else { registeredDimensionalityTypes.put( dimensionalityTypeProvider.getDimensionalityTypeName(), dimensionalityTypeProvider); } } } public static Map getRegisteredDimensionalityTypes() { if (registeredDimensionalityTypes == null) { initDimensionalityTypeRegistry(); } return Collections.unmodifiableMap(registeredDimensionalityTypes); } public static DimensionalityTypeProviderSpi getSelectedDimensionalityProvider( final String dimensionalityType) { if (registeredDimensionalityTypes == null) { initDimensionalityTypeRegistry(); } return registeredDimensionalityTypes.get(dimensionalityType); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/CoreRegisteredStatistics.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics;

import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;
import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.FieldValueBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.NumericRangeFieldValueBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;
import org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic;
import org.locationtech.geowave.core.store.statistics.field.BloomFilterStatistic.BloomFilterValue;
import org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic;
import org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic.CountMinSketchValue;
import org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue;
import org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic;
import org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic.HyperLogLogPlusValue;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;
import org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericMeanStatistic.NumericMeanValue;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;
import org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic.NumericStatsValue;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic;
import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic;
import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic;
import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue;
import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic;
import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue;
import org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic;
import org.locationtech.geowave.core.store.statistics.index.MaxDuplicatesStatistic.MaxDuplicatesValue;
import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic;
import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue;

/**
 * SPI registration of every statistic and binning strategy that ships with core-store. Each entry
 * binds a statistic type to constructors for the statistic and its value, plus two persistable ids
 * (one for the statistic, one for the value); the binning strategies each get a single id.
 */
public class CoreRegisteredStatistics implements StatisticsRegistrySPI {

  @Override
  public RegisteredStatistic[] getRegisteredStatistics() {
    // persistable ids here occupy the 2000-2033 range
    // NOTE(review): ids 2024/2025 are not used below — presumably reserved or retired; confirm
    // before reusing them for a new statistic
    return new RegisteredStatistic[] {
        // Index Statistics
        new RegisteredStatistic(
            DifferingVisibilityCountStatistic.STATS_TYPE,
            DifferingVisibilityCountStatistic::new,
            DifferingVisibilityCountValue::new,
            (short) 2000,
            (short) 2001),
        new RegisteredStatistic(
            DuplicateEntryCountStatistic.STATS_TYPE,
            DuplicateEntryCountStatistic::new,
            DuplicateEntryCountValue::new,
            (short) 2002,
            (short) 2003),
        new RegisteredStatistic(
            FieldVisibilityCountStatistic.STATS_TYPE,
            FieldVisibilityCountStatistic::new,
            FieldVisibilityCountValue::new,
            (short) 2004,
            (short) 2005),
        new RegisteredStatistic(
            IndexMetaDataSetStatistic.STATS_TYPE,
            IndexMetaDataSetStatistic::new,
            IndexMetaDataSetValue::new,
            (short) 2006,
            (short) 2007),
        new RegisteredStatistic(
            MaxDuplicatesStatistic.STATS_TYPE,
            MaxDuplicatesStatistic::new,
            MaxDuplicatesValue::new,
            (short) 2008,
            (short) 2009),
        new RegisteredStatistic(
            PartitionsStatistic.STATS_TYPE,
            PartitionsStatistic::new,
            PartitionsValue::new,
            (short) 2010,
            (short) 2011),
        new RegisteredStatistic(
            RowRangeHistogramStatistic.STATS_TYPE,
            RowRangeHistogramStatistic::new,
            RowRangeHistogramValue::new,
            (short) 2012,
            (short) 2013),
        // Data Type Statistics
        new RegisteredStatistic(
            CountStatistic.STATS_TYPE,
            CountStatistic::new,
            CountValue::new,
            (short) 2014,
            (short) 2015),
        // Field Statistics
        new RegisteredStatistic(
            FixedBinNumericHistogramStatistic.STATS_TYPE,
            FixedBinNumericHistogramStatistic::new,
            FixedBinNumericHistogramValue::new,
            (short) 2016,
            (short) 2017),
        new RegisteredStatistic(
            NumericRangeStatistic.STATS_TYPE,
            NumericRangeStatistic::new,
            NumericRangeValue::new,
            (short) 2018,
            (short) 2019),
        new RegisteredStatistic(
            CountMinSketchStatistic.STATS_TYPE,
            CountMinSketchStatistic::new,
            CountMinSketchValue::new,
            (short) 2020,
            (short) 2021),
        new RegisteredStatistic(
            HyperLogLogStatistic.STATS_TYPE,
            HyperLogLogStatistic::new,
            HyperLogLogPlusValue::new,
            (short) 2022,
            (short) 2023),
        new RegisteredStatistic(
            NumericMeanStatistic.STATS_TYPE,
            NumericMeanStatistic::new,
            NumericMeanValue::new,
            (short) 2026,
            (short) 2027),
        new RegisteredStatistic(
            NumericStatsStatistic.STATS_TYPE,
            NumericStatsStatistic::new,
            NumericStatsValue::new,
            (short) 2028,
            (short) 2029),
        new RegisteredStatistic(
            NumericHistogramStatistic.STATS_TYPE,
            NumericHistogramStatistic::new,
            NumericHistogramValue::new,
            (short) 2030,
            (short) 2031),
        new RegisteredStatistic(
            BloomFilterStatistic.STATS_TYPE,
            BloomFilterStatistic::new,
            BloomFilterValue::new,
            (short) 2032,
            (short) 2033),};
  }

  @Override
  public RegisteredBinningStrategy[] getRegisteredBinningStrategies() {
    // binning-strategy persistable ids occupy the 2050-2054 range
    return new RegisteredBinningStrategy[] {
        new RegisteredBinningStrategy(
            PartitionBinningStrategy.NAME,
            PartitionBinningStrategy::new,
            (short) 2050),
        new RegisteredBinningStrategy(
            DataTypeBinningStrategy.NAME,
            DataTypeBinningStrategy::new,
            (short) 2051),
        new RegisteredBinningStrategy(
            CompositeBinningStrategy.NAME,
            CompositeBinningStrategy::new,
            (short) 2052),
        new RegisteredBinningStrategy(
            FieldValueBinningStrategy.NAME,
            FieldValueBinningStrategy::new,
            (short) 2053),
        new RegisteredBinningStrategy(
            NumericRangeFieldValueBinningStrategy.NAME,
            NumericRangeFieldValueBinningStrategy::new,
            (short) 2054)};
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/DataStatisticsStore.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.util.Iterator; import javax.annotation.Nullable; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; /** * This is responsible for persisting data statistics (either in memory or to disk depending on the * implementation). */ public interface DataStatisticsStore { /** * Determines if the given statistic exists in the data store. * * @param statistic the statistic to check for */ boolean exists(Statistic> statistic); /** * Add a statistic to the data store. * * @param statistic the statistic to add */ void addStatistic(Statistic> statistic); /** * Remove a statistic from the data store. 
* * @param statistic the statistic to remove * @return {@code true} if the statistic existed and was removed */ boolean removeStatistic(Statistic> statistic); /** * Remove a set of statistics from the data store. * * @param statistics the statistics to remove * @return {@code true} if statistics were removed */ boolean removeStatistics(Iterator>> statistics); /** * Remove statistics associated with the given index. * * @param index the index to remove statistics for * @return {@code true} if statistics were removed */ boolean removeStatistics(Index index); /** * Remove statistics associated with the given data type. * * @param type the type to remove statistics for * @param adapterIndices indices used by the data type * @return {@code true} if statistics were removed */ boolean removeStatistics(DataTypeAdapter type, Index... adapterIndices); /** * Get statistics for the given index. * * @param index the index to get statistics for * @param statisticType an optional statistic type filter * @param tag an optional tag filter * @return a list of index statistics for the given index */ CloseableIterator>> getIndexStatistics( final Index index, final @Nullable StatisticType> statisticType, final @Nullable String tag); /** * Get statistics for the given data type. * * @param type the type to get statistics for * @param statisticType an optional statistic type filter * @param tag an optional tag filter * @return a list of data type statistics for the given type */ CloseableIterator>> getDataTypeStatistics( final DataTypeAdapter type, final @Nullable StatisticType> statisticType, final @Nullable String tag); /** * Get all field statistics for the given type. If a field name is specified, only statistics that * pertain to that field will be returned. 
* * @param type the type to get statistics for * @param statisticType an optional statistic type filter * @param fieldName an optional field name filter * @param tag an optional tag filter * @return a list of field statistics for the given type */ CloseableIterator>> getFieldStatistics( final DataTypeAdapter type, final @Nullable StatisticType> statisticType, final @Nullable String fieldName, final @Nullable String tag); /** * Get all statistics in the data store. * * @param statisticType an optional statistic type filter * @return a list of statistics in the data store */ CloseableIterator>> getAllStatistics( final @Nullable StatisticType> statisticType); /** * Gets the statistic with the given {@link StatisticId}, or {@code null} if it could not be * found. * * @param statisticId the id of the statistic to get * @return the statistic that matched the given ID */ , R> Statistic getStatisticById(final StatisticId statisticId); /** * This will write the statistic value to the underlying store. Note that this will overwrite * whatever the current persisted values are for the given statistic. Use incorporateStatistic to * aggregate the statistic value with any existing values. This method is not applicable to * statistics that use a binning strategy. * * @param statistic the statistic that the value belongs to * @param value the value to set */ , R> void setStatisticValue(Statistic statistic, V value); /** * This will write the statistic value to the underlying store. Note that this will overwrite * whatever the current persisted values are for the given statistic. Use incorporateStatistic to * aggregate the statistic value with any existing values. This method is not applicable to * statistics that do not use a binning strategy. 
* * @param statistic the statistic that the value belongs to * @param value the value to set * @param bin the bin that the value belongs to */ , R> void setStatisticValue( Statistic statistic, V value, ByteArray bin); /** * Add the statistic value to the store, preserving the existing value. This method is not * applicable to statistics that use a binning strategy. * * @param statistic the statistic to that the value belongs to * @param value the value to add */ , R> void incorporateStatisticValue(Statistic statistic, V value); /** * Add the statistic value to the store, preserving the existing value. This method is not * applicable to statistics that do not use a binning strategy. * * @param statistic the statistic to that the value belongs to * @param value the value to add * @param bin the bin that the value belongs to */ , R> void incorporateStatisticValue( Statistic statistic, V value, ByteArray bin); /** * Removes the value of the given statistic. This method is not applicable to statistics that use * a binning strategy. * * @param statistic the statistic to remove the value for * @return {@code true} if the value was removed */ boolean removeStatisticValue(Statistic> statistic); /** * Removes the value of the given statistic. This method is not applicable to statistics that do * not use a binning strategy. * * @param statistic the statistic to remove the value for * @param bin the bin of the statistic value to remove * @return {@code true} if the value was removed */ boolean removeStatisticValue(Statistic> statistic, ByteArray bin); /** * Removes all values associated with the given statistic. If the statistic uses a binning * strategy, all bins will be removed. * * @param statistic the statistic to remove values for * @return {@code true} if values were removed */ boolean removeStatisticValues(Statistic> statistic); /** * Remove all type-specific values from the given index statistic. 
If the statistic does not use a * {@link DataTypeBinningStrategy}, nothing will be removed. * * @param statistic * @param typeName * @return */ boolean removeTypeSpecificStatisticValues( IndexStatistic> statistic, String typeName); /** * Creates a writer that can be used to write values for a given statistic. * * @param statistic the statistic to write values for * @return a new statistic value writer */ , R> StatisticValueWriter createStatisticValueWriter( Statistic statistic); /** * Creates a callback that can be used to update statistics for the given index and adapter. * * @param index the index * @param type the data type * @param updateAdapterStats if {@code true} adapter statistics will be updated, otherwise only * index statistics will be updated * @return a statistics update callback */ StatisticUpdateCallback createUpdateCallback( Index index, AdapterToIndexMapping indexMapping, InternalDataAdapter type, boolean updateAdapterStats); /** * Returns all values for each provided statistic. If a set of bins are provided, statistics that * use a binning strategy will only return values that match one of the given bins. * * @param statistics the statistics to get values for * @param binConstraints an optional bins filter * @param authorizations authorizations for the query * @return an iterator for all matching statistic values */ CloseableIterator> getStatisticValues( final Iterator>> statistics, @Nullable final ByteArrayConstraints binConstraints, final String... authorizations); /** * Return the value of the given statistic. This method is not applicable to statistics that use a * binning strategy. * * @param statistic the statistic to get the value of * @param authorizations authorizations for the query * @return the value of the statistic, or {@code null} if it was not found */ , R> V getStatisticValue( final Statistic statistic, String... authorizations); /** * Return the value of the given statistic. 
This method is not applicable to statistics that do * not use a binning strategy. * * @param statistic the statistic to get the value of * @param bin the bin of the value to get * @param authorizations authorizations for the query * @return the value of the statistic, or {@code null} if it was not found */ , R> V getStatisticValue( final Statistic statistic, ByteArray bin, String... authorizations); /** * Return the values of the given statistic that have bins that match the given ranges. This * method is not applicable to statistics that do not use a binning strategy. * * @param statistic the statistic to get the value of * @param binRanges the ranges of bins to get values for * @param authorizations authorizations for the query * @return the value of the statistic, or {@code null} if it was not found */ , R> CloseableIterator getStatisticValues( final Statistic statistic, ByteArrayRange[] binRanges, String... authorizations); /** * Return the values of the given statistic that have bins that start with the given prefix. This * method is not applicable to statistics that do not use a binning strategy. * * @param statistic the statistic to get the value of * @param binPrefix the bin prefix to get values for * @param authorizations authorizations for the query * @return the matching values of the statistic */ , R> CloseableIterator getStatisticValues( final Statistic statistic, ByteArray binPrefix, String... authorizations); /** * Returns all of the values for a given statistic. If the statistic uses a binning strategy, each * bin will be returned as a separate value. * * @param statistic the statistic to get values for * @param authorizations authorizations for the query * @return the values for the statistic */ , R> CloseableIterator getStatisticValues( final Statistic statistic, String... authorizations); /** * Merges all statistic values that share the same key. Every separate write to a data type can * create new values for a statistic. 
Over time, this can result in a lot of values for a single * statistic. This function can be used to merge those values to improve statistic query * performance. * * @return {@code true} if the merge was successful */ boolean mergeStats(); /** * Remove all statistics from the data store. */ void removeAll(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/DefaultStatisticsProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.util.List; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; /** * This interface can be used with data type adapters and indices so that default statistics will be * added to the data store when the adapter/index is added. */ public interface DefaultStatisticsProvider { /** * Get all default statistics for this adapter/index. * * @return the default statistics */ public List>> getDefaultStatistics(); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/InternalStatisticsHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.util.Collection; import java.util.List; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic; import org.locationtech.geowave.core.store.statistics.index.DifferingVisibilityCountStatistic.DifferingVisibilityCountValue; import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic; import org.locationtech.geowave.core.store.statistics.index.DuplicateEntryCountStatistic.DuplicateEntryCountValue; import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic; import org.locationtech.geowave.core.store.statistics.index.FieldVisibilityCountStatistic.FieldVisibilityCountValue; import 
org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic; import org.locationtech.geowave.core.store.statistics.index.IndexMetaDataSetStatistic.IndexMetaDataSetValue; import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType; import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic; import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue; /** * This class contains static methods to make querying internal statistics as efficient as possible. */ public class InternalStatisticsHelper { /** * Get the value of an internal data type statistic that does not use a binning strategy. * * @param statisticsStore the statistics store * @param statisticType the statistic type * @param typeName the data type name * @param authorizations authorizations for the query * @return the value, or {@code null} if it didn't exist */ public static , R> V getDataTypeStatistic( final DataStatisticsStore statisticsStore, final DataTypeStatisticType statisticType, final String typeName, final String... authorizations) { final Statistic statistic = statisticsStore.getStatisticById( DataTypeStatistic.generateStatisticId(typeName, statisticType, Statistic.INTERNAL_TAG)); if (statistic != null) { return statisticsStore.getStatisticValue(statistic, authorizations); } return null; } /** * Get the value of an internal field statistic that does not use a binning strategy. 
* * @param statisticsStore the statistics store * @param statisticType the statistic type * @param typeName the data type name * @param fieldName the field name * @param authorizations authorizations for the query * @return the value, or {@code null} if it didn't exist */ public static , R> V getFieldStatistic( final DataStatisticsStore statisticsStore, final FieldStatisticType statisticType, final String typeName, final String fieldName, final String... authorizations) { final Statistic statistic = statisticsStore.getStatisticById( FieldStatistic.generateStatisticId( typeName, statisticType, fieldName, Statistic.INTERNAL_TAG)); if (statistic != null) { return statisticsStore.getStatisticValue(statistic, authorizations); } return null; } public static , R> V getIndexStatistic( final DataStatisticsStore statisticsStore, final IndexStatisticType statisticType, final String indexName, final String typeName, final byte[] partitionKey, final String... authorizations) { final StatisticId statisticId = IndexStatistic.generateStatisticId(indexName, statisticType, Statistic.INTERNAL_TAG); final Statistic stat = statisticsStore.getStatisticById(statisticId); if (stat != null) { return statisticsStore.getStatisticValue( stat, partitionKey != null ? CompositeBinningStrategy.getBin( DataTypeBinningStrategy.getBin(typeName), PartitionBinningStrategy.getBin(partitionKey)) : DataTypeBinningStrategy.getBin(typeName), authorizations); } return null; } /** * Get the duplicate counts for an index. 
* * @param index the index * @param adapterIdsToQuery the adapters to query * @param adapterStore the adapter store * @param statisticsStore the statistics store * @param authorizations authorizations for the query * @return the duplicate counts, or {@code null} if it didn't exist */ public static DuplicateEntryCountValue getDuplicateCounts( final Index index, final Collection adapterIdsToQuery, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final String... authorizations) { return getInternalIndexStatistic( DuplicateEntryCountStatistic.STATS_TYPE, index, adapterIdsToQuery, adapterStore, statisticsStore, authorizations); } /** * Get the index metadtat for an index. * * @param index the index * @param adapterIdsToQuery the adapters to query * @param adapterStore the adapter store * @param statisticsStore the statistics store * @param authorizations authorizations for the query * @return the index metadata, or {@code null} if it didn't exist */ public static IndexMetaDataSetValue getIndexMetadata( final Index index, final Collection adapterIdsToQuery, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final String... authorizations) { return getInternalIndexStatistic( IndexMetaDataSetStatistic.STATS_TYPE, index, adapterIdsToQuery, adapterStore, statisticsStore, authorizations); } /** * Get the partitions for an index. * * @param index the index * @param adapterIdsToQuery the adapters to query * @param adapterStore the adapter store * @param statisticsStore the statistics store * @param authorizations authorizations for the query * @return the partitions, or {@code null} if it didn't exist */ public static PartitionsValue getPartitions( final Index index, final Collection adapterIdsToQuery, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final String... 
authorizations) { return getInternalIndexStatistic( PartitionsStatistic.STATS_TYPE, index, adapterIdsToQuery, adapterStore, statisticsStore, authorizations); } /** * Get the differing visibility counts for an index. * * @param index the index * @param adapterIdsToQuery the adapters to query * @param adapterStore the adapter store * @param statisticsStore the statistics store * @param authorizations authorizations for the query * @return the differing visibility counts, or {@code null} if it didn't exist */ public static DifferingVisibilityCountValue getDifferingVisibilityCounts( final Index index, final Collection adapterIdsToQuery, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final String... authorizations) { return getInternalIndexStatistic( DifferingVisibilityCountStatistic.STATS_TYPE, index, adapterIdsToQuery, adapterStore, statisticsStore, authorizations); } /** * Get the field visibility counts for an index. * * @param index the index * @param adapterIdsToQuery the adapters to query * @param adapterStore the adapter store * @param statisticsStore the statistics store * @param authorizations authorizations for the query * @return the field visibility counts, or {@code null} if it didn't exist */ public static FieldVisibilityCountValue getVisibilityCounts( final Index index, final Collection adapterIdsToQuery, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final String... authorizations) { return getInternalIndexStatistic( FieldVisibilityCountStatistic.STATS_TYPE, index, adapterIdsToQuery, adapterStore, statisticsStore, authorizations); } /** * Get the row range histogram of an index partition. 
* * @param index the index * @param adapterIds the adapters to query * @param adapterStore the adapter store * @param statisticsStore the statistics store * @param partitionKey the partition key * @param authorizations authorizations for the query * @return the row range histogram, or {@code null} if it didn't exist */ public static RowRangeHistogramValue getRangeStats( final Index index, final List adapterIds, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final ByteArray partitionKey, final String... authorizations) { final RowRangeHistogramStatistic stat = (RowRangeHistogramStatistic) statisticsStore.getStatisticById( IndexStatistic.generateStatisticId( index.getName(), RowRangeHistogramStatistic.STATS_TYPE, Statistic.INTERNAL_TAG)); if ((stat != null) && (stat.getBinningStrategy() instanceof CompositeBinningStrategy) && ((CompositeBinningStrategy) stat.getBinningStrategy()).isOfType( DataTypeBinningStrategy.class, PartitionBinningStrategy.class)) { RowRangeHistogramValue combinedValue = null; for (final Short adapterId : adapterIds) { final RowRangeHistogramValue value = statisticsStore.getStatisticValue( stat, CompositeBinningStrategy.getBin( DataTypeBinningStrategy.getBin(adapterStore.getAdapter(adapterId)), PartitionBinningStrategy.getBin(partitionKey.getBytes())), authorizations); if (value != null) { if (combinedValue == null) { combinedValue = value; } else { combinedValue.merge(value); } } } return combinedValue; } return null; } /** * Get the row range histogram of a specific partition in an index. 
* * @param statisticsStore the statistics store * @param indexName the index name * @param typeName the type name * @param partitionKey the partition key * @param authorizations authorizations for the query * @return the row range histogram, or {@code null} if it didn't exist */ public static RowRangeHistogramValue getRangeStats( final DataStatisticsStore statisticsStore, final String indexName, final String typeName, final ByteArray partitionKey, final String... authorizations) { final Statistic statistic = statisticsStore.getStatisticById( IndexStatistic.generateStatisticId( indexName, RowRangeHistogramStatistic.STATS_TYPE, Statistic.INTERNAL_TAG)); if ((statistic != null) && (statistic.getBinningStrategy() instanceof CompositeBinningStrategy) && ((CompositeBinningStrategy) statistic.getBinningStrategy()).isOfType( DataTypeBinningStrategy.class, PartitionBinningStrategy.class)) { return statisticsStore.getStatisticValue( statistic, CompositeBinningStrategy.getBin( DataTypeBinningStrategy.getBin(typeName), PartitionBinningStrategy.getBin(partitionKey.getBytes())), authorizations); } return null; } private static , R> V getInternalIndexStatistic( final IndexStatisticType statisticType, final Index index, final Collection adapterIdsToQuery, final PersistentAdapterStore adapterStore, final DataStatisticsStore statisticsStore, final String... 
authorizations) { final StatisticId statisticId = IndexStatistic.generateStatisticId(index.getName(), statisticType, Statistic.INTERNAL_TAG); final Statistic stat = statisticsStore.getStatisticById(statisticId); if (stat != null) { V combinedValue = null; for (final short adapterId : adapterIdsToQuery) { final DataTypeAdapter adapter = adapterStore.getAdapter(adapterId); final V value = statisticsStore.getStatisticValue( stat, DataTypeBinningStrategy.getBin(adapter), authorizations); if (combinedValue == null) { combinedValue = value; } else { combinedValue.merge(value); } } return combinedValue; } return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticId.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.StatisticValue; import com.google.common.primitives.Bytes; /** * A unique identifier for a statistic. The group id of the identifier correlates to the statistic * group that it belongs to, for example, all index statistics for a single index belong to the same * group, while all field statistics for a given type also belong to the same group. The unique id * of the identifier is guaranteed to be unique among all statistics. Multiple statistics of the * same type in the same group can be added by using different tags. 
* * @param the statistic value type */ public class StatisticId> { public static final byte[] UNIQUE_ID_SEPARATOR = new byte[] {'|'}; protected final ByteArray groupId; protected final StatisticType statisticType; protected final String tag; protected ByteArray cachedBytes = null; /** * Create a new statistic id with the given group, statistic type, and tag. * * @param groupId the group id * @param statisticType the statistic type * @param tag the tag */ public StatisticId( final ByteArray groupId, final StatisticType statisticType, final String tag) { this.groupId = groupId; this.statisticType = statisticType; this.tag = tag; } /** * Get the statistic type of the statistic represented by this id. * * @return the statistic type */ public StatisticType getStatisticType() { return statisticType; } /** * Get the tag of the statistic represented by this id. * * @return the tag */ public String getTag() { return tag; } /** * Get the group id of the identifier. The group id correlates to the statistic group that it * belongs to, for example, all index statistics for a single index belong to the same group, * while all field statistics for a given type also belong to the same group. * * @return the group id */ public ByteArray getGroupId() { return groupId; } /** * Get the unique id of the identifier. The unique id is guaranteed to be unique among all * statistics. Multiple statistics of the same type in the same group can be added by using * different tags. * * @return the unique id */ public ByteArray getUniqueId() { if (cachedBytes == null) { cachedBytes = generateUniqueId(statisticType, tag); } return cachedBytes; } /** * Generate a unique id with the given statistic type and tag. 
* * @param statisticType the statistic type * @param tag the tag * @return the unique id */ public static ByteArray generateUniqueId(final StatisticType statisticType, final String tag) { if (tag == null) { return statisticType; } else { return new ByteArray( Bytes.concat( statisticType.getBytes(), UNIQUE_ID_SEPARATOR, StringUtils.stringToBinary(tag))); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.util.Arrays; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.api.StatisticValue; /** * This class serves as the base implementation for a statistic type, based on {@link ByteArray}. * * @param the statistic value type */ public abstract class StatisticType> extends ByteArray { private static final long serialVersionUID = 1L; public StatisticType(final String id) { super(id); } @Override public boolean equals(final Object obj) { // If all we know is the name of the stat type, // but not the class we need to override equals on // the base statistics type so that the // class does not need to match if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof StatisticType)) { return false; } final StatisticType other = (StatisticType) obj; return Arrays.equals(bytes, other.getBytes()); } @Override public String toString() { return getString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticUpdateCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.io.Closeable; import java.io.Flushable; import java.util.List; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.callback.DeleteCallback; import org.locationtech.geowave.core.store.callback.IngestCallback; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import com.google.common.collect.Lists; /** * This class is responsible for managing updates to a set of statistics for a given type and index. * It serves as an ingest, scan, and delete callback that will write all statistic updates to the * statistics store. */ public class StatisticUpdateCallback implements IngestCallback, DeleteCallback, ScanCallback, AutoCloseable, Closeable, Flushable { private static final int FLUSH_STATS_THRESHOLD = 1000000; private final List> statisticUpdateHandlers; private final Object MUTEX = new Object(); private final DataStatisticsStore statisticsStore; private final boolean skipFlush; private boolean overwrite; private int updateCount = 0; /** * Create an update callback for the given set of statistics. 
* * @param statistics the statistics to update * @param statisticsStore the statistics store * @param index the index used in the operation * @param type the type used in the operation */ @SuppressWarnings({"rawtypes", "unchecked"}) public StatisticUpdateCallback( final List>> statistics, final DataStatisticsStore statisticsStore, final Index index, final AdapterToIndexMapping indexMapping, final InternalDataAdapter type) { this.statisticsStore = statisticsStore; statisticUpdateHandlers = Lists.newArrayListWithCapacity(statistics.size()); for (Statistic statistic : statistics) { StatisticUpdateHandler handler = new StatisticUpdateHandler(statistic, index, indexMapping, type); statisticUpdateHandlers.add(handler); } final Object v = System.getProperty("StatsCompositionTool.skipFlush"); skipFlush = ((v != null) && v.toString().equalsIgnoreCase("true")); } @Override public void entryDeleted(T entry, GeoWaveRow... rows) { synchronized (MUTEX) { for (StatisticUpdateHandler handler : statisticUpdateHandlers) { handler.entryDeleted(entry, rows); } updateCount++; checkStats(); } } @Override public void entryIngested(T entry, GeoWaveRow... 
rows) { statisticUpdateHandlers.forEach(v -> v.entryIngested(entry, rows)); } @Override public void entryScanned(T entry, GeoWaveRow row) { statisticUpdateHandlers.forEach(v -> v.entryScanned(entry, row)); } private void checkStats() { if (!skipFlush && (updateCount >= FLUSH_STATS_THRESHOLD)) { updateCount = 0; flush(); } } @Override public void flush() { synchronized (MUTEX) { for (final StatisticUpdateHandler updateHandler : statisticUpdateHandlers) { updateHandler.writeStatistics(statisticsStore, overwrite); } // just overwrite the initial set of values overwrite = false; } } @Override public void close() { flush(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticUpdateHandler.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.EntryVisibilityHandler; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticBinningStrategy; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.callback.DeleteCallback; import org.locationtech.geowave.core.store.callback.IngestCallback; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy; import org.locationtech.geowave.core.store.statistics.visibility.DefaultStatisticVisibility; import org.locationtech.geowave.core.store.statistics.visibility.FieldDependentStatisticVisibility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * This class handles updates for a single statistic. 
It is responsible for creating separate * statistic values for each visibility and bin combination. */ public class StatisticUpdateHandler, R> implements IngestCallback, DeleteCallback, ScanCallback { private static final Logger LOGGER = LoggerFactory.getLogger(StatisticUpdateHandler.class); private final Statistic statistic; private final Map> statisticsMap = new HashMap<>(); private final EntryVisibilityHandler visibilityHandler; private final InternalDataAdapter adapter; private final IngestHandler ingestHandler; private final DeleteHandler deleteHandler; private final boolean supportsIngestCallback; private final boolean supportsDeleteCallback; private final boolean filterByPartition; private static final ByteArray NO_BIN = new ByteArray(new byte[0]); public StatisticUpdateHandler( final Statistic statistic, final Index index, final AdapterToIndexMapping indexMapping, final InternalDataAdapter adapter) { this.statistic = statistic; this.adapter = adapter; this.visibilityHandler = getVisibilityHandler(indexMapping, index); this.ingestHandler = new IngestHandler<>(); this.deleteHandler = new DeleteHandler<>(); final V value = statistic.createEmpty(); supportsIngestCallback = value instanceof StatisticsIngestCallback; supportsDeleteCallback = value instanceof StatisticsDeleteCallback; final StatisticBinningStrategy binningStrategy = statistic.getBinningStrategy(); if (binningStrategy != null) { filterByPartition = binningStrategy instanceof PartitionBinningStrategy || ((binningStrategy instanceof CompositeBinningStrategy) && ((CompositeBinningStrategy) binningStrategy).usesStrategy( PartitionBinningStrategy.class)); } else { filterByPartition = false; } } protected void handleEntry( final Handler handler, final T entry, final GeoWaveRow... 
rows) { if (rows.length == 0) { // This can happen with attribute indices when the attribute value is null return; } final ByteArray visibility = new ByteArray(visibilityHandler.getVisibility(entry, rows)); Map binnedValues = statisticsMap.get(visibility); if (binnedValues == null) { binnedValues = Maps.newHashMap(); statisticsMap.put(visibility, binnedValues); } if (statistic.getBinningStrategy() != null) { if (filterByPartition) { for (final GeoWaveRow row : rows) { handleBinnedRows(handler, binnedValues, entry, row); } } else { handleBinnedRows(handler, binnedValues, entry, rows); } } else { handleBin(handler, binnedValues, NO_BIN, entry, rows); } } protected void handleBinnedRows( final Handler handler, final Map binnedValues, final T entry, final GeoWaveRow... rows) { final ByteArray[] bins = statistic.getBinningStrategy().getBins(adapter, entry, rows); for (final ByteArray bin : bins) { handleBin(handler, binnedValues, bin, entry, rows); } } protected void handleBin( final Handler handler, final Map binnedValues, final ByteArray bin, final T entry, final GeoWaveRow... 
rows) { V value = binnedValues.get(bin); if (value == null) { value = statistic.createEmpty(); value.setBin(bin); binnedValues.put(bin, value); } handler.handle(value, adapter, entry, rows); } private EntryVisibilityHandler getVisibilityHandler( final AdapterToIndexMapping indexMapping, final Index index) { final Set usedFields = Sets.newHashSet(); if (statistic instanceof FieldStatistic) { usedFields.add(((FieldStatistic) statistic).getFieldName()); } if (statistic.getBinningStrategy() != null) { statistic.getBinningStrategy().addFieldsUsed(usedFields); } boolean fieldDependent = false; for (final String fieldName : usedFields) { // If all of the used fields are part of the common index model, we can use the default // visibility if ((indexMapping != null) && !adapter.isCommonIndexField(indexMapping, fieldName)) { fieldDependent = true; break; } } if (fieldDependent) { return new FieldDependentStatisticVisibility<>( index != null ? index.getIndexModel() : null, adapter, usedFields.toArray(new String[usedFields.size()])); } return new DefaultStatisticVisibility<>(); } @Override public synchronized void entryIngested(final T entry, final GeoWaveRow... rows) { if (supportsIngestCallback) { handleEntry(ingestHandler, entry, rows); } } @Override public synchronized void entryDeleted(final T entry, final GeoWaveRow... 
rows) { if (supportsDeleteCallback) { handleEntry(deleteHandler, entry, rows); } } @Override public synchronized void entryScanned(final T entry, final GeoWaveRow row) { if (supportsIngestCallback) { handleEntry(ingestHandler, entry, row); } } public void writeStatistics(final DataStatisticsStore statisticsStore, final boolean overwrite) { if (overwrite) { statisticsStore.removeStatisticValues(statistic); } try (StatisticValueWriter statWriter = statisticsStore.createStatisticValueWriter(statistic)) { for (final Entry> visibilityStatistic : statisticsMap.entrySet()) { final Map bins = visibilityStatistic.getValue(); for (final Entry binValue : bins.entrySet()) { statWriter.writeStatisticValue( binValue.getKey().getBytes(), visibilityStatistic.getKey().getBytes(), binValue.getValue()); } } statisticsMap.clear(); } catch (final Exception e) { LOGGER.error("Unable to write statistic value.", e); } } private static interface Handler, R> { public void handle( V value, DataTypeAdapter adapter, final T entry, final GeoWaveRow... rows); } private static class IngestHandler, R> implements Handler { @Override public void handle( final V value, final DataTypeAdapter adapter, final T entry, final GeoWaveRow... rows) { if (value instanceof StatisticsIngestCallback) { ((StatisticsIngestCallback) value).entryIngested(adapter, entry, rows); } } } private static class DeleteHandler, R> implements Handler { @Override public void handle( final V value, final DataTypeAdapter adapter, final T entry, final GeoWaveRow... 
rows) { if (value instanceof StatisticsDeleteCallback) { ((StatisticsDeleteCallback) value).entryDeleted(adapter, entry, rows); } } } @Override public String toString() { return "StatisticUpdateHandler -> " + statistic.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticValueReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import java.util.Arrays; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; /** * Reads GeoWaveMetadata as statistic values, and merges any values that share a primary id. */ public class StatisticValueReader, R> implements CloseableIterator { private final CloseableIterator metadataIter; private final Statistic statistic; private V next = null; private byte[] nextPrimaryId = null; public StatisticValueReader( final CloseableIterator metadataIter, final Statistic statistic) { this.metadataIter = metadataIter; this.statistic = statistic; } @Override public boolean hasNext() { return (next != null) || metadataIter.hasNext(); } @Override public V next() { V currentValue = next; byte[] currentPrimaryId = nextPrimaryId; next = null; nextPrimaryId = null; while (metadataIter.hasNext()) { final GeoWaveMetadata row = metadataIter.next(); final V entry = statistic.createEmpty(); entry.fromBinary(PersistenceUtils.stripClassId(row.getValue())); if (currentValue == null) { currentValue = entry; currentPrimaryId = row.getPrimaryId(); } else { if (Arrays.equals(currentPrimaryId, row.getPrimaryId())) { currentValue.merge(entry); } else { next = entry; nextPrimaryId = row.getPrimaryId(); break; } } } if (currentValue != null && statistic.getBinningStrategy() != 
null) { currentValue.setBin(getBinFromValueId(statistic.getId(), currentPrimaryId)); } return currentValue; } @Override public void close() { metadataIter.close(); } public static ByteArray getBinFromValueId( final StatisticId statisticId, final byte[] valueId) { int binIndex = statisticId.getUniqueId().getBytes().length + StatisticId.UNIQUE_ID_SEPARATOR.length; byte[] binBytes = new byte[valueId.length - binIndex]; for (int i = 0; i < binBytes.length; i++) { binBytes[i] = valueId[i + binIndex]; } return new ByteArray(binBytes); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticValueWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.operations.MetadataWriter; /** * Writes statistic values to the data store using a given metadata writer. */ public class StatisticValueWriter> implements AutoCloseable { private final MetadataWriter writer; private final Statistic statistic; public StatisticValueWriter(final MetadataWriter writer, final Statistic statistic) { this.writer = writer; this.statistic = statistic; } @Override public void close() throws Exception { writer.close(); } public void writeStatisticValue(final byte[] bin, final byte[] visibility, V value) { byte[] primaryId; if (statistic.getBinningStrategy() != null) { primaryId = StatisticValue.getValueId(statistic.getId(), bin); } else { primaryId = statistic.getId().getUniqueId().getBytes(); } writer.write( new GeoWaveMetadata( primaryId, statistic.getId().getGroupId().getBytes(), visibility, PersistenceUtils.toBinary(value))); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsDeleteCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * Interface for statistics that support updates when an entry is deleted. */ public interface StatisticsDeleteCallback { /** * Called when an entry is deleted from the data store. * * @param type the data type that the entry belongs to * @param entry the deleted entry * @param rows the GeoWave rows associated with the entry */ public void entryDeleted(DataTypeAdapter type, T entry, GeoWaveRow... rows); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsIngestCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * Interface for statistics that support updates when an entry is ingested. */ public interface StatisticsIngestCallback { /** * Called when an entry is added to the data store. 
* * @param type the data type that the entry belongs to * @param entry the ingested entry * @param rows the GeoWave rows associated with the entry */ public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows); } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; public class StatisticsPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return StatisticsRegistry.instance().getPersistables(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics;

import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.locationtech.geowave.core.index.SPIServiceRegistry;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi.PersistableIdAndConstructor;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticBinningStrategy;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI.RegisteredBinningStrategy;
import org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI.RegisteredStatistic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;

/**
 * Singleton registry for all supported statistics. Statistics can be added to the system using
 * {@link StatisticsRegistrySPI}.
 *
 * NOTE(review): generic type arguments appear to have been stripped from this extraction (e.g.
 * raw {@code Map}, {@code Statistic>}); confirm declared types against the upstream source.
 */
public class StatisticsRegistry {
  private static final Logger LOGGER = LoggerFactory.getLogger(StatisticsRegistry.class);
  private static StatisticsRegistry INSTANCE = null;
  // Both maps are keyed by the lower-cased statistic type string / strategy name.
  private final Map statistics = Maps.newHashMap();
  private final Map binningStrategies = Maps.newHashMap();

  // Discovers every StatisticsRegistrySPI implementation on the classpath and registers the
  // statistics and binning strategies each one provides.
  private StatisticsRegistry() {
    final Iterator spiIter =
        new SPIServiceRegistry(StatisticsRegistry.class).load(StatisticsRegistrySPI.class);
    while (spiIter.hasNext()) {
      final StatisticsRegistrySPI providedStats = spiIter.next();
      Arrays.stream(providedStats.getRegisteredStatistics()).forEach(this::putStat);
      Arrays.stream(providedStats.getRegisteredBinningStrategies()).forEach(
          this::putBinningStrategy);
    }
  }

  // First registration of a given statistic type wins; duplicates are logged and dropped.
  private void putStat(final RegisteredStatistic stat) {
    final String key = stat.getStatisticsType().getString().toLowerCase();
    if (statistics.containsKey(key)) {
      LOGGER.warn(
          "Multiple statistics with the same type were found on the classpath. Only the first instance will be loaded!");
      return;
    }
    statistics.put(key, stat);
  }

  // First registration of a given strategy name wins; duplicates are logged and dropped.
  private void putBinningStrategy(final RegisteredBinningStrategy strategy) {
    final String key = strategy.getStrategyName().toLowerCase();
    if (binningStrategies.containsKey(key)) {
      LOGGER.warn(
          "Multiple binning strategies with the same name were found on the classpath. Only the first instance will be loaded!");
      return;
    }
    binningStrategies.put(key, strategy);
  }

  /**
   * @return the lazily-created singleton instance.
   *
   *         NOTE(review): this lazy initialization is not thread-safe; two threads racing here
   *         could each construct an instance. Consider an eager holder or synchronization.
   */
  public static StatisticsRegistry instance() {
    if (INSTANCE == null) {
      INSTANCE = new StatisticsRegistry();
    }
    return INSTANCE;
  }

  /**
   * Builds the persistable id/constructor table for every registered statistic (two entries: the
   * statistic itself and its value) and every registered binning strategy.
   */
  @SuppressWarnings("unchecked")
  public PersistableIdAndConstructor[] getPersistables() {
    final Collection registeredStatistics = statistics.values();
    final Collection registeredBinningStrategies = binningStrategies.values();
    // two persistables per statistic (statistic + value) plus one per binning strategy
    final PersistableIdAndConstructor[] persistables =
        new PersistableIdAndConstructor[(registeredStatistics.size() * 2)
            + registeredBinningStrategies.size()];
    int persistableIndex = 0;
    for (final RegisteredStatistic statistic : registeredStatistics) {
      persistables[persistableIndex++] =
          new PersistableIdAndConstructor(
              statistic.getStatisticPersistableId(),
              (Supplier) (Supplier) statistic.getStatisticConstructor());
      persistables[persistableIndex++] =
          new PersistableIdAndConstructor(
              statistic.getValuePersistableId(),
              (Supplier) (Supplier) statistic.getValueConstructor());
    }
    for (final RegisteredBinningStrategy binningStrategy : registeredBinningStrategies) {
      persistables[persistableIndex++] =
          new PersistableIdAndConstructor(
              binningStrategy.getPersistableId(),
              (Supplier) (Supplier) binningStrategy.getConstructor());
    }
    return persistables;
  };

  /**
   * Get registered index statistics that are compatible with the given index class.
   *
   * @param indexClass the class of the index
   * @return a list of index statistics
   */
  public List>> getRegisteredIndexStatistics(final Class indexClass) {
    return statistics.values().stream().filter(
        s -> s.isIndexStatistic() && s.isCompatibleWith(indexClass)).map(
            s -> s.getStatisticConstructor().get()).collect(Collectors.toList());
  }

  /**
   * Get registered data type statistics that are compatible with the data type class.
   *
   * @param adapterDataClass the class of the entries of the data type adapter
   * @return a list of compatible statistics
   */
  public List>> getRegisteredDataTypeStatistics(final Class adapterDataClass) {
    return statistics.values().stream().filter(
        s -> s.isDataTypeStatistic() && s.isCompatibleWith(adapterDataClass)).map(
            s -> s.getStatisticConstructor().get()).collect(Collectors.toList());
  }

  /**
   * Get registered field statistics that are compatible with the provided type.
   *
   * @param type the type to get compatible statistics for
   * @param fieldName the field to get compatible statistics for; {@code null} matches every field
   * @return a map of compatible statistics, keyed by field name
   */
  public Map>>> getRegisteredFieldStatistics(
      final DataTypeAdapter type,
      final String fieldName) {
    final Map>>> fieldStatistics = Maps.newHashMap();
    final FieldDescriptor[] fieldDescriptors = type.getFieldDescriptors();
    for (int i = 0; i < fieldDescriptors.length; i++) {
      final String name = fieldDescriptors[i].fieldName();
      final Class fieldClass = fieldDescriptors[i].bindingClass();
      if ((fieldName == null) || fieldName.equals(name)) {
        final List>> fieldOptions =
            statistics.values().stream().filter(
                s -> s.isFieldStatistic() && s.isCompatibleWith(fieldClass)).map(
                    s -> s.getStatisticConstructor().get()).collect(Collectors.toList());
        fieldStatistics.put(name, fieldOptions);
      }
    }
    return fieldStatistics;
  }

  /**
   * Get all registered statistics.
   *
   * @return a list of freshly-constructed instances of every registered statistic
   */
  public List>> getAllRegisteredStatistics() {
    return statistics.values().stream().map(s -> s.getStatisticConstructor().get()).collect(
        Collectors.toList());
  }

  /**
   * Get all registered binning strategies.
   *
   * @return a list of freshly-constructed instances of every registered binning strategy
   */
  public List getAllRegisteredBinningStrategies() {
    return binningStrategies.values().stream().map(b -> b.getConstructor().get()).collect(
        Collectors.toList());
  }

  /**
   * Retrieves the statistic of the given statistic type.
   *
   * @param statType the statistic type
   * @return the statistic that matches the given name, or {@code null} if it could not be found
   */
  public Statistic> getStatistic(final StatisticType statType) {
    return getStatistic(statType.getString());
  }

  /**
   * Retrieves the statistic of the given statistic type (case-insensitive lookup).
   *
   * @param statType the statistic type
   * @return the statistic that matches the given name, or {@code null} if it could not be found
   */
  public Statistic> getStatistic(final String statType) {
    final RegisteredStatistic statistic = statistics.get(statType.toLowerCase());
    if (statistic == null) {
      return null;
    }
    return statistic.getStatisticConstructor().get();
  }

  /**
   * Retrieves the statistic type that matches the given string.
   *
   * @param statType the statistic type to get
   * @return the statistic type, or {@code null} if a matching statistic type could not be found
   */
  public StatisticType> getStatisticType(final String statType) {
    final RegisteredStatistic statistic = statistics.get(statType.toLowerCase());
    if (statistic == null) {
      return null;
    }
    return statistic.getStatisticsType();
  }

  /**
   * Retrieves the binning strategy that matches the given string (case-insensitive lookup).
   *
   * @param binningStrategyType the binning strategy to get
   * @return the binning strategy, or {@code null} if a matching binning strategy could not be found
   */
  public StatisticBinningStrategy getBinningStrategy(final String binningStrategyType) {
    final RegisteredBinningStrategy strategy =
        binningStrategies.get(binningStrategyType.toLowerCase());
    if (strategy == null) {
      return null;
    }
    return strategy.getConstructor().get();
  }
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsRegistrySPI.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics;

import java.util.function.Supplier;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticBinningStrategy;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;
import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;
import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType;

/**
 * Base SPI for registered statistics. This class also serves as the persistable registry for those
 * statistics.
 *
 * NOTE(review): generic type arguments appear to have been stripped from this extraction (e.g.
 * {@code StatisticType>}, {@code Supplier>>}); confirm declared types against upstream.
 */
public interface StatisticsRegistrySPI {
  /**
   * Return a set of registered statistics.
   *
   * @return the registered statistics
   */
  RegisteredStatistic[] getRegisteredStatistics();

  /**
   * Return a set of registered binning strategies.
   *
   * @return the registered binning strategies
   */
  RegisteredBinningStrategy[] getRegisteredBinningStrategies();

  /**
   * This class contains everything needed to register a statistic with GeoWave.
   */
  public static class RegisteredStatistic {
    private final StatisticType> statType;
    private final Supplier>> statisticConstructor;
    private final Supplier> valueConstructor;
    private final short statisticPersistableId;
    private final short valuePersistableId;
    // Lazily-constructed instance used only for compatibility checks (see isCompatibleWith).
    private Statistic prototype = null;

    /**
     * @param statType the statistics type
     * @param statisticConstructor the statistic constructor
     * @param valueConstructor the statistic value constructor
     * @param statisticPersistableId the persistable id to use for the statistic
     * @param valuePersistableId the persistable id to use for the statistic value
     */
    @SuppressWarnings("unchecked")
    public RegisteredStatistic(
        final StatisticType> statType,
        final Supplier>> statisticConstructor,
        final Supplier> valueConstructor,
        final short statisticPersistableId,
        final short valuePersistableId) {
      this.statType = (StatisticType>) statType;
      this.statisticConstructor = statisticConstructor;
      this.valueConstructor = valueConstructor;
      this.statisticPersistableId = statisticPersistableId;
      this.valuePersistableId = valuePersistableId;
    }

    /**
     * @return the statistics type
     */
    public StatisticType> getStatisticsType() {
      return statType;
    }

    /**
     * @return the statistic constructor
     */
    @SuppressWarnings("unchecked")
    public Supplier>> getStatisticConstructor() {
      return (Supplier>>) statisticConstructor;
    }

    /**
     * @return the statistic value constructor
     */
    @SuppressWarnings("unchecked")
    public Supplier> getValueConstructor() {
      return (Supplier>) valueConstructor;
    }

    // The statistic category is derived from the concrete StatisticType subclass.
    public boolean isDataTypeStatistic() {
      return statType instanceof DataTypeStatisticType;
    }

    public boolean isIndexStatistic() {
      return statType instanceof IndexStatisticType;
    }

    public boolean isFieldStatistic() {
      return statType instanceof FieldStatisticType;
    }

    /**
     * Checks compatibility by delegating to a cached prototype instance of the statistic.
     */
    public boolean isCompatibleWith(final Class clazz) {
      if (prototype == null) {
        prototype = statisticConstructor.get();
      }
      return prototype.isCompatibleWith(clazz);
    }

    public short getStatisticPersistableId() {
      return statisticPersistableId;
    }

    public short getValuePersistableId() {
      return valuePersistableId;
    }
  }

  /**
   * This class contains everything needed to register a statistic binning strategy with GeoWave.
   */
  public static class RegisteredBinningStrategy {
    private final String strategyName;
    private final Supplier constructor;
    private final short persistableId;

    /**
     * @param strategyName the name of the binning strategy
     * @param constructor the constructor for the binning strategy
     * @param persistableId the persistable id of the binning strategy
     */
    public RegisteredBinningStrategy(
        final String strategyName,
        final Supplier constructor,
        final short persistableId) {
      this.strategyName = strategyName;
      this.constructor = constructor;
      this.persistableId = persistableId;
    }

    /**
     * @return the strategy name
     */
    public String getStrategyName() {
      return strategyName;
    }

    /**
     * @return the binning strategy constructor
     */
    @SuppressWarnings("unchecked")
    public Supplier getConstructor() {
      return (Supplier) constructor;
    }

    public short getPersistableId() {
      return persistableId;
    }
  }
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/StatisticsValueIterator.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.CloseableIteratorWrapper;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import com.google.common.collect.Iterators;

/**
 * Iterates over the values of a set of statistics, flattening the per-statistic value iterators
 * obtained from a {@link DataStatisticsStore} (optionally filtered by bin constraints) into a
 * single closeable iterator.
 */
public class StatisticsValueIterator implements CloseableIterator> {
  private final DataStatisticsStore statisticsStore;
  private final Iterator>> statistics;
  private final ByteArrayConstraints binConstraints;
  private final String[] authorizations;
  // iterator over the values of the statistic currently being consumed (closed when exhausted)
  private CloseableIterator> current = null;
  // look-ahead element computed by computeNext()
  private StatisticValue next = null;

  /**
   * @param statisticsStore the store to read values from
   * @param statistics the statistics whose values should be iterated
   * @param binConstraints optional bin constraints; {@code null} or "all bins" reads every value
   * @param authorizations authorizations used for the underlying store scans
   */
  public StatisticsValueIterator(
      final DataStatisticsStore statisticsStore,
      final Iterator>> statistics,
      final ByteArrayConstraints binConstraints,
      final String... authorizations) {
    this.statisticsStore = statisticsStore;
    this.statistics = statistics;
    this.binConstraints = binConstraints;
    this.authorizations = authorizations;
  }

  // Advances to the next value, opening (and closing) per-statistic iterators as needed.
  @SuppressWarnings("unchecked")
  private void computeNext() {
    if (next == null) {
      // advance through statistics until a value is available or the input is exhausted
      while (((current == null) || !current.hasNext()) && statistics.hasNext()) {
        if (current != null) {
          current.close();
          current = null;
        }
        final Statistic> nextStat = (Statistic>) statistics.next();
        if ((nextStat.getBinningStrategy() != null)
            && (binConstraints != null)
            && !binConstraints.isAllBins()) {
          if (binConstraints.getBins().length > 0) {
            if (binConstraints.isPrefix()) {
              // prefix constraints: one range scan per bin prefix, concatenated together
              final List>> iters =
                  Arrays.stream(binConstraints.getBins()).map(
                      bin -> statisticsStore.getStatisticValues(
                          nextStat,
                          bin,
                          authorizations)).collect(Collectors.toList());
              current =
                  new CloseableIteratorWrapper<>(
                      () -> iters.forEach(CloseableIterator::close),
                      Iterators.concat(iters.iterator()));
            } else {
              // exact bins: point lookups, dropping bins with no value
              current =
                  new CloseableIterator.Wrapper<>(
                      Arrays.stream(binConstraints.getBins()).map(
                          bin -> statisticsStore.getStatisticValue(
                              nextStat,
                              bin,
                              authorizations)).filter(Objects::nonNull).iterator());
            }
            // TODO should we allow for both prefix/bin constraints and range constraints or just
            // use one or the other as now? there doesn't seem to be a good use case to require both
          } else if (binConstraints.getBinRanges().length > 0) {
            current =
                statisticsStore.getStatisticValues(
                    nextStat,
                    binConstraints.getBinRanges(),
                    authorizations);
          } else {
            // constraints present but empty: skip this statistic entirely
            continue;
          }
        } else {
          current = statisticsStore.getStatisticValues(nextStat, authorizations);
        }
        // a statistic with no stored values still yields a single empty value
        if ((current != null) && !current.hasNext()) {
          current =
              new CloseableIterator.Wrapper<>(Iterators.singletonIterator(nextStat.createEmpty()));
        }
      }
      if ((current != null) && current.hasNext()) {
        next = current.next();
      }
    }
  }

  @Override
  public boolean hasNext() {
    if (next == null) {
      computeNext();
    }
    return next != null;
  }

  // NOTE(review): returns null rather than throwing NoSuchElementException when exhausted,
  // which deviates from the java.util.Iterator contract.
  @Override
  public StatisticValue next() {
    if (next == null) {
      computeNext();
    }
    final StatisticValue retVal = next;
    next = null;
    return retVal;
  }

  @Override
  public void close() {
    if (current != null) {
      current.close();
      current = null;
    }
  }
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/adapter/CountStatistic.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.adapter;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.FloatCompareUtils;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.DataTypeStatistic;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;

/**
 * Data type statistic that counts the entries of a data type, maintained incrementally on ingest
 * and delete. When a binning strategy assigns non-unit weights, the count transparently switches
 * to a weighted (floating point) representation.
 */
public class CountStatistic extends DataTypeStatistic {
  public static final DataTypeStatisticType STATS_TYPE = new DataTypeStatisticType<>("COUNT");

  public CountStatistic() {
    super(STATS_TYPE);
  }

  public CountStatistic(final String typeName) {
    super(STATS_TYPE, typeName);
  }

  @Override
  public String getDescription() {
    return "Counts the number of entries in the data type.";
  }

  @Override
  public CountValue createEmpty() {
    return new CountValue(this);
  }

  /**
   * Value for {@link CountStatistic}. Maintains either an exact long count, or — once any
   * binned entry carries a weight other than 0 or 1 — a weighted double count. At most one of
   * the two representations is active at a time.
   */
  public static class CountValue extends StatisticValue implements
      StatisticsIngestCallback,
      StatisticsDeleteCallback {
    public CountValue() {
      this(null);
    }

    public CountValue(final Statistic statistic) {
      super(statistic);
    }

    // exact count; only meaningful while weightedCount == null
    private long count = 0L;
    // non-null once weighted mode is entered; replaces 'count' as the source of truth
    private Double weightedCount = null;

    @Override
    public Long getValue() {
      if (weightedCount != null) {
        // weighted mode: round to the nearest whole entry
        return Math.round(weightedCount);
      }
      return count;
    }

    /** @return the weighted count if in weighted mode, otherwise the exact count as a double */
    public Double getWeightedCount() {
      if (weightedCount != null) {
        return weightedCount;
      }
      return (double) count;
    }

    @Override
    public void entryDeleted(
        final DataTypeAdapter adapter,
        final T entry,
        final GeoWaveRow... rows) {
      if ((getBin() != null) && (getStatistic().getBinningStrategy() != null)) {
        final double weight =
            getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);
        if (FloatCompareUtils.checkDoublesEqual(0.0, weight)) {
          // don't mess with potentially switching to weights if the weight is 0
          return;
        } else if (!FloatCompareUtils.checkDoublesEqual(1.0, weight)) {
          // let it pass through to normal incrementing if the weight is 1, otherwise use weights
          if (weightedCount == null) {
            // first non-unit weight: migrate the exact count into weighted mode
            weightedCount = (double) count;
            count = 0;
          }
          weightedCount -= weight;
          return;
        }
      }
      if (weightedCount != null) {
        weightedCount -= 1;
      } else {
        count--;
      }
    }

    @Override
    public void entryIngested(
        final DataTypeAdapter adapter,
        final T entry,
        final GeoWaveRow... rows) {
      if ((getBin() != null) && (getStatistic().getBinningStrategy() != null)) {
        final double weight =
            getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);
        if (FloatCompareUtils.checkDoublesEqual(0.0, weight)) {
          // don't mess with potentially switching to weights if the weight is 0
          return;
        } else if (!FloatCompareUtils.checkDoublesEqual(1.0, weight)) {
          // let it pass through to normal incrementing if the weight is 1, otherwise use weights
          if (weightedCount == null) {
            // first non-unit weight: migrate the exact count into weighted mode
            weightedCount = (double) count;
            count = 0;
          }
          weightedCount += weight;
          return;
        }
      }
      if (weightedCount != null) {
        weightedCount += 1;
      } else {
        count++;
      }
    }

    @Override
    public byte[] toBinary() {
      // if its double lets make it 9 bytes with the last one being 0xFF (which is impossible for
      // varint encoding)
      if (weightedCount != null) {
        final ByteBuffer buf = ByteBuffer.allocate(9);
        buf.putDouble(weightedCount);
        buf.put((byte) 0xFF);
        return buf.array();
      }
      // exact mode serializes as a plain signed varint
      return VarintUtils.writeSignedLong(count);
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      // 9 bytes ending in 0xFF is the weighted-mode sentinel written by toBinary()
      if ((bytes.length == 9) && (bytes[8] == (byte) 0xFF)) {
        count = 0;
        weightedCount = ByteBuffer.wrap(bytes).getDouble();
      } else {
        count = VarintUtils.readSignedLong(ByteBuffer.wrap(bytes));
        weightedCount = null;
      }
    }

    @Override
    public void merge(final Mergeable merge) {
      if ((merge != null) && (merge instanceof CountValue)) {
        if (weightedCount != null) {
          if (((CountValue) merge).weightedCount != null) {
            weightedCount += ((CountValue) merge).weightedCount;
          } else {
            weightedCount += ((CountValue) merge).getValue();
          }
        } else {
          if (((CountValue) merge).weightedCount != null) {
            // other side is weighted: switch this value into weighted mode before merging
            weightedCount = (double) count;
            count = 0;
            weightedCount += ((CountValue) merge).weightedCount;
          } else {
            count += ((CountValue) merge).getValue();
          }
        }
      }
    }
  }
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/adapter/DataTypeStatisticType.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.adapter; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Statistic type for data type statistics. Generally used for type checking. */ public class DataTypeStatisticType> extends StatisticType { private static final long serialVersionUID = 1L; public DataTypeStatisticType(final String id) { super(id); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/BinningStrategyUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.binning; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.Function; import org.apache.commons.lang.ArrayUtils; import org.locationtech.geowave.core.index.ByteArray; import com.google.common.primitives.Bytes; /** * Static utility methods useful for binning strategies * */ public class BinningStrategyUtils { /** * in the case of queries of composite or multi-field bins, we need to query all combinations of * individual bins * * @param individualBins the individual bin * @param concatenateByteArrays a method to concatenate the bins, useful for different nuances of * encoding * @return all combinations of the concatenated individual bins */ public static ByteArray[] getAllCombinations( final ByteArray[][] individualBins, final Function concatenateByteArrays) { final List combinedConstraintCombos = new ArrayList<>(); combos(0, individualBins, new ByteArray[0], combinedConstraintCombos); return combinedConstraintCombos.stream().map(concatenateByteArrays).toArray(ByteArray[]::new); } private static void combos( final int pos, final ByteArray[][] c, final ByteArray[] soFar, final List finalList) { if (pos == c.length) { finalList.add(soFar); return; } for (int i = 0; i != c[pos].length; i++) { combos(pos + 1, c, (ByteArray[]) ArrayUtils.add(soFar, c[pos][i]), finalList); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/CompositeBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 
 * Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.binning;

import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;
import java.util.OptionalInt;
import java.util.Set;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.StatisticBinningStrategy;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;
import com.google.common.collect.Lists;

/**
 * Statistic binning strategy that combines two or more other binning strategies. A composite bin
 * is the concatenation of each child's bin bytes, with the per-child bin lengths appended in
 * reverse varint form at the end (so that prefix scans over the bin bytes still work).
 *
 * NOTE(review): generic type arguments appear to have been stripped from this extraction (e.g.
 * {@code DataTypeAdapter adapter, final T entry}); confirm declared types against upstream.
 */
public class CompositeBinningStrategy implements StatisticBinningStrategy {
  public static final String NAME = "COMPOSITE";
  public static final byte[] WILDCARD_BYTES = new byte[0];
  private StatisticBinningStrategy[] childBinningStrategies;

  public CompositeBinningStrategy() {
    childBinningStrategies = new StatisticBinningStrategy[0];
  }

  public CompositeBinningStrategy(final StatisticBinningStrategy... childBinningStrategies) {
    this.childBinningStrategies = childBinningStrategies;
  }

  @Override
  public String getStrategyName() {
    return NAME;
  }

  @Override
  public String getDescription() {
    return "Bin the statistic using multiple strategies.";
  }

  /**
   * Computes the cartesian product of every child's bins, concatenated via {@link #getBin}.
   */
  @Override
  public ByteArray[] getBins(
      final DataTypeAdapter adapter,
      final T entry,
      final GeoWaveRow... rows) {
    final ByteArray[][] perStrategyBins =
        Arrays.stream(childBinningStrategies).map(s -> s.getBins(adapter, entry, rows)).toArray(
            ByteArray[][]::new);
    return getAllCombinations(perStrategyBins);
  }

  @Override
  public String getDefaultTag() {
    // pipe-delimited concatenation of the child tags
    return Arrays.stream(childBinningStrategies).map(s -> s.getDefaultTag()).collect(
        Collectors.joining("|"));
  }

  @Override
  public void addFieldsUsed(final Set fieldsUsed) {
    for (final StatisticBinningStrategy child : childBinningStrategies) {
      child.addFieldsUsed(fieldsUsed);
    }
  }

  @Override
  public byte[] toBinary() {
    return PersistenceUtils.toBinary(Lists.newArrayList(childBinningStrategies));
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final List strategies = PersistenceUtils.fromBinaryAsList(bytes);
    childBinningStrategies = strategies.toArray(new StatisticBinningStrategy[strategies.size()]);
  }

  /**
   * Renders a composite bin as a pipe-delimited string by splitting it back into the per-child
   * sub-bins using the reversed length varints at the end of the bin.
   */
  @Override
  public String binToString(final ByteArray bin) {
    if (bin == null || bin.getBytes() == null || bin.getBytes().length == 0) {
      return "None";
    }
    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());
    // lengths are written reversed at the back; reading them yields per-child lengths in order
    buffer.position(buffer.limit() - 1);
    final int[] byteLengths =
        Arrays.stream(childBinningStrategies).mapToInt(
            s -> VarintUtils.readUnsignedIntReversed(buffer)).toArray();
    buffer.rewind();
    final StringBuffer strVal = new StringBuffer();
    for (int i = 0; i < childBinningStrategies.length; i++) {
      if (i != 0) {
        strVal.append("|");
      }
      final byte[] subBin = new byte[byteLengths[i]];
      buffer.get(subBin);
      strVal.append(childBinningStrategies[i].binToString(new ByteArray(subBin)));
    }
    return strVal.toString();
  }

  /**
   * Splits a composite bin into (child strategy, sub-bin) pairs, one per child, in order.
   */
  public Pair[] getSubBins(final ByteArray bin) {
    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());
    buffer.position(buffer.limit() - 1);
    final int[] byteLengths =
        Arrays.stream(childBinningStrategies).mapToInt(
            s -> VarintUtils.readUnsignedIntReversed(buffer)).toArray();
    buffer.rewind();
    @SuppressWarnings("unchecked")
    final Pair[] retVal = new Pair[childBinningStrategies.length];
    for (int i = 0; i < childBinningStrategies.length; i++) {
      final byte[] subBin = new byte[byteLengths[i]];
      buffer.get(subBin);
      retVal[i] = Pair.of(childBinningStrategies[i], new ByteArray(subBin));
    }
    return retVal;
  }

  /**
   * Checks whether the portion of a composite bin belonging to the (first) child of the given
   * strategy class equals the provided sub-bin, recursing into nested composites.
   */
  public boolean binMatches(
      final Class binningStrategyClass,
      final ByteArray bin,
      final ByteArray subBin) {
    // this logic only seems to be valid if the child binning strategy classes are different
    final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes());
    // first look to see if the strategy is directly assignable and at which position
    final OptionalInt directlyAssignable =
        IntStream.range(0, childBinningStrategies.length).filter(
            i -> binningStrategyClass.isAssignableFrom(
                childBinningStrategies[i].getClass())).findFirst();
    if (directlyAssignable.isPresent()) {
      return Arrays.equals(
          getSubBinAtIndex(directlyAssignable.getAsInt(), buffer),
          subBin.getBytes());
    }
    final OptionalInt composite =
        IntStream.range(0, childBinningStrategies.length).filter(
            i -> (childBinningStrategies[i] instanceof CompositeBinningStrategy)
                && ((CompositeBinningStrategy) childBinningStrategies[i]).usesStrategy(
                    binningStrategyClass)).findFirst();
    if (composite.isPresent()) {
      // get the subBin from the buffer at this position
      // NOTE(review): directlyAssignable is known to be empty on this path (the isPresent()
      // branch above returned), so directlyAssignable.getAsInt() will throw
      // NoSuchElementException here — this most likely should be composite.getAsInt().
      return ((CompositeBinningStrategy) childBinningStrategies[composite.getAsInt()]).binMatches(
          binningStrategyClass,
          new ByteArray(getSubBinAtIndex(directlyAssignable.getAsInt(), buffer)),
          subBin);
    }
    return false;
  }

  // Extracts the sub-bin of the child at 'index' from a composite bin buffer.
  // NOTE(review): to skip to child 'index' this should sum the first 'index' reversed lengths,
  // but IntStream.range(0, index - 1) only reads index-1 of them — correct for index 0, but
  // apparently off by one (both for skipBytes and for which length is read next) for index >= 1.
  private static byte[] getSubBinAtIndex(final int index, final ByteBuffer buffer) {
    // get the subBin from the buffer at this position
    buffer.position(buffer.limit() - 1);
    final int skipBytes =
        IntStream.range(0, index - 1).map(i -> VarintUtils.readUnsignedIntReversed(buffer)).sum();
    final byte[] subBin = new byte[VarintUtils.readUnsignedIntReversed(buffer)];
    buffer.position(skipBytes);
    buffer.get(subBin);
    return subBin;
  }

  /**
   * @return true if any child (recursively, through nested composites) is assignable to the given
   *         strategy class
   */
  public boolean usesStrategy(final Class binningStrategyClass) {
    return Arrays.stream(childBinningStrategies).anyMatch(
        s -> binningStrategyClass.isAssignableFrom(s.getClass())
            || ((s instanceof CompositeBinningStrategy)
                && ((CompositeBinningStrategy) s).usesStrategy(binningStrategyClass)));
  }

  /**
   * @return true if the children match the given classes position-for-position
   */
  public boolean isOfType(final Class... strategyClasses) {
    if (strategyClasses.length == childBinningStrategies.length) {
      return IntStream.range(0, strategyClasses.length).allMatch(
          i -> strategyClasses[i].isAssignableFrom(childBinningStrategies[i].getClass()));
    }
    return false;
  }

  /**
   * Concatenates sub-bins into a composite bin: all bin bytes first, then each bin's length as a
   * reversed unsigned varint appended in reverse bin order.
   */
  public static ByteArray getBin(final ByteArray... bins) {
    final int byteLength =
        Arrays.stream(bins).map(ByteArray::getBytes).mapToInt(
            b -> b.length + VarintUtils.unsignedIntByteLength(b.length)).sum();
    final ByteBuffer bytes = ByteBuffer.allocate(byteLength);
    Arrays.stream(bins).map(ByteArray::getBytes).forEach(b -> {
      bytes.put(b);
    });
    // write the lengths at the back for deserialization purposes only (and so prefix scans don't
    // need to account for this)
    // also we want to iterate in reverse order so this reverses the order
    final Deque output =
        Arrays.stream(bins).collect(
            Collector.of(ArrayDeque::new, (deq, t) -> deq.addFirst(t), (d1, d2) -> {
              d2.addAll(d1);
              return d2;
            }));
    output.stream().map(ByteArray::getBytes).forEach(b -> {
      VarintUtils.writeUnsignedIntReversed(b.length, bytes);
    });
    return new ByteArray(bytes.array());
  }

  @Override
  public ByteArrayConstraints constraints(final Object constraint) {
    // an Object[] is interpreted as one constraint per child strategy
    if ((constraint != null) && (constraint instanceof Object[])) {
      return constraints((Object[]) constraint, childBinningStrategies);
    }
    return StatisticBinningStrategy.super.constraints(constraint);
  }

  private ByteArrayConstraints constraints(
      final Object[] constraints,
      final StatisticBinningStrategy[] binningStrategies) {
    // this will handle merging bins together per constraint-binningStrategy pair
    if (constraints.length == binningStrategies.length) {
      final List perStrategyConstraints =
          IntStream.range(0, constraints.length).mapToObj(
              i -> binningStrategies[i].constraints(constraints[i])).collect(Collectors.toList());
      return concat(perStrategyConstraints);
    }
    // if there's not the same number of constraints as binning strategies, use default logic
    return StatisticBinningStrategy.super.constraints(constraints);
  }

  // Merges per-child constraints into composite bin constraints (cartesian product of bins).
  private ByteArrayConstraints concat(final List perStrategyConstraints) {
    final ByteArray[][] c = new ByteArray[perStrategyConstraints.size()][];
    boolean allBins = true;
    for (int i = 0; i < perStrategyConstraints.size(); i++) {
      final ByteArrayConstraints constraints = perStrategyConstraints.get(i);
      if (constraints.isAllBins()) {
        if (!allBins) {
          throw new IllegalArgumentException(
              "Cannot use 'all bins' query for one strategy and not the other");
        }
      } else {
        allBins = false;
      }
      if (constraints.isPrefix()) {
        // can only use a prefix if its the last field or the rest of the fields are 'all bins'
        boolean isValid = true;
        // NOTE(review): this loop looks wrong — it declares 'final int j = i + 1' but its
        // condition and increment operate on the outer 'i' (likely intended:
        // for (int j = i + 1; j < perStrategyConstraints.size(); j++)), and 'c[i]' in the else
        // branch presumably should be 'c[j]'. Verify against upstream before relying on
        // prefix-constraint handling for composite bins.
        for (final int j = i + 1; i < perStrategyConstraints.size(); i++) {
          final ByteArrayConstraints innerConstraints = perStrategyConstraints.get(j);
          if (!innerConstraints.isAllBins()) {
            isValid = false;
            break;
          } else {
            c[i] = new ByteArray[] {new ByteArray()};
          }
        }
        if (isValid) {
          return new ExplicitConstraints(getAllCombinations(c), true);
        } else {
          throw new IllegalArgumentException(
              "Cannot use 'prefix' query for a strategy that is also using exact constraints on a subsequent strategy");
        }
      }
      c[i] = constraints.getBins();
    }
    return new ExplicitConstraints(getAllCombinations(c), false);
  }

  // Cartesian product of per-strategy bins, concatenated with getBin's length-suffixed encoding.
  private static ByteArray[] getAllCombinations(final ByteArray[][] perStrategyBins) {
    return BinningStrategyUtils.getAllCombinations(
        perStrategyBins,
        CompositeBinningStrategy::getBin);
  }
}
================================================ FILE:
core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/DataTypeBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.binning; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.StatisticBinningStrategy; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * Statistic binning strategy that bins statistic values by data type name. This is generally only * used for index statistics because data type and field statistics would all go under the same bin. */ public class DataTypeBinningStrategy implements StatisticBinningStrategy { public static final String NAME = "DATA_TYPE"; @Override public String getStrategyName() { return NAME; } @Override public String getDescription() { return "Bin the statistic by data type. Only used for index statistics."; } @Override public ByteArray[] getBins( final DataTypeAdapter adapter, final T entry, final GeoWaveRow... 
rows) { return new ByteArray[] {getBin(adapter)}; } @Override public String getDefaultTag() { return "dataType"; } public static ByteArray getBin(final DataTypeAdapter adapter) { if (adapter == null) { return new ByteArray(); } return new ByteArray(adapter.getTypeName()); } public static ByteArray getBin(final String typeName) { if (typeName == null) { return new ByteArray(); } return new ByteArray(typeName); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public String binToString(final ByteArray bin) { return bin.getString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/FieldValueBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.binning;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.StatisticBinningStrategy;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Strings;
import com.clearspring.analytics.util.Lists;

/**
 * Statistic binning strategy that bins statistic values by the string representation of the value
 * of one or more fields.
 */
public class FieldValueBinningStrategy implements StatisticBinningStrategy {
  public static final String NAME = "FIELD_VALUE";

  @Parameter(
      names = "--binField",
      description = "Field that contains the bin value. This can be specified multiple times to bin on a combination of fields.",
      required = true)
  protected List<String> fields;

  public FieldValueBinningStrategy() {
    fields = Lists.newArrayList();
  }

  public FieldValueBinningStrategy(final String... fields) {
    this.fields = Arrays.asList(fields);
  }

  @Override
  public String getStrategyName() {
    return NAME;
  }

  @Override
  public String getDescription() {
    return "Bin the statistic by the value of one or more fields.";
  }

  @Override
  public void addFieldsUsed(final Set<String> fieldsUsed) {
    fieldsUsed.addAll(fields);
  }

  /**
   * Produce a single bin for an entry. Multiple fields are composed into one bin with a '|'
   * separator between per-field values.
   */
  @Override
  public <T> ByteArray[] getBins(
      final DataTypeAdapter<T> adapter,
      final T entry,
      final GeoWaveRow... rows) {
    if (fields.isEmpty()) {
      return new ByteArray[0];
    } else if (fields.size() == 1) {
      // single-field fast path: no separator encoding needed
      return new ByteArray[] {getSingleBin(adapter.getFieldValue(entry, fields.get(0)))};
    }
    final ByteArray[] fieldValues =
        fields.stream().map(field -> getSingleBin(adapter.getFieldValue(entry, field))).toArray(
            ByteArray[]::new);
    return new ByteArray[] {getBin(fieldValues)};
  }

  /** Join per-field bins with a 2-byte '|' character between each pair. */
  protected static ByteArray getBin(final ByteArray[] fieldValues) {
    int length = 0;
    for (final ByteArray fieldValue : fieldValues) {
      length += fieldValue.getBytes().length;
    }
    final byte[] finalBin = new byte[length + (Character.BYTES * (fieldValues.length - 1))];
    final ByteBuffer binBuffer = ByteBuffer.wrap(finalBin);
    for (final ByteArray fieldValue : fieldValues) {
      binBuffer.put(fieldValue.getBytes());
      // no trailing separator after the last value
      if (binBuffer.remaining() > 0) {
        binBuffer.putChar('|');
      }
    }
    return new ByteArray(binBuffer.array());
  }

  @Override
  public String getDefaultTag() {
    return Strings.join("|", fields);
  }

  /** A null field value bins to the empty byte array. */
  protected ByteArray getSingleBin(final Object value) {
    if (value == null) {
      return new ByteArray();
    }
    return new ByteArray(value.toString());
  }

  @Override
  public byte[] toBinary() {
    return StringUtils.stringsToBinary(fields.toArray(new String[fields.size()]));
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    fields = Arrays.asList(StringUtils.stringsFromBinary(bytes));
  }

  @Override
  public String binToString(final ByteArray bin) {
    return bin.getString();
  }

  @SuppressWarnings("unchecked")
  @Override
  public Class<?>[] supportedConstraintClasses() {
    if (fields.size() > 1) {
      // multi-field binning additionally supports per-field constraints keyed by field name
      return ArrayUtils.addAll(
          StatisticBinningStrategy.super.supportedConstraintClasses(),
          Map.class,
          Pair[].class);
    }
    return StatisticBinningStrategy.super.supportedConstraintClasses();
  }

  protected ByteArrayConstraints singleFieldConstraints(final Object constraint) {
    return StatisticBinningStrategy.super.constraints(constraint);
  }

  protected ByteArrayConstraints handleEmptyField(final Object constraint) {
    throw new IllegalArgumentException(
        "There are no fields in the binning strategy for these constraints");
  }

  /**
   * Convert a constraint object into explicit bin constraints. For multi-field binning the
   * constraint must be a {@code Map} or {@code Pair[]} associating field names with per-field
   * constraints; the per-field bins are combined into all composite-bin combinations.
   */
  @SuppressWarnings("unchecked")
  @Override
  public ByteArrayConstraints constraints(final Object constraint) {
    if (fields.isEmpty() && (constraint != null)) {
      return handleEmptyField(constraint);
    } else if (fields.size() > 1) {
      Map<String, Object> constraintMap;
      if (constraint instanceof Map) {
        constraintMap = (Map<String, Object>) constraint;
      } else if (constraint instanceof Pair[]) {
        if (((Pair[]) constraint).length != fields.size()) {
          throw new IllegalArgumentException(
              "org.apache.commons.lang3.tuple.Pair[] constraint of length "
                  + ((Pair[]) constraint).length
                  + " must be the same length as the number of fields "
                  + fields.size());
        }
        constraintMap = Arrays.stream(((Pair[]) constraint)).collect(
            Collectors.toMap((p) -> p.getKey().toString(), Pair::getValue));
      } else {
        throw new IllegalArgumentException(
            "There are multiple fields in the binning strategy; A java.util.Map or org.apache.commons.lang3.tuple.Pair[] constraint must be used with keys associated with field names and values of constraints per field");
      }
      final ByteArray[][] c = new ByteArray[fields.size()][];
      boolean allBins = true;
      for (int i = 0; i < fields.size(); i++) {
        final String field = fields.get(i);
        final ByteArrayConstraints constraints = singleFieldConstraints(constraintMap.get(field));
        if (constraints.isAllBins()) {
          if (!allBins) {
            throw new IllegalArgumentException(
                "Cannot use 'all bins' query for one field and not the other");
          }
        } else {
          allBins = false;
        }
        if (constraints.isPrefix() || (constraints.getBinRanges().length > 0)) {
          // can only use a prefix if its the last field or the rest of the fields are 'all bins'
          boolean isValid = true;
          // (bug fix: this loop previously declared 'final int j = i + 1' while incrementing the
          // outer index 'i', inspecting the same constraint repeatedly and corrupting the outer
          // iteration)
          for (int j = i + 1; j < fields.size(); j++) {
            final String innerField = fields.get(j);
            final ByteArrayConstraints innerConstraints =
                singleFieldConstraints(constraintMap.get(innerField));
            if (!innerConstraints.isAllBins()) {
              isValid = false;
              break;
            } else {
              c[j] = new ByteArray[] {new ByteArray()};
            }
          }
          if (isValid) {
            if (constraints.getBinRanges().length > 0) {
              // we just prepend all combinations of prior byte arrays to the starts and the ends
              // of the bin ranges
              final ByteArray[][] ends = c.clone();
              final ByteArray[][] starts = c;
              starts[i] =
                  Arrays.stream(constraints.getBinRanges()).map(ByteArrayRange::getStart).toArray(
                      ByteArray[]::new);
              final ByteArray[] startsCombined = getAllCombinations(starts);
              ends[i] =
                  Arrays.stream(constraints.getBinRanges()).map(ByteArrayRange::getEnd).toArray(
                      ByteArray[]::new);
              final ByteArray[] endsCombined = getAllCombinations(ends);
              // now take these pair-wise and combine them back into ByteArrayRange's
              return new ExplicitConstraints(
                  IntStream.range(0, startsCombined.length).mapToObj(
                      k -> new ByteArrayRange(
                          startsCombined[k].getBytes(),
                          endsCombined[k].getBytes())).toArray(ByteArrayRange[]::new));
            } else {
              c[i] = constraints.getBins();
              return new ExplicitConstraints(getAllCombinations(c), true);
            }
          } else {
            throw new IllegalArgumentException(
                "Cannot use 'prefix' or 'range' query for a field that is preceding additional constraints");
          }
        }
        c[i] = constraints.getBins();
      }
      return new ExplicitConstraints(getAllCombinations(c), false);
    } else {
      return singleFieldConstraints(constraint);
    }
  }

  /** Cartesian product of the per-field bins, composed via {@link #getBin(ByteArray[])}. */
  protected static ByteArray[] getAllCombinations(final ByteArray[][] perFieldBins) {
    return BinningStrategyUtils.getAllCombinations(perFieldBins, FieldValueBinningStrategy::getBin);
  }
}

================================================ FILE:
core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/NumericRangeFieldValueBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.binning; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.LongStream; import java.util.stream.Stream; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.Range; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.lexicoder.Lexicoders; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; import org.locationtech.geowave.core.store.statistics.query.BinConstraintsImpl.ExplicitConstraints; import com.beust.jcommander.Parameter; /** * Statistic binning strategy that bins statistic values by the numeric representation of the value * of a given field. By default it will truncate decimal places and will bin by the integer. * However, an "offset" and "interval" can be provided to bin numbers at any regular step-sized * increment from an origin value. A statistic using this binning strategy can be constrained using * numeric ranges (Apache-Commons `Range` class can be used as a constraint). */ public class NumericRangeFieldValueBinningStrategy extends FieldValueBinningStrategy { public static final String NAME = "NUMERIC_RANGE"; @Parameter(names = "--binInterval", description = "The interval between bins. Defaults to 1.") private double interval = 1; @Parameter( names = "--binOffset", description = "Offset the field values by a given amount. 
Defaults to 0.") private double offset = 0; @Override public String getStrategyName() { return NAME; } public NumericRangeFieldValueBinningStrategy() { super(); } public NumericRangeFieldValueBinningStrategy(final String... fields) { super(fields); } public NumericRangeFieldValueBinningStrategy(final double interval, final String... fields) { this(interval, 0.0, fields); } public NumericRangeFieldValueBinningStrategy( final double interval, final double offset, final String... fields) { super(fields); this.interval = interval; this.offset = offset; } @Override public String getDescription() { return "Bin the statistic by the numeric value of a specified field."; } @SuppressWarnings("unchecked") @Override public Class[] supportedConstraintClasses() { return ArrayUtils.addAll( super.supportedConstraintClasses(), Number.class, Range.class, Range[].class); } @SuppressWarnings("unchecked") @Override public ByteArrayConstraints singleFieldConstraints(final Object constraint) { if (constraint instanceof Number) { return new ExplicitConstraints(new ByteArray[] {getNumericBin((Number) constraint)}); } else if (constraint instanceof Range) { return new ExplicitConstraints(getNumericBins((Range) constraint)); } else if (constraint instanceof Range[]) { final Stream stream = Arrays.stream((Range[]) constraint).map(this::getNumericBins); return new ExplicitConstraints(stream.flatMap(Arrays::stream).toArray(ByteArray[]::new)); } return super.constraints(constraint); } @Override protected ByteArray getSingleBin(final Object value) { if ((value == null) || !(value instanceof Number)) { return new ByteArray(new byte[] {0x0}); } return getNumericBin((Number) value); } private ByteArray getNumericBin(final Number value) { final long bin = (long) Math.floor(((value.doubleValue() + offset) / interval)); return getBinId(bin); } private ByteArray getBinId(final long bin) { final ByteBuffer buffer = ByteBuffer.allocate(1 + Long.BYTES); buffer.put((byte) 0x1); 
buffer.putLong(Lexicoders.LONG.lexicode(bin)); return new ByteArray(buffer.array()); } private ByteArray[] getNumericBins(final Range value) { final long minBin = (long) Math.floor(((value.getMinimum().doubleValue() + offset) / interval)); final long maxBin = (long) Math.floor(((value.getMaximum().doubleValue() + offset) / interval)); return LongStream.rangeClosed(minBin, maxBin).mapToObj(this::getBinId).toArray( ByteArray[]::new); } @Override public byte[] toBinary() { final byte[] parentBinary = super.toBinary(); final ByteBuffer buf = ByteBuffer.allocate(parentBinary.length + 16); buf.put(parentBinary); buf.putDouble(interval); buf.putDouble(offset); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] parentBinary = new byte[bytes.length - 16]; buf.get(parentBinary); super.fromBinary(parentBinary); interval = buf.getDouble(); offset = buf.getDouble(); } public Range getRange(final ByteArray bytes) { final Map> allRanges = getRanges(bytes); final Optional> mergedRange = allRanges.values().stream().filter(Objects::nonNull).reduce( (r1, r2) -> Range.between( Math.min(r1.getMinimum(), r2.getMinimum()), Math.max(r1.getMaximum(), r2.getMaximum()))); if (mergedRange.isPresent()) { return mergedRange.get(); } return null; } public Map> getRanges(final ByteArray bytes) { return getRanges(ByteBuffer.wrap(bytes.getBytes())); } private Map> getRanges(final ByteBuffer buffer) { final Map> retVal = new HashMap<>(); for (final String field : fields) { if (!buffer.hasRemaining()) { return retVal; } if (buffer.get() == 0x0) { retVal.put(field, null); } else { retVal.put(field, getRange(buffer)); if (buffer.hasRemaining()) { buffer.getChar(); } } } return retVal; } private Range getRange(final ByteBuffer buffer) { final byte[] longBuffer = new byte[Long.BYTES]; buffer.get(longBuffer); final double low = (Lexicoders.LONG.fromByteArray(longBuffer) * interval) - offset; return Range.between(low, low 
+ interval); } @Override public String binToString(final ByteArray bin) { final ByteBuffer buffer = ByteBuffer.wrap(bin.getBytes()); final StringBuffer sb = new StringBuffer(); while (buffer.remaining() > 0) { if (buffer.get() == 0x0) { sb.append(""); } else { sb.append(rangeToString(getRange(buffer))); } if (buffer.remaining() > 0) { sb.append(buffer.getChar()); } } return sb.toString(); } private static String rangeToString(final Range range) { final StringBuilder buf = new StringBuilder(32); buf.append('['); buf.append(range.getMinimum()); buf.append(".."); buf.append(range.getMaximum()); buf.append(')'); return buf.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/binning/PartitionBinningStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.binning; import java.util.Arrays; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.StatisticBinningStrategy; import org.locationtech.geowave.core.store.entities.GeoWaveRow; /** * Statistic binning strategy that bins statistic values by the partitions that the entry resides * on. */ public class PartitionBinningStrategy implements StatisticBinningStrategy { public static final String NAME = "PARTITION"; @Override public String getStrategyName() { return NAME; } @Override public String getDescription() { return "Bin the statistic by the partition that entries reside on."; } @Override public ByteArray[] getBins( final DataTypeAdapter adapter, final T entry, final GeoWaveRow... 
rows) { final ByteArray[] partitionKeys = new ByteArray[rows.length]; for (int i = 0; i < rows.length; i++) { partitionKeys[i] = getBin(rows[i].getPartitionKey()); } return partitionKeys; } @Override public String getDefaultTag() { return "partition"; } public static ByteArray getBin(final byte[] partitionKey) { if (partitionKey == null) { return new ByteArray(); } return new ByteArray(partitionKey); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public String binToString(final ByteArray bin) { return Arrays.toString(bin.getBytes()); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/BloomFilterStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.field; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.google.common.hash.BloomFilter; import com.google.common.hash.Funnels; /** * Applies a bloom filter to field values useful for quickly determining set membership. False * positives are possible but false negatives are not possible. In other words, a value can be * determined to be possibly in the set or definitely not in the set. */ public class BloomFilterStatistic extends FieldStatistic { private static Logger LOGGER = LoggerFactory.getLogger(BloomFilterStatistic.class); @Parameter( names = "--expectedInsertions", description = "The number of expected insertions, used for appropriate sizing of bloom filter.") private long expectedInsertions = 10000; @Parameter( names = "--desiredFpp", description = "The desired False Positive Probability, directly related to the expected number of insertions. 
Higher FPP results in more compact Bloom Filter and lower FPP results in more accuracy.") private double desiredFalsePositiveProbability = 0.03; public static final FieldStatisticType STATS_TYPE = new FieldStatisticType<>("BLOOM_FILTER"); public BloomFilterStatistic() { super(STATS_TYPE); } public BloomFilterStatistic(final String typeName, final String fieldName) { super(STATS_TYPE, typeName, fieldName); } public void setExpectedInsertions(final long expectedInsertions) { this.expectedInsertions = expectedInsertions; } public long getExpectedInsertions() { return this.expectedInsertions; } public void setDesiredFalsePositiveProbability(final double desiredFalsePositiveProbability) { this.desiredFalsePositiveProbability = desiredFalsePositiveProbability; } public double getDesiredFalsePositiveProbability() { return this.desiredFalsePositiveProbability; } @Override public String getDescription() { return "Provides a bloom filter used for probabilistically determining set membership."; } @Override public boolean isCompatibleWith(final Class fieldClass) { return true; } @Override public BloomFilterValue createEmpty() { return new BloomFilterValue(this); } @Override protected int byteLength() { return super.byteLength() + VarintUtils.unsignedLongByteLength(expectedInsertions) + Double.BYTES; } @Override protected void writeBytes(ByteBuffer buffer) { super.writeBytes(buffer); VarintUtils.writeUnsignedLong(expectedInsertions, buffer); buffer.putDouble(desiredFalsePositiveProbability); } @Override protected void readBytes(ByteBuffer buffer) { super.readBytes(buffer); expectedInsertions = VarintUtils.readUnsignedLong(buffer); desiredFalsePositiveProbability = buffer.getDouble(); } public static class BloomFilterValue extends StatisticValue> implements StatisticsIngestCallback { private BloomFilter bloomFilter; public BloomFilterValue() { this(null); } private BloomFilterValue(final BloomFilterStatistic statistic) { super(statistic); if (statistic == null) { bloomFilter = 
null; } else { bloomFilter = BloomFilter.create( Funnels.unencodedCharsFunnel(), statistic.expectedInsertions, statistic.desiredFalsePositiveProbability); } } @Override public void merge(final Mergeable merge) { if ((merge != null) && (merge instanceof BloomFilterValue)) { final BloomFilterValue other = (BloomFilterValue) merge; if (bloomFilter == null) { bloomFilter = other.bloomFilter; } else if ((other.bloomFilter != null) && bloomFilter.isCompatible(other.bloomFilter)) { bloomFilter.putAll(other.bloomFilter); } } } @Override public void entryIngested( final DataTypeAdapter adapter, final T entry, final GeoWaveRow... rows) { final Object o = adapter.getFieldValue(entry, ((BloomFilterStatistic) getStatistic()).getFieldName()); if (o == null) { return; } bloomFilter.put(o.toString()); } @Override public BloomFilter getValue() { return bloomFilter; } @Override public byte[] toBinary() { try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) { bloomFilter.writeTo(baos); baos.flush(); return baos.toByteArray(); } catch (final IOException e) { LOGGER.warn("Unable to write bloom filter", e); } return new byte[0]; } @Override public void fromBinary(final byte[] bytes) { if (bytes.length > 0) { try { bloomFilter = BloomFilter.readFrom(new ByteArrayInputStream(bytes), Funnels.unencodedCharsFunnel()); } catch (final IOException e) { LOGGER.error("Unable to read Bloom Filter", e); } } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/CountMinSketchStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.field;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;
import com.beust.jcommander.Parameter;
import com.clearspring.analytics.stream.frequency.CountMinSketch;
import com.clearspring.analytics.stream.frequency.FrequencyMergeException;

/**
 * Maintains an estimate of how many of each attribute value occurs in a set of data.
 *
 * <p> Default values:
 *
 * <p> Error factor of 0.001 with probability 0.98 of retrieving a correct estimate. The Algorithm
 * does not under-state the estimate.
 */
public class CountMinSketchStatistic extends
    FieldStatistic<CountMinSketchStatistic.CountMinSketchValue> {
  public static final FieldStatisticType<CountMinSketchValue> STATS_TYPE =
      new FieldStatisticType<>("COUNT_MIN_SKETCH");

  @Parameter(names = "--errorFactor", description = "Error factor.")
  private double errorFactor = 0.001;

  @Parameter(
      names = "--probabilityOfCorrectness",
      description = "Probability of retrieving a correct estimate.")
  private double probabilityOfCorrectness = 0.98;

  public CountMinSketchStatistic() {
    super(STATS_TYPE);
  }

  public CountMinSketchStatistic(final String typeName, final String fieldName) {
    super(STATS_TYPE, typeName, fieldName);
  }

  public CountMinSketchStatistic(
      final String typeName,
      final String fieldName,
      final double errorFactor,
      final double probabilityOfCorrectness) {
    super(STATS_TYPE, typeName, fieldName);
    this.errorFactor = errorFactor;
    this.probabilityOfCorrectness = probabilityOfCorrectness;
  }

  public void setErrorFactor(final double errorFactor) {
    this.errorFactor = errorFactor;
  }

  public double getErrorFactor() {
    return errorFactor;
  }

  public void setProbabilityOfCorrectness(final double probabilityOfCorrectness) {
    this.probabilityOfCorrectness = probabilityOfCorrectness;
  }

  public double getProbabilityOfCorrectness() {
    return probabilityOfCorrectness;
  }

  @Override
  public String getDescription() {
    return "Maintains an estimate of how many of each attribute value occurs in a set of data.";
  }

  @Override
  public boolean isCompatibleWith(final Class<?> fieldClass) {
    // any field type is supported; values are counted by their string representation
    return true;
  }

  @Override
  public CountMinSketchValue createEmpty() {
    return new CountMinSketchValue(this);
  }

  @Override
  protected int byteLength() {
    return super.byteLength() + Double.BYTES * 2;
  }

  @Override
  protected void writeBytes(final ByteBuffer buffer) {
    super.writeBytes(buffer);
    buffer.putDouble(errorFactor);
    buffer.putDouble(probabilityOfCorrectness);
  }

  @Override
  protected void readBytes(final ByteBuffer buffer) {
    super.readBytes(buffer);
    errorFactor = buffer.getDouble();
    probabilityOfCorrectness = buffer.getDouble();
  }

  /** The count-min sketch value, updated from each ingested entry's field value. */
  public static class CountMinSketchValue extends StatisticValue<CountMinSketch> implements
      StatisticsIngestCallback {
    private CountMinSketch sketch;

    public CountMinSketchValue() {
      super(null);
      // deserialization path; the sketch is populated by fromBinary
      sketch = null;
    }

    public CountMinSketchValue(final CountMinSketchStatistic statistic) {
      super(statistic);
      // fixed seed so independently created sketches use the same hash functions and can be
      // merged (CountMinSketch.merge rejects incompatible sketches)
      sketch =
          new CountMinSketch(statistic.errorFactor, statistic.probabilityOfCorrectness, 7364181);
    }

    /** @return the total number of items added to the sketch */
    public long totalSampleSize() {
      return sketch.size();
    }

    /** @return the estimated (never under-stated) occurrence count for the given item */
    public long count(final String item) {
      return sketch.estimateCount(item);
    }

    @Override
    public void merge(final Mergeable merge) {
      if (merge instanceof CountMinSketchValue) {
        try {
          sketch = CountMinSketch.merge(sketch, ((CountMinSketchValue) merge).sketch);
        } catch (final FrequencyMergeException e) {
          throw new RuntimeException("Unable to merge sketches", e);
        }
      }
    }

    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      final Object o =
          adapter.getFieldValue(entry, ((CountMinSketchStatistic) statistic).getFieldName());
      if (o == null) {
        return;
      }
      sketch.add(o.toString(), 1);
    }

    @Override
    public CountMinSketch getValue() {
      return sketch;
    }

    @Override
    public byte[] toBinary() {
      // length-prefixed serialized sketch
      final byte[] data = CountMinSketch.serialize(sketch);
      final ByteBuffer buffer =
          ByteBuffer.allocate(VarintUtils.unsignedIntByteLength(data.length) + data.length);
      VarintUtils.writeUnsignedInt(data.length, buffer);
      buffer.put(data);
      return buffer.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buffer = ByteBuffer.wrap(bytes);
      final byte[] data = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));
      sketch = CountMinSketch.deserialize(data);
    }
  }
}

================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/FieldStatisticId.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to
the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.field;

import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import org.locationtech.geowave.core.store.statistics.StatisticType;
import com.google.common.primitives.Bytes;

/**
 * An extension of Statistic ID that allows the same statistic and tag to be added to multiple
 * fields without conflicting.
 */
public class FieldStatisticId<V extends StatisticValue<?>> extends StatisticId<V> {

  private final String fieldName;

  /**
   * Create a new statistic id with the given group, statistic type, field name and tag.
   *
   * @param groupId the group id
   * @param statisticType the statistic type
   * @param fieldName the field name
   * @param tag the tag
   */
  public FieldStatisticId(
      final ByteArray groupId,
      final StatisticType<V> statisticType,
      final String fieldName,
      final String tag) {
    super(groupId, statisticType, tag);
    this.fieldName = fieldName;
  }

  /**
   * Get the field name of the underlying statistic.
   *
   * @return the field name
   */
  public String getFieldName() {
    return fieldName;
  }

  /**
   * Get the unique id of the identifier. The unique id is guaranteed to be unique among all
   * statistics within the same group. Multiple statistics of the same type in the same group can be
   * added by using different tags.
   *
   * @return the unique id
   */
  @Override
  public ByteArray getUniqueId() {
    // Computed lazily and cached; the id never changes once built.
    if (cachedBytes == null) {
      cachedBytes = generateUniqueId(statisticType, fieldName, tag);
    }
    return cachedBytes;
  }

  /**
   * Generate a unique id with the given statistic type, field name, and tag.
   *
   * @param statisticType the statistic type
   * @param fieldName the field name
   * @param tag the tag
   * @return the unique id
   */
  public static ByteArray generateUniqueId(
      final StatisticType<?> statisticType,
      final String fieldName,
      final String tag) {
    // The tag segment is optional; the field name always participates in the id.
    if (tag == null) {
      return new ByteArray(
          Bytes.concat(
              statisticType.getBytes(),
              StatisticId.UNIQUE_ID_SEPARATOR,
              StringUtils.stringToBinary(fieldName)));
    }
    return new ByteArray(
        Bytes.concat(
            statisticType.getBytes(),
            StatisticId.UNIQUE_ID_SEPARATOR,
            StringUtils.stringToBinary(fieldName),
            StatisticId.UNIQUE_ID_SEPARATOR,
            StringUtils.stringToBinary(tag)));
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/FieldStatisticType.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.field; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Statistic type for field statistics. Generally used for type checking. */ public class FieldStatisticType> extends StatisticType { private static final long serialVersionUID = 1L; public FieldStatisticType(final String id) { super(id); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/FixedBinNumericHistogramStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.field;

import java.nio.ByteBuffer;
import java.util.Date;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.adapter.statistics.histogram.FixedBinNumericHistogram;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;
import com.beust.jcommander.Parameter;

/**
 * Fixed number of bins for a histogram. Unless configured, the range will expand dynamically,
 * redistributing the data as necessary into the wider bins.
 *
 * The advantage of constraining the range of the statistic is to ignore values outside the
 * range, such as erroneous values. Erroneous values force extremes in the histogram. For example,
 * if the expected range of values falls between 0 and 1 and a value of 10000 occurs, then a single
 * bin contains the entire population between 0 and 1, a single bin represents the single value of
 * 10000. If there are extremes in the data, then use {@link NumericHistogramStatistic} instead.
 *
 * The default number of bins is 1024.
 */
public class FixedBinNumericHistogramStatistic extends
    FieldStatistic<FixedBinNumericHistogramValue> {
  public static final FieldStatisticType<FixedBinNumericHistogramValue> STATS_TYPE =
      new FieldStatisticType<>("FIXED_BIN_NUMERIC_HISTOGRAM");

  @Parameter(names = "--numBins", description = "The number of bins for the histogram.")
  private int numBins = 1024;

  @Parameter(
      names = "--minValue",
      description = "The minimum value for the histogram. If both min and max are not specified, the range will be unconstrained.")
  private Double minValue = null;

  @Parameter(
      names = "--maxValue",
      description = "The maximum value for the histogram. If both min and max are not specified, the range will be unconstrained.")
  private Double maxValue = null;

  public FixedBinNumericHistogramStatistic() {
    super(STATS_TYPE);
  }

  public FixedBinNumericHistogramStatistic(final String typeName, final String fieldName) {
    this(typeName, fieldName, 1024);
  }

  public FixedBinNumericHistogramStatistic(
      final String typeName,
      final String fieldName,
      final int bins) {
    super(STATS_TYPE, typeName, fieldName);
    this.numBins = bins;
  }

  public FixedBinNumericHistogramStatistic(
      final String typeName,
      final String fieldName,
      final int bins,
      final double minValue,
      final double maxValue) {
    super(STATS_TYPE, typeName, fieldName);
    this.numBins = bins;
    this.minValue = minValue;
    this.maxValue = maxValue;
  }

  public void setNumBins(final int numBins) {
    this.numBins = numBins;
  }

  public int getNumBins() {
    return numBins;
  }

  public void setMinValue(final Double minValue) {
    this.minValue = minValue;
  }

  public Double getMinValue() {
    return minValue;
  }

  public void setMaxValue(final Double maxValue) {
    this.maxValue = maxValue;
  }

  public Double getMaxValue() {
    return maxValue;
  }

  @Override
  public boolean isCompatibleWith(final Class<?> fieldClass) {
    // Dates are binned by epoch millis; all numbers by their double value.
    return Number.class.isAssignableFrom(fieldClass) || Date.class.isAssignableFrom(fieldClass);
  }

  @Override
  public String getDescription() {
    return "A numeric histogram with a fixed number of bins.";
  }

  @Override
  public FixedBinNumericHistogramValue createEmpty() {
    return new FixedBinNumericHistogramValue(this);
  }

  @Override
  protected int byteLength() {
    // Parent payload + varint bin count + two single-byte null flags for min/max.
    int length = super.byteLength() + VarintUtils.unsignedIntByteLength(numBins) + 2;
    length += minValue == null ? 0 : Double.BYTES;
    length += maxValue == null ? 0 : Double.BYTES;
    return length;
  }

  @Override
  protected void writeBytes(final ByteBuffer buffer) {
    super.writeBytes(buffer);
    VarintUtils.writeUnsignedInt(numBins, buffer);
    // Each optional bound is encoded as a presence byte followed by the value when present.
    if (minValue == null) {
      buffer.put((byte) 0);
    } else {
      buffer.put((byte) 1);
      buffer.putDouble(minValue);
    }
    if (maxValue == null) {
      buffer.put((byte) 0);
    } else {
      buffer.put((byte) 1);
      buffer.putDouble(maxValue);
    }
  }

  @Override
  protected void readBytes(final ByteBuffer buffer) {
    super.readBytes(buffer);
    numBins = VarintUtils.readUnsignedInt(buffer);
    if (buffer.get() == 1) {
      minValue = buffer.getDouble();
    } else {
      minValue = null;
    }
    if (buffer.get() == 1) {
      maxValue = buffer.getDouble();
    } else {
      maxValue = null;
    }
  }

  /** The computed histogram value; updated at ingest time and mergeable across splits. */
  public static class FixedBinNumericHistogramValue extends
      StatisticValue<FixedBinNumericHistogram> implements
      StatisticsIngestCallback {
    private FixedBinNumericHistogram histogram;

    public FixedBinNumericHistogramValue() {
      super(null);
      histogram = null;
    }

    public FixedBinNumericHistogramValue(final FixedBinNumericHistogramStatistic statistic) {
      super(statistic);
      // Only constrain the histogram range when both bounds were configured.
      if (statistic.minValue == null || statistic.maxValue == null) {
        histogram = new FixedBinNumericHistogram(statistic.numBins);
      } else {
        histogram =
            new FixedBinNumericHistogram(statistic.numBins, statistic.minValue, statistic.maxValue);
      }
    }

    @Override
    public void merge(final Mergeable merge) {
      if (merge != null && merge instanceof FixedBinNumericHistogramValue) {
        histogram.merge(((FixedBinNumericHistogramValue) merge).getValue());
      }
    }

    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      final Object o =
          adapter.getFieldValue(
              entry,
              ((FixedBinNumericHistogramStatistic) getStatistic()).getFieldName());
      if (o == null) {
        return;
      }
      double value;
      if (o instanceof Date) {
        value = ((Date) o).getTime();
      } else if (o instanceof Number) {
        value = ((Number) o).doubleValue();
      } else {
        // Non-numeric values are silently skipped.
        return;
      }
      histogram.add(1, value);
    }

    @Override
    public FixedBinNumericHistogram getValue() {
      return histogram;
    }

    @Override
    public byte[] toBinary() {
      final ByteBuffer buffer = ByteBuffer.allocate(histogram.bufferSize());
      histogram.toBinary(buffer);
      return buffer.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      histogram = new FixedBinNumericHistogram();
      histogram.fromBinary(ByteBuffer.wrap(bytes));
    }

    /** @return the upper bound of each of {@code bins} equal-population quantile bins */
    public double[] quantile(final int bins) {
      return histogram.quantile(bins);
    }

    /** @return the cumulative distribution at {@code val} */
    public double cdf(final double val) {
      return histogram.cdf(val);
    }

    /** @return the value at the given cumulative {@code percentage} */
    public double quantile(final double percentage) {
      return histogram.quantile(percentage);
    }

    /** @return the fraction of the population falling between {@code start} and {@code stop} */
    public double percentPopulationOverRange(final double start, final double stop) {
      return cdf(stop) - cdf(start);
    }

    /** @return the total number of values added */
    public long totalSampleSize() {
      return histogram.getTotalCount();
    }

    /** @return per-bin counts using the given bin size */
    public long[] count(final int binSize) {
      return histogram.count(binSize);
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/HyperLogLogStatistic.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.field; import java.io.IOException; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.IValueValidator; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.clearspring.analytics.stream.cardinality.CardinalityMergeException; import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus; /** * Provides an estimated cardinality of the number of unique values for an attribute. */ public class HyperLogLogStatistic extends FieldStatistic { private static final Logger LOGGER = LoggerFactory.getLogger(HyperLogLogStatistic.class); public static final FieldStatisticType STATS_TYPE = new FieldStatisticType<>("HYPER_LOG_LOG"); @Parameter( names = "--precision", description = "Number of bits per count value. 2^precision will be the maximum count per distinct value. 
Maximum precision is 32.", validateValueWith = PrecisionValidator.class) private int precision = 16; public HyperLogLogStatistic() { super(STATS_TYPE); } public HyperLogLogStatistic(final String typeName, final String fieldName) { super(STATS_TYPE, typeName, fieldName); } public HyperLogLogStatistic(final String typeName, final String fieldName, final int precision) { super(STATS_TYPE, typeName, fieldName); this.precision = precision; } public void setPrecision(final short precision) { this.precision = precision; } public int getPrecision() { return precision; } @Override public String getDescription() { return "Provides an estimated cardinality of the number of unqiue values for an attribute."; } @Override public HyperLogLogPlusValue createEmpty() { return new HyperLogLogPlusValue(this); } @Override public boolean isCompatibleWith(Class fieldClass) { return true; } @Override protected int byteLength() { return super.byteLength() + VarintUtils.unsignedIntByteLength(precision); } @Override protected void writeBytes(ByteBuffer buffer) { super.writeBytes(buffer); VarintUtils.writeUnsignedInt(precision, buffer); } @Override protected void readBytes(ByteBuffer buffer) { super.readBytes(buffer); precision = VarintUtils.readUnsignedInt(buffer); } public static class HyperLogLogPlusValue extends StatisticValue implements StatisticsIngestCallback { private HyperLogLogPlus loglog; public HyperLogLogPlusValue() { super(null); loglog = null; } public HyperLogLogPlusValue(final HyperLogLogStatistic statistic) { super(statistic); loglog = new HyperLogLogPlus(statistic.precision); } public long cardinality() { return loglog.cardinality(); } @Override public void merge(Mergeable merge) { if (merge instanceof HyperLogLogPlusValue) { try { loglog = (HyperLogLogPlus) ((HyperLogLogPlusValue) merge).loglog.merge(loglog); } catch (final CardinalityMergeException e) { throw new RuntimeException("Unable to merge counters", e); } } } @Override public void entryIngested(DataTypeAdapter 
adapter, T entry, GeoWaveRow... rows) { final Object o = adapter.getFieldValue(entry, ((HyperLogLogStatistic) statistic).getFieldName()); if (o == null) { return; } loglog.offer(o.toString()); } @Override public HyperLogLogPlus getValue() { return loglog; } @Override public byte[] toBinary() { try { return loglog.getBytes(); } catch (final IOException e) { LOGGER.error("Exception while writing statistic", e); } return new byte[0]; } @Override public void fromBinary(byte[] bytes) { try { loglog = HyperLogLogPlus.Builder.build(bytes); } catch (final IOException e) { LOGGER.error("Exception while reading statistic", e); } } } private static class PrecisionValidator implements IValueValidator { @Override public void validate(String name, Integer value) throws ParameterException { if (value < 1 || value > 32) { throw new ParameterException("Precision must be a value between 1 and 32."); } } } @Override public String toString() { final StringBuffer buffer = new StringBuffer(); buffer.append("HYPER_LOG_LOG[type=").append(getTypeName()); buffer.append(", field=").append(getFieldName()); buffer.append(", precision=").append(precision); buffer.append("]"); return buffer.toString(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericHistogramStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.field;

import java.nio.ByteBuffer;
import java.util.Date;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram;
import org.locationtech.geowave.core.store.adapter.statistics.histogram.TDigestNumericHistogram;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;
import com.beust.jcommander.Parameter;

/**
 * Uses a T-Digest data structure to very efficiently calculate and store the histogram.
 * https://www.sciencedirect.com/science/article/pii/S2665963820300403
 *
 * The default compression is 100.
 */
public class NumericHistogramStatistic extends FieldStatistic<NumericHistogramValue> {
  public static final FieldStatisticType<NumericHistogramValue> STATS_TYPE =
      new FieldStatisticType<>("NUMERIC_HISTOGRAM");

  @Parameter(
      names = "--compression",
      description = "The compression parameter. 100 is a common value for normal uses. 1000 is extremely large. The number of centroids retained will be a smallish (usually less than 10) multiple of this number.")
  private double compression = 100;

  public NumericHistogramStatistic() {
    super(STATS_TYPE);
  }

  public NumericHistogramStatistic(final String typeName, final String fieldName) {
    this(typeName, fieldName, 100);
  }

  public NumericHistogramStatistic(
      final String typeName,
      final String fieldName,
      final double compression) {
    super(STATS_TYPE, typeName, fieldName);
    this.compression = compression;
  }

  public void setCompression(final double compression) {
    this.compression = compression;
  }

  public double getCompression() {
    return compression;
  }

  @Override
  public boolean isCompatibleWith(final Class<?> fieldClass) {
    // Dates are histogrammed by epoch millis; all numbers by their double value.
    return Number.class.isAssignableFrom(fieldClass) || Date.class.isAssignableFrom(fieldClass);
  }

  @Override
  public String getDescription() {
    return "A numeric histogram using an efficient t-digest data structure.";
  }

  @Override
  public NumericHistogramValue createEmpty() {
    return new NumericHistogramValue(this);
  }

  @Override
  protected int byteLength() {
    return super.byteLength() + Double.BYTES;
  }

  @Override
  protected void writeBytes(final ByteBuffer buffer) {
    super.writeBytes(buffer);
    buffer.putDouble(compression);
  }

  @Override
  protected void readBytes(final ByteBuffer buffer) {
    super.readBytes(buffer);
    compression = buffer.getDouble();
  }

  /** The computed t-digest histogram; lazily instantiated and mergeable across splits. */
  public static class NumericHistogramValue extends StatisticValue<TDigestNumericHistogram>
      implements
      StatisticsIngestCallback {
    private TDigestNumericHistogram histogram;

    public NumericHistogramValue() {
      super(null);
      histogram = null;
    }

    public NumericHistogramValue(final NumericHistogramStatistic statistic) {
      super(statistic);
      histogram = new TDigestNumericHistogram(statistic.compression);
    }

    @Override
    public void merge(final Mergeable merge) {
      if ((merge == null) || !(merge instanceof NumericHistogramValue)) {
        return;
      }
      // here it is important not to use "getValue()" because we want to be able to check for
      // null, and not just get an empty histogram
      final TDigestNumericHistogram incoming = ((NumericHistogramValue) merge).histogram;
      final boolean localHasData = (histogram != null) && (histogram.getTotalCount() > 0);
      if (localHasData) {
        if ((incoming != null) && (incoming.getTotalCount() > 0)) {
          histogram.merge(incoming);
        }
      } else {
        // Nothing accumulated locally yet; adopt the incoming digest wholesale.
        histogram = incoming;
      }
    }

    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      final Object raw =
          adapter.getFieldValue(
              entry,
              ((NumericHistogramStatistic) getStatistic()).getFieldName());
      if (raw == null) {
        return;
      }
      final double sample;
      if (raw instanceof Date) {
        sample = ((Date) raw).getTime();
      } else if (raw instanceof Number) {
        sample = ((Number) raw).doubleValue();
      } else {
        // Non-numeric values are silently skipped.
        return;
      }
      if (histogram == null) {
        histogram = new TDigestNumericHistogram();
      }
      histogram.add(sample);
    }

    @Override
    public TDigestNumericHistogram getValue() {
      if (histogram == null) {
        histogram = new TDigestNumericHistogram();
      }
      return histogram;
    }

    @Override
    public byte[] toBinary() {
      if (histogram == null) {
        return new byte[0];
      }
      final ByteBuffer buffer = ByteBuffer.allocate(histogram.bufferSize());
      histogram.toBinary(buffer);
      return buffer.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      histogram = new TDigestNumericHistogram();
      if (bytes.length > 0) {
        histogram.fromBinary(ByteBuffer.wrap(bytes));
      }
    }

    /** @return the upper bound of each of {@code bins} equal-population quantile bins */
    public double[] quantile(final int bins) {
      return NumericHistogram.binQuantiles(histogram, bins);
    }

    /** @return the cumulative distribution at {@code val} */
    public double cdf(final double val) {
      return histogram.cdf(val);
    }

    /** @return the value at the given cumulative {@code percentage} */
    public double quantile(final double percentage) {
      return histogram.quantile(percentage);
    }

    /** @return the fraction of the population falling between {@code start} and {@code stop} */
    public double percentPopulationOverRange(final double start, final double stop) {
      return cdf(stop) - cdf(start);
    }

    /** @return the total number of values added */
    public long totalSampleSize() {
      return histogram.getTotalCount();
    }

    /** @return per-bin counts using the given bin size */
    public long[] count(final int binSize) {
      return NumericHistogram.binCounts(histogram, binSize);
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericMeanStatistic.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.field;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;

/**
 * Tracks the running sum and count of a numeric attribute in order to provide its mean.
 */
public class NumericMeanStatistic extends FieldStatistic<NumericMeanValue> {
  public static final FieldStatisticType<NumericMeanValue> STATS_TYPE =
      new FieldStatisticType<>("NUMERIC_MEAN");

  public NumericMeanStatistic() {
    super(STATS_TYPE);
  }

  public NumericMeanStatistic(final String typeName, final String fieldName) {
    super(STATS_TYPE, typeName, fieldName);
  }

  @Override
  public String getDescription() {
    return "Provides the mean and sum of values for a numeric attribute.";
  }

  @Override
  public boolean isCompatibleWith(final Class<?> fieldClass) {
    return Number.class.isAssignableFrom(fieldClass);
  }

  @Override
  public NumericMeanValue createEmpty() {
    return new NumericMeanValue(this);
  }

  /** Running sum/count pair; supports both ingest and delete callbacks. */
  public static class NumericMeanValue extends StatisticValue<Double> implements
      StatisticsIngestCallback,
      StatisticsDeleteCallback {
    private double sum = 0;
    private long count = 0;

    public NumericMeanValue() {
      this(null);
    }

    private NumericMeanValue(final NumericMeanStatistic statistic) {
      super(statistic);
    }

    public long getCount() {
      return count;
    }

    public double getSum() {
      return sum;
    }

    /** @return the mean of all ingested values, or NaN when nothing has been counted */
    public double getMean() {
      return count <= 0 ? Double.NaN : sum / count;
    }

    @Override
    public void merge(final Mergeable merge) {
      if ((merge != null) && (merge instanceof NumericMeanValue)) {
        final NumericMeanValue other = (NumericMeanValue) merge;
        sum += other.getSum();
        count += other.count;
      }
    }

    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      final Object fieldValue =
          adapter.getFieldValue(entry, ((NumericMeanStatistic) getStatistic()).getFieldName());
      if (fieldValue == null) {
        return;
      }
      final double num = ((Number) fieldValue).doubleValue();
      if (Double.isNaN(num)) {
        return;
      }
      if (getBin() != null && getStatistic().getBinningStrategy() != null) {
        // Binned statistics contribute proportionally to their bin weight.
        final double weight =
            getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);
        sum += (num * weight);
        count += (weight);
      } else {
        sum += num;
        count++;
      }
    }

    @Override
    public <T> void entryDeleted(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      final Object fieldValue =
          adapter.getFieldValue(entry, ((NumericMeanStatistic) getStatistic()).getFieldName());
      if (fieldValue == null) {
        return;
      }
      final double num = ((Number) fieldValue).doubleValue();
      if (Double.isNaN(num)) {
        return;
      }
      if (getBin() != null && getStatistic().getBinningStrategy() != null) {
        // Mirror of entryIngested: back out the weighted contribution.
        final double weight =
            getStatistic().getBinningStrategy().getWeight(getBin(), adapter, entry, rows);
        sum -= (num * weight);
        count -= (weight);
      } else {
        sum -= num;
        count--;
      }
    }

    @Override
    public Double getValue() {
      return getMean();
    }

    @Override
    public byte[] toBinary() {
      final ByteBuffer buffer =
          ByteBuffer.allocate(Double.BYTES + VarintUtils.unsignedLongByteLength(count));
      VarintUtils.writeUnsignedLong(count, buffer);
      buffer.putDouble(sum);
      return buffer.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buffer = ByteBuffer.wrap(bytes);
      count = VarintUtils.readUnsignedLong(buffer);
      sum = buffer.getDouble();
    }
  }
}


================================================
FILE:
core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericRangeStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.field; import java.nio.ByteBuffer; import org.apache.commons.lang3.Range; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; /** * Tracks the range of a numeric attribute. */ public class NumericRangeStatistic extends FieldStatistic { public static final FieldStatisticType STATS_TYPE = new FieldStatisticType<>("NUMERIC_RANGE"); public NumericRangeStatistic() { super(STATS_TYPE); } public NumericRangeStatistic(final String typeName, final String fieldName) { super(STATS_TYPE, typeName, fieldName); } @Override public String getDescription() { return "Provides the minimum and maximum values of a numeric attribute."; } @Override public boolean isCompatibleWith(Class fieldClass) { return Number.class.isAssignableFrom(fieldClass); } @Override public NumericRangeValue createEmpty() { return new NumericRangeValue(this); } public static class NumericRangeValue extends StatisticValue> implements StatisticsIngestCallback { private double min = Double.MAX_VALUE; private double max = -Double.MAX_VALUE; public NumericRangeValue() { this(null); } private NumericRangeValue(final Statistic statistic) { super(statistic); } public boolean isSet() { if ((min == Double.MAX_VALUE) && (max == 
-Double.MAX_VALUE)) { return false; } return true; } public double getMin() { return min; } public double getMax() { return max; } public double getRange() { return max - min; } @Override public void merge(Mergeable merge) { if (merge != null && merge instanceof NumericRangeValue) { final NumericRangeValue other = (NumericRangeValue) merge; if (other.isSet()) { min = Math.min(min, other.getMin()); max = Math.max(max, other.getMax()); } } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { final Object o = adapter.getFieldValue(entry, ((NumericRangeStatistic) getStatistic()).getFieldName()); if (o == null) { return; } final double num = ((Number) o).doubleValue(); if (!Double.isNaN(num)) { min = Math.min(min, num); max = Math.max(max, num); } } @Override public Range getValue() { if (isSet()) { return Range.between(min, max); } return null; } @Override public byte[] toBinary() { final ByteBuffer buffer = ByteBuffer.allocate(Double.BYTES * 2); buffer.putDouble(min); buffer.putDouble(max); return buffer.array(); } @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); min = buffer.getDouble(); max = buffer.getDouble(); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/NumericStatsStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.field;

import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;

/**
 * Tracks the min, max, count, mean, sum, variance and standard deviation of a numeric attribute.
 */
public class NumericStatsStatistic extends FieldStatistic<NumericStatsValue> {
  public static final FieldStatisticType<NumericStatsValue> STATS_TYPE =
      new FieldStatisticType<>("NUMERIC_STATS");

  public NumericStatsStatistic() {
    super(STATS_TYPE);
  }

  public NumericStatsStatistic(final String typeName, final String fieldName) {
    super(STATS_TYPE, typeName, fieldName);
  }

  @Override
  public String getDescription() {
    return "Provides the min, max, count, mean, sum, variance and standard deviation of values for a numeric attribute.";
  }

  @Override
  public boolean isCompatibleWith(final Class<?> fieldClass) {
    return Number.class.isAssignableFrom(fieldClass);
  }

  @Override
  public NumericStatsValue createEmpty() {
    return new NumericStatsValue(this);
  }

  /** Accumulates summary statistics via the repackaged Guava StatsAccumulator. */
  public static class NumericStatsValue extends StatisticValue<Stats> implements
      StatisticsIngestCallback {
    private StatsAccumulator acc = new StatsAccumulator();

    public NumericStatsValue() {
      this(null);
    }

    private NumericStatsValue(final NumericStatsStatistic statistic) {
      super(statistic);
    }

    @Override
    public void merge(final Mergeable merge) {
      if ((merge != null) && (merge instanceof NumericStatsValue)) {
        final NumericStatsValue other = (NumericStatsValue) merge;
        // addAll of a snapshot folds the other accumulator's summary into this one.
        acc.addAll(other.acc.snapshot());
      }
    }

    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      final Object fieldValue =
          adapter.getFieldValue(entry, ((NumericStatsStatistic) getStatistic()).getFieldName());
      if (fieldValue == null) {
        return;
      }
      final double num = ((Number) fieldValue).doubleValue();
      if (!Double.isNaN(num)) {
        acc.add(num);
      }
    }

    @Override
    public Stats getValue() {
      return acc.snapshot();
    }

    @Override
    public byte[] toBinary() {
      return acc.snapshot().toByteArray();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      acc = new StatsAccumulator();
      acc.addAll(Stats.fromByteArray(bytes));
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/Stats.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * Copyright (C) 2012 The Guava Authors
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.field; // This is a copy from Guava, because HBase is still dependent on Guava 12 as a server-side library // dependency and this was first introduced in Guava 20, this is basically a re-packaging of the // Guava class to eliminate the Guava version incompatiblities for libraries such as HBase import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.primitives.Doubles.isFinite; import static java.lang.Double.NaN; import static java.lang.Double.doubleToLongBits; import static java.lang.Double.isNaN; import static org.locationtech.geowave.core.store.statistics.field.StatsAccumulator.calculateNewMeanNonFinite; import static org.locationtech.geowave.core.store.statistics.field.StatsAccumulator.ensureNonNegative; import java.io.Serializable; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Iterator; import java.util.stream.Collector; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import org.checkerframework.checker.nullness.qual.Nullable; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; /** * A bundle of statistical summary values -- sum, count, mean/average, min and max, and several * forms of variance -- that were computed from a single set of zero or more floating-point values. * *

There are two ways to obtain a {@code Stats} instance: * *

  • If all the values you want to summarize are already known, use the appropriate {@code * Stats.of} factory method below. Primitive arrays, iterables and iterators of any kind of * {@code Number}, and primitive varargs are supported.
  • Or, to avoid storing up all the data * first, create a {@link StatsAccumulator} instance, feed values to it as you get them, then call * {@link StatsAccumulator#snapshot}.
* *

Static convenience methods called {@code meanOf} are also provided for users who wish to * calculate only the mean. * *

Java 8 users: If you are not using any of the variance statistics, you may wish to use * built-in JDK libraries instead of this class. * * @author Pete Gillin * @author Kevin Bourrillion * @since 20.0 */ @Beta @GwtIncompatible public final class Stats implements Serializable { private final long count; private final double mean; private final double sumOfSquaresOfDeltas; private final double min; private final double max; /** * Internal constructor. Users should use {@link #of} or {@link StatsAccumulator#snapshot}. * *

To ensure that the created instance obeys its contract, the parameters should satisfy the * following constraints. This is the callers responsibility and is not enforced here. * *

  • If {@code count} is 0, {@code mean} may have any finite value (its only usage will be * to get multiplied by 0 to calculate the sum), and the other parameters may have any values * (they will not be used).
  • If {@code count} is 1, {@code sumOfSquaresOfDeltas} must be * exactly 0.0 or {@link Double#NaN}.
*/ Stats( final long count, final double mean, final double sumOfSquaresOfDeltas, final double min, final double max) { this.count = count; this.mean = mean; this.sumOfSquaresOfDeltas = sumOfSquaresOfDeltas; this.min = min; this.max = max; } /** * Returns statistics over a dataset containing the given values. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision) */ public static Stats of(final Iterable values) { final StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(values); return accumulator.snapshot(); } /** * Returns statistics over a dataset containing the given values. The iterator will be completely * consumed by this method. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision) */ public static Stats of(final Iterator values) { final StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(values); return accumulator.snapshot(); } /** * Returns statistics over a dataset containing the given values. * * @param values a series of values */ public static Stats of(final double... values) { final StatsAccumulator acummulator = new StatsAccumulator(); acummulator.addAll(values); return acummulator.snapshot(); } /** * Returns statistics over a dataset containing the given values. * * @param values a series of values */ public static Stats of(final int... values) { final StatsAccumulator acummulator = new StatsAccumulator(); acummulator.addAll(values); return acummulator.snapshot(); } /** * Returns statistics over a dataset containing the given values. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15)) */ public static Stats of(final long... 
values) { final StatsAccumulator acummulator = new StatsAccumulator(); acummulator.addAll(values); return acummulator.snapshot(); } /** * Returns statistics over a dataset containing the given values. The stream will be completely * consumed by this method. * *

If you have a {@code Stream} rather than a {@code DoubleStream}, you should collect * the values using {@link #toStats()} instead. * * @param values a series of values * @since 28.2 */ public static Stats of(final DoubleStream values) { return values.collect( StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll).snapshot(); } /** * Returns statistics over a dataset containing the given values. The stream will be completely * consumed by this method. * *

If you have a {@code Stream} rather than an {@code IntStream}, you should collect * the values using {@link #toStats()} instead. * * @param values a series of values * @since 28.2 */ public static Stats of(final IntStream values) { return values.collect( StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll).snapshot(); } /** * Returns statistics over a dataset containing the given values. The stream will be completely * consumed by this method. * *

If you have a {@code Stream} rather than a {@code LongStream}, you should collect the * values using {@link #toStats()} instead. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15)) * @since 28.2 */ public static Stats of(final LongStream values) { return values.collect( StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll).snapshot(); } /** * Returns a {@link Collector} which accumulates statistics from a {@link java.util.stream.Stream} * of any type of boxed {@link Number} into a {@link Stats}. Use by calling {@code * boxedNumericStream.collect(toStats())}. The numbers will be converted to {@code double} values * (which may cause loss of precision). * *

If you have any of the primitive streams {@code DoubleStream}, {@code IntStream}, or {@code * LongStream}, you should use the factory method {@link #of} instead. * * @since 28.2 */ public static Collector toStats() { return Collector.of(StatsAccumulator::new, (a, x) -> a.add(x.doubleValue()), (l, r) -> { l.addAll(r); return l; }, StatsAccumulator::snapshot, Collector.Characteristics.UNORDERED); } /** Returns the number of values. */ public long count() { return count; } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

If these values are a sample drawn from a population, this is also an unbiased estimator of * the arithmetic mean of the population. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}. * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}. * *

If you only want to calculate the mean, use {@link #meanOf} instead of creating a * {@link Stats} instance. * * @throws IllegalStateException if the dataset is empty */ public double mean() { checkState(count != 0); return mean; } /** * Returns the sum of the values. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}. * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}. */ public double sum() { return mean * count; } /** * Returns the population * variance of the values. The count must be non-zero. * *

This is guaranteed to return zero if the dataset contains only exactly one finite value. It * is not guaranteed to return zero when the dataset consists of the same value multiple times, * due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty */ public double populationVariance() { checkState(count > 0); if (isNaN(sumOfSquaresOfDeltas)) { return NaN; } if (count == 1) { return 0.0; } return ensureNonNegative(sumOfSquaresOfDeltas) / count(); } /** * Returns the * population standard deviation of the values. The count must be non-zero. * *

This is guaranteed to return zero if the dataset contains only exactly one finite value. It * is not guaranteed to return zero when the dataset consists of the same value multiple times, * due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty */ public double populationStandardDeviation() { return Math.sqrt(populationVariance()); } /** * Returns the unbiased sample * variance of the values. If this dataset is a sample drawn from a population, this is an * unbiased estimator of the population variance of the population. The count must be greater than * one. * *

This is not guaranteed to return zero when the dataset consists of the same value multiple * times, due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty or contains a single value */ public double sampleVariance() { checkState(count > 1); if (isNaN(sumOfSquaresOfDeltas)) { return NaN; } return ensureNonNegative(sumOfSquaresOfDeltas) / (count - 1); } /** * Returns the * corrected sample standard deviation of the values. If this dataset is a sample drawn from a * population, this is an estimator of the population standard deviation of the population which * is less biased than {@link #populationStandardDeviation()} (the unbiased estimator depends on * the distribution). The count must be greater than one. * *

This is not guaranteed to return zero when the dataset consists of the same value multiple * times, due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty or contains a single value */ public double sampleStandardDeviation() { return Math.sqrt(sampleVariance()); } /** * Returns the lowest value in the dataset. The count must be non-zero. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains {@link Double#NEGATIVE_INFINITY} and not {@link Double#NaN} then the result is * {@link Double#NEGATIVE_INFINITY}. If it contains {@link Double#POSITIVE_INFINITY} and finite * values only then the result is the lowest finite value. If it contains * {@link Double#POSITIVE_INFINITY} only then the result is {@link Double#POSITIVE_INFINITY}. * * @throws IllegalStateException if the dataset is empty */ public double min() { checkState(count != 0); return min; } /** * Returns the highest value in the dataset. The count must be non-zero. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains {@link Double#POSITIVE_INFINITY} and not {@link Double#NaN} then the result is * {@link Double#POSITIVE_INFINITY}. If it contains {@link Double#NEGATIVE_INFINITY} and finite * values only then the result is the highest finite value. If it contains * {@link Double#NEGATIVE_INFINITY} only then the result is {@link Double#NEGATIVE_INFINITY}. * * @throws IllegalStateException if the dataset is empty */ public double max() { checkState(count != 0); return max; } /** * {@inheritDoc} * *

Note: This tests exact equality of the calculated statistics, including the floating * point values. Two instances are guaranteed to be considered equal if one is copied from the * other using {@code second = new StatsAccumulator().addAll(first).snapshot()}, if both were * obtained by calling {@code snapshot()} on the same {@link StatsAccumulator} without adding any * values in between the two calls, or if one is obtained from the other after round-tripping * through java serialization. However, floating point rounding errors mean that it may be false * for some instances where the statistics are mathematically equal, including instances * constructed from the same values in a different order... or (in the general case) even in the * same order. (It is guaranteed to return true for instances constructed from the same values in * the same order if {@code strictfp} is in effect, or if the system architecture guarantees * {@code strictfp}-like semantics.) */ @Override public boolean equals(@Nullable final Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Stats other = (Stats) obj; return (count == other.count) && (doubleToLongBits(mean) == doubleToLongBits(other.mean)) && (doubleToLongBits(sumOfSquaresOfDeltas) == doubleToLongBits(other.sumOfSquaresOfDeltas)) && (doubleToLongBits(min) == doubleToLongBits(other.min)) && (doubleToLongBits(max) == doubleToLongBits(other.max)); } /** * {@inheritDoc} * *

Note: This hash code is consistent with exact equality of the calculated statistics, * including the floating point values. See the note on {@link #equals} for details. */ @Override public int hashCode() { return Objects.hashCode(count, mean, sumOfSquaresOfDeltas, min, max); } @Override public String toString() { if (count() > 0) { return MoreObjects.toStringHelper(this).add("count", count).add("mean", mean).add( "populationStandardDeviation", populationStandardDeviation()).add("min", min).add("max", max).toString(); } else { return MoreObjects.toStringHelper(this).add("count", count).toString(); } } double sumOfSquaresOfDeltas() { return sumOfSquaresOfDeltas; } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

The definition of the mean is the same as {@link Stats#mean}. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision) * @throws IllegalArgumentException if the dataset is empty */ public static double meanOf(final Iterable values) { return meanOf(values.iterator()); } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

The definition of the mean is the same as {@link Stats#mean}. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision) * @throws IllegalArgumentException if the dataset is empty */ public static double meanOf(final Iterator values) { checkArgument(values.hasNext()); long count = 1; double mean = values.next().doubleValue(); while (values.hasNext()) { final double value = values.next().doubleValue(); count++; if (isFinite(value) && isFinite(mean)) { // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15) mean += (value - mean) / count; } else { mean = calculateNewMeanNonFinite(mean, value); } } return mean; } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

The definition of the mean is the same as {@link Stats#mean}. * * @param values a series of values * @throws IllegalArgumentException if the dataset is empty */ public static double meanOf(final double... values) { checkArgument(values.length > 0); double mean = values[0]; for (int index = 1; index < values.length; index++) { final double value = values[index]; if (isFinite(value) && isFinite(mean)) { // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15) mean += (value - mean) / (index + 1); } else { mean = calculateNewMeanNonFinite(mean, value); } } return mean; } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

The definition of the mean is the same as {@link Stats#mean}. * * @param values a series of values * @throws IllegalArgumentException if the dataset is empty */ public static double meanOf(final int... values) { checkArgument(values.length > 0); double mean = values[0]; for (int index = 1; index < values.length; index++) { final double value = values[index]; if (isFinite(value) && isFinite(mean)) { // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15) mean += (value - mean) / (index + 1); } else { mean = calculateNewMeanNonFinite(mean, value); } } return mean; } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

The definition of the mean is the same as {@link Stats#mean}. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15)) * @throws IllegalArgumentException if the dataset is empty */ public static double meanOf(final long... values) { checkArgument(values.length > 0); double mean = values[0]; for (int index = 1; index < values.length; index++) { final double value = values[index]; if (isFinite(value) && isFinite(mean)) { // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15) mean += (value - mean) / (index + 1); } else { mean = calculateNewMeanNonFinite(mean, value); } } return mean; } // Serialization helpers /** The size of byte array representation in bytes. */ static final int BYTES = (Long.SIZE + (Double.SIZE * 4)) / Byte.SIZE; /** * Gets a byte array representation of this instance. * *

Note: No guarantees are made regarding stability of the representation between * versions. */ public byte[] toByteArray() { final ByteBuffer buff = ByteBuffer.allocate(BYTES).order(ByteOrder.LITTLE_ENDIAN); writeTo(buff); return buff.array(); } /** * Writes to the given {@link ByteBuffer} a byte representation of this instance. * *

Note: No guarantees are made regarding stability of the representation between * versions. * * @param buffer A {@link ByteBuffer} with at least BYTES {@link ByteBuffer#remaining}, ordered as * {@link ByteOrder#LITTLE_ENDIAN}, to which a BYTES-long byte representation of this * instance is written. In the process increases the position of {@link ByteBuffer} by * BYTES. */ void writeTo(final ByteBuffer buffer) { checkNotNull(buffer); checkArgument( buffer.remaining() >= BYTES, "Expected at least Stats.BYTES = %s remaining , got %s", BYTES, buffer.remaining()); buffer.putLong(count).putDouble(mean).putDouble(sumOfSquaresOfDeltas).putDouble(min).putDouble( max); } /** * Creates a Stats instance from the given byte representation which was obtained by * {@link #toByteArray}. * *

Note: No guarantees are made regarding stability of the representation between * versions. */ public static Stats fromByteArray(final byte[] byteArray) { checkNotNull(byteArray); checkArgument( byteArray.length == BYTES, "Expected Stats.BYTES = %s remaining , got %s", BYTES, byteArray.length); return readFrom(ByteBuffer.wrap(byteArray).order(ByteOrder.LITTLE_ENDIAN)); } /** * Creates a Stats instance from the byte representation read from the given {@link ByteBuffer}. * *

Note: No guarantees are made regarding stability of the representation between * versions. * * @param buffer A {@link ByteBuffer} with at least BYTES {@link ByteBuffer#remaining}, ordered as * {@link ByteOrder#LITTLE_ENDIAN}, from which a BYTES-long byte representation of this * instance is read. In the process increases the position of {@link ByteBuffer} by BYTES. */ static Stats readFrom(final ByteBuffer buffer) { checkNotNull(buffer); checkArgument( buffer.remaining() >= BYTES, "Expected at least Stats.BYTES = %s remaining , got %s", BYTES, buffer.remaining()); return new Stats( buffer.getLong(), buffer.getDouble(), buffer.getDouble(), buffer.getDouble(), buffer.getDouble()); } private static final long serialVersionUID = 0; } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/field/StatsAccumulator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * Copyright (C) 2012 The Guava Authors * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.field; // This is a copy from Guava, because HBase is still dependent on Guava 12 as a server-side library // dependency and this was first introduced in Guava 20, this is basically a re-packaging of the // Guava class to eliminate the Guava version incompatiblities for libraries such as HBase import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.primitives.Doubles.isFinite; import static java.lang.Double.NaN; import static java.lang.Double.isNaN; import java.util.Iterator; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtIncompatible; /** * A mutable object which accumulates double values and tracks some basic statistics over all the * values added so far. The values may be added singly or in groups. This class is not thread safe. * * @author Pete Gillin * @author Kevin Bourrillion * @since 20.0 */ @Beta @GwtIncompatible public final class StatsAccumulator { // These fields must satisfy the requirements of Stats' constructor as well as those of the stat // methods of this class. private long count = 0; private double mean = 0.0; // any finite value will do, we only use it to multiply by zero for sum private double sumOfSquaresOfDeltas = 0.0; private double min = NaN; // any value will do private double max = NaN; // any value will do /** Adds the given value to the dataset. 
*/ public void add(final double value) { if (count == 0) { count = 1; mean = value; min = value; max = value; if (!isFinite(value)) { sumOfSquaresOfDeltas = NaN; } } else { count++; if (isFinite(value) && isFinite(mean)) { // Art of Computer Programming vol. 2, Knuth, 4.2.2, (15) and (16) final double delta = value - mean; mean += delta / count; sumOfSquaresOfDeltas += delta * (value - mean); } else { mean = calculateNewMeanNonFinite(mean, value); sumOfSquaresOfDeltas = NaN; } min = Math.min(min, value); max = Math.max(max, value); } } /** * Adds the given values to the dataset. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision) */ public void addAll(final Iterable values) { for (final Number value : values) { add(value.doubleValue()); } } /** * Adds the given values to the dataset. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision) */ public void addAll(final Iterator values) { while (values.hasNext()) { add(values.next().doubleValue()); } } /** * Adds the given values to the dataset. * * @param values a series of values */ public void addAll(final double... values) { for (final double value : values) { add(value); } } /** * Adds the given values to the dataset. * * @param values a series of values */ public void addAll(final int... values) { for (final int value : values) { add(value); } } /** * Adds the given values to the dataset. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15)) */ public void addAll(final long... values) { for (final long value : values) { add(value); } } /** * Adds the given values to the dataset. The stream will be completely consumed by this method. 
* * @param values a series of values * @since 28.2 */ public void addAll(final DoubleStream values) { addAll(values.collect(StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll)); } /** * Adds the given values to the dataset. The stream will be completely consumed by this method. * * @param values a series of values * @since 28.2 */ public void addAll(final IntStream values) { addAll(values.collect(StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll)); } /** * Adds the given values to the dataset. The stream will be completely consumed by this method. * * @param values a series of values, which will be converted to {@code double} values (this may * cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15)) * @since 28.2 */ public void addAll(final LongStream values) { addAll(values.collect(StatsAccumulator::new, StatsAccumulator::add, StatsAccumulator::addAll)); } /** * Adds the given statistics to the dataset, as if the individual values used to compute the * statistics had been added directly. */ public void addAll(final Stats values) { if (values.count() == 0) { return; } merge(values.count(), values.mean(), values.sumOfSquaresOfDeltas(), values.min(), values.max()); } /** * Adds the given statistics to the dataset, as if the individual values used to compute the * statistics had been added directly. 
* * @since 28.2 */ public void addAll(final StatsAccumulator values) { if (values.count() == 0) { return; } merge(values.count(), values.mean(), values.sumOfSquaresOfDeltas(), values.min(), values.max()); } private void merge( final long otherCount, final double otherMean, final double otherSumOfSquaresOfDeltas, final double otherMin, final double otherMax) { if (count == 0) { count = otherCount; mean = otherMean; sumOfSquaresOfDeltas = otherSumOfSquaresOfDeltas; min = otherMin; max = otherMax; } else { count += otherCount; if (isFinite(mean) && isFinite(otherMean)) { // This is a generalized version of the calculation in add(double) above. final double delta = otherMean - mean; mean += (delta * otherCount) / count; sumOfSquaresOfDeltas += otherSumOfSquaresOfDeltas + (delta * (otherMean - mean) * otherCount); } else { mean = calculateNewMeanNonFinite(mean, otherMean); sumOfSquaresOfDeltas = NaN; } min = Math.min(min, otherMin); max = Math.max(max, otherMax); } } /** Returns an immutable snapshot of the current statistics. */ public Stats snapshot() { return new Stats(count, mean, sumOfSquaresOfDeltas, min, max); } /** Returns the number of values. */ public long count() { return count; } /** * Returns the arithmetic mean of the * values. The count must be non-zero. * *

If these values are a sample drawn from a population, this is also an unbiased estimator of * the arithmetic mean of the population. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}. * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}. * * @throws IllegalStateException if the dataset is empty */ public double mean() { checkState(count != 0); return mean; } /** * Returns the sum of the values. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains both {@link Double#POSITIVE_INFINITY} and {@link Double#NEGATIVE_INFINITY} then the * result is {@link Double#NaN}. If it contains {@link Double#POSITIVE_INFINITY} and finite values * only or {@link Double#POSITIVE_INFINITY} only, the result is {@link Double#POSITIVE_INFINITY}. * If it contains {@link Double#NEGATIVE_INFINITY} and finite values only or * {@link Double#NEGATIVE_INFINITY} only, the result is {@link Double#NEGATIVE_INFINITY}. */ public final double sum() { return mean * count; } /** * Returns the population * variance of the values. The count must be non-zero. * *

This is guaranteed to return zero if the dataset contains only exactly one finite value. It * is not guaranteed to return zero when the dataset consists of the same value multiple times, * due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty */ public final double populationVariance() { checkState(count != 0); if (isNaN(sumOfSquaresOfDeltas)) { return NaN; } if (count == 1) { return 0.0; } return ensureNonNegative(sumOfSquaresOfDeltas) / count; } /** * Returns the * population standard deviation of the values. The count must be non-zero. * *

This is guaranteed to return zero if the dataset contains only exactly one finite value. It * is not guaranteed to return zero when the dataset consists of the same value multiple times, * due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty */ public final double populationStandardDeviation() { return Math.sqrt(populationVariance()); } /** * Returns the unbiased sample * variance of the values. If this dataset is a sample drawn from a population, this is an * unbiased estimator of the population variance of the population. The count must be greater than * one. * *

This is not guaranteed to return zero when the dataset consists of the same value multiple * times, due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty or contains a single value */ public final double sampleVariance() { checkState(count > 1); if (isNaN(sumOfSquaresOfDeltas)) { return NaN; } return ensureNonNegative(sumOfSquaresOfDeltas) / (count - 1); } /** * Returns the * corrected sample standard deviation of the values. If this dataset is a sample drawn from a * population, this is an estimator of the population standard deviation of the population which * is less biased than {@link #populationStandardDeviation()} (the unbiased estimator depends on * the distribution). The count must be greater than one. * *

This is not guaranteed to return zero when the dataset consists of the same value multiple * times, due to numerical errors. However, it is guaranteed never to return a negative result. * *

Non-finite values

* *

If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, * {@link Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}. * * @throws IllegalStateException if the dataset is empty or contains a single value */ public final double sampleStandardDeviation() { return Math.sqrt(sampleVariance()); } /** * Returns the lowest value in the dataset. The count must be non-zero. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains {@link Double#NEGATIVE_INFINITY} and not {@link Double#NaN} then the result is * {@link Double#NEGATIVE_INFINITY}. If it contains {@link Double#POSITIVE_INFINITY} and finite * values only then the result is the lowest finite value. If it contains * {@link Double#POSITIVE_INFINITY} only then the result is {@link Double#POSITIVE_INFINITY}. * * @throws IllegalStateException if the dataset is empty */ public double min() { checkState(count != 0); return min; } /** * Returns the highest value in the dataset. The count must be non-zero. * *

Non-finite values

* *

If the dataset contains {@link Double#NaN} then the result is {@link Double#NaN}. If it * contains {@link Double#POSITIVE_INFINITY} and not {@link Double#NaN} then the result is * {@link Double#POSITIVE_INFINITY}. If it contains {@link Double#NEGATIVE_INFINITY} and finite * values only then the result is the highest finite value. If it contains * {@link Double#NEGATIVE_INFINITY} only then the result is {@link Double#NEGATIVE_INFINITY}. * * @throws IllegalStateException if the dataset is empty */ public double max() { checkState(count != 0); return max; } double sumOfSquaresOfDeltas() { return sumOfSquaresOfDeltas; } /** * Calculates the new value for the accumulated mean when a value is added, in the case where at * least one of the previous mean and the value is non-finite. */ static double calculateNewMeanNonFinite(final double previousMean, final double value) { /* * Desired behaviour is to match the results of applying the naive mean formula. In particular, * the update formula can subtract infinities in cases where the naive formula would add them. * * Consequently: 1. If the previous mean is finite and the new value is non-finite then the new * mean is that value (whether it is NaN or infinity). 2. If the new value is finite and the * previous mean is non-finite then the mean is unchanged (whether it is NaN or infinity). 3. If * both the previous mean and the new value are non-finite and... 3a. ...either or both is NaN * (so mean != value) then the new mean is NaN. 3b. ...they are both the same infinities (so * mean == value) then the mean is unchanged. 3c. ...they are different infinities (so mean != * value) then the new mean is NaN. */ if (isFinite(previousMean)) { // This is case 1. return value; } else if (isFinite(value) || (previousMean == value)) { // This is case 2. or 3b. return previousMean; } else { // This is case 3a. or 3c. return NaN; } } /** Returns its argument if it is non-negative, zero if it is negative. 
*/ static double ensureNonNegative(final double value) { checkArgument(!isNaN(value)); return Math.max(value, 0.0); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/DifferingVisibilityCountStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import java.util.HashSet; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; /** * Counts the number of entries with differing visibilities. 
*/ public class DifferingVisibilityCountStatistic extends IndexStatistic { public static final IndexStatisticType STATS_TYPE = new IndexStatisticType<>("DIFFERING_VISIBILITY_COUNT"); public DifferingVisibilityCountStatistic() { super(STATS_TYPE); } public DifferingVisibilityCountStatistic(final String indexName) { super(STATS_TYPE, indexName); } @Override public String getDescription() { return "Counts the number of differing visibilities in the index."; } @Override public DifferingVisibilityCountValue createEmpty() { return new DifferingVisibilityCountValue(this); } public static class DifferingVisibilityCountValue extends StatisticValue implements StatisticsIngestCallback, StatisticsDeleteCallback { private long entriesWithDifferingFieldVisibilities = 0; public DifferingVisibilityCountValue() { this(null); } public DifferingVisibilityCountValue(Statistic statistic) { super(statistic); } public boolean isAnyEntryDifferingFieldVisiblity() { return entriesWithDifferingFieldVisibilities > 0; } @Override public void merge(Mergeable merge) { if ((merge != null) && (merge instanceof DifferingVisibilityCountValue)) { entriesWithDifferingFieldVisibilities += ((DifferingVisibilityCountValue) merge).entriesWithDifferingFieldVisibilities; } } /** This is expensive, but necessary since there may be duplicates */ // TODO entryDeleted should only be called once with all duplicates private transient HashSet ids = new HashSet<>(); @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { for (final GeoWaveRow kv : rows) { if (entryHasDifferentVisibilities(kv)) { if (ids.add(new ByteArray(rows[0].getDataId()))) { entriesWithDifferingFieldVisibilities++; } } } } @Override public void entryDeleted( DataTypeAdapter adapter, final T entry, final GeoWaveRow... 
kvs) { for (final GeoWaveRow kv : kvs) { if (entryHasDifferentVisibilities(kv)) { entriesWithDifferingFieldVisibilities--; } } } @Override public Long getValue() { return entriesWithDifferingFieldVisibilities; } @Override public byte[] toBinary() { return VarintUtils.writeUnsignedLong(entriesWithDifferingFieldVisibilities); } @Override public void fromBinary(byte[] bytes) { entriesWithDifferingFieldVisibilities = VarintUtils.readUnsignedLong(ByteBuffer.wrap(bytes)); } } private static boolean entryHasDifferentVisibilities(final GeoWaveRow geowaveRow) { if ((geowaveRow.getFieldValues() != null) && (geowaveRow.getFieldValues().length > 1)) { // if there is 0 or 1 field, there won't be differing visibilities return true; } return false; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/DuplicateEntryCountStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; /** * Counts the number of entries with duplicates in the index. 
*/ public class DuplicateEntryCountStatistic extends IndexStatistic { public static final IndexStatisticType STATS_TYPE = new IndexStatisticType<>("DUPLICATE_ENTRY_COUNT"); public DuplicateEntryCountStatistic() { super(STATS_TYPE); } public DuplicateEntryCountStatistic(final String indexName) { super(STATS_TYPE, indexName); } @Override public DuplicateEntryCountValue createEmpty() { return new DuplicateEntryCountValue(this); } @Override public String getDescription() { return "Counts the number of entries with duplicates in the index."; } public static class DuplicateEntryCountValue extends StatisticValue implements StatisticsIngestCallback, StatisticsDeleteCallback { private long entriesWithDuplicates = 0L; public DuplicateEntryCountValue() { this(null); } public DuplicateEntryCountValue(final Statistic statistic) { super(statistic); } public boolean isAnyEntryHaveDuplicates() { return entriesWithDuplicates > 0; } @Override public Long getValue() { return entriesWithDuplicates; } @Override public void merge(Mergeable merge) { if ((merge != null) && (merge instanceof DuplicateEntryCountValue)) { entriesWithDuplicates += ((DuplicateEntryCountValue) merge).getValue(); } } @Override public byte[] toBinary() { return VarintUtils.writeSignedLong(entriesWithDuplicates); } @Override public void fromBinary(byte[] bytes) { entriesWithDuplicates = VarintUtils.readSignedLong(ByteBuffer.wrap(bytes)); } @Override public void entryDeleted(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { if (rows.length > 0) { if (entryHasDuplicates(rows[0])) { entriesWithDuplicates--; } } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... 
rows) { if (rows.length > 0) { if (entryHasDuplicates(rows[0])) { entriesWithDuplicates++; } } } private static boolean entryHasDuplicates(final GeoWaveRow kv) { return kv.getNumberOfDuplicates() > 0; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/FieldVisibilityCountStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.data.visibility.VisibilityExpression; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * Maintains a count of entries for every visibility. 
*/ public class FieldVisibilityCountStatistic extends IndexStatistic { public static final IndexStatisticType STATS_TYPE = new IndexStatisticType<>("FIELD_VISIBILITY_COUNT"); public FieldVisibilityCountStatistic() { super(STATS_TYPE); } public FieldVisibilityCountStatistic(final String indexName) { super(STATS_TYPE, indexName); } @Override public String getDescription() { return "Counts the number of entries for each field visibility."; } @Override public FieldVisibilityCountValue createEmpty() { return new FieldVisibilityCountValue(this); } public static class FieldVisibilityCountValue extends StatisticValue> implements StatisticsIngestCallback, StatisticsDeleteCallback { private final Map countsPerVisibility = Maps.newHashMap(); public FieldVisibilityCountValue() { this(null); } public FieldVisibilityCountValue(final Statistic statistic) { super(statistic); } public boolean isAuthorizationsLimiting(final String... authorizations) { final Set set = Sets.newHashSet(authorizations); for (final Entry vis : countsPerVisibility.entrySet()) { if ((vis.getValue() > 0) && (vis.getKey() != null) && (vis.getKey().getBytes().length > 0) && !VisibilityExpression.evaluate(vis.getKey().getString(), set)) { return true; } } return false; } @Override public void merge(Mergeable merge) { if ((merge != null) && (merge instanceof FieldVisibilityCountValue)) { final Map otherCounts = ((FieldVisibilityCountValue) merge).countsPerVisibility; for (final Entry entry : otherCounts.entrySet()) { Long count = countsPerVisibility.get(entry.getKey()); if (count == null) { count = 0L; } countsPerVisibility.put(entry.getKey(), count + entry.getValue()); } } } @Override public void entryDeleted(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { updateEntry(-1, rows); } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { updateEntry(1, rows); } private void updateEntry(final int incrementValue, final GeoWaveRow... 
kvs) { for (final GeoWaveRow row : kvs) { final GeoWaveValue[] values = row.getFieldValues(); for (final GeoWaveValue v : values) { ByteArray visibility = new ByteArray(new byte[] {}); if (v.getVisibility() != null) { visibility = new ByteArray(v.getVisibility()); } Long count = countsPerVisibility.get(visibility); if (count == null) { count = 0L; } countsPerVisibility.put(visibility, count + incrementValue); } } } @Override public Map getValue() { return countsPerVisibility; } @Override public byte[] toBinary() { int bufferSize = 0; int serializedCounts = 0; for (final Entry entry : countsPerVisibility.entrySet()) { if (entry.getValue() != 0) { bufferSize += VarintUtils.unsignedIntByteLength(entry.getKey().getBytes().length); bufferSize += entry.getKey().getBytes().length; bufferSize += VarintUtils.unsignedLongByteLength(entry.getValue()); serializedCounts++; } } bufferSize += VarintUtils.unsignedIntByteLength(serializedCounts); final ByteBuffer buf = ByteBuffer.allocate(bufferSize); VarintUtils.writeUnsignedInt(serializedCounts, buf); for (final Entry entry : countsPerVisibility.entrySet()) { if (entry.getValue() != 0) { VarintUtils.writeUnsignedInt(entry.getKey().getBytes().length, buf); buf.put(entry.getKey().getBytes()); VarintUtils.writeUnsignedLong(entry.getValue(), buf); } } return buf.array(); } @Override public void fromBinary(byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int size = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, size); countsPerVisibility.clear(); for (int i = 0; i < size; i++) { final int idCount = VarintUtils.readUnsignedInt(buf); final byte[] id = ByteArrayUtils.safeRead(buf, idCount); final long count = VarintUtils.readUnsignedLong(buf); if (count != 0) { countsPerVisibility.put(new ByteArray(id), count); } } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/IndexMetaDataSetStatistic.java 
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import java.util.List; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import org.locationtech.geowave.core.store.util.DataStoreUtils; import com.clearspring.analytics.util.Lists; /** * Maintains metadata about an index. The tracked metadata is provided by the index strategy. 
*/ public class IndexMetaDataSetStatistic extends IndexStatistic { public static final IndexStatisticType STATS_TYPE = new IndexStatisticType<>("INDEX_METADATA"); private byte[] metadata = null; public IndexMetaDataSetStatistic() { this(null, Lists.newArrayList()); } public IndexMetaDataSetStatistic(final String indexName) { this(indexName, Lists.newArrayList()); } public IndexMetaDataSetStatistic(final String indexName, List baseMetadata) { super(STATS_TYPE, indexName); this.metadata = PersistenceUtils.toBinary(baseMetadata); } @Override public String getDescription() { return "Maintains metadata about an index."; } @Override public IndexMetaDataSetValue createEmpty() { IndexMetaDataSetValue value = new IndexMetaDataSetValue(this); value.fromBinary(metadata); return value; } @Override protected int byteLength() { return super.byteLength() + metadata.length + VarintUtils.unsignedIntByteLength(metadata.length); } @Override protected void writeBytes(final ByteBuffer buffer) { super.writeBytes(buffer); VarintUtils.writeUnsignedInt(metadata.length, buffer); buffer.put(metadata); } @Override protected void readBytes(final ByteBuffer buffer) { super.readBytes(buffer); metadata = new byte[VarintUtils.readUnsignedInt(buffer)]; buffer.get(metadata); } public static class IndexMetaDataSetValue extends StatisticValue> implements StatisticsIngestCallback, StatisticsDeleteCallback { private List metadata; public IndexMetaDataSetValue() { this(null); } public IndexMetaDataSetValue(Statistic statistic) { super(statistic); } public IndexMetaData[] toArray() { return metadata.toArray(new IndexMetaData[metadata.size()]); } @Override public void merge(Mergeable merge) { if ((merge != null) && (merge instanceof IndexMetaDataSetValue)) { for (int i = 0; i < metadata.size(); i++) { final IndexMetaData imd = metadata.get(i); final IndexMetaData imd2 = ((IndexMetaDataSetValue) merge).metadata.get(i); imd.merge(imd2); } } } @Override public void entryDeleted(DataTypeAdapter adapter, T 
entry, GeoWaveRow... rows) { if (!this.metadata.isEmpty()) { final InsertionIds insertionIds = DataStoreUtils.keysToInsertionIds(rows); for (final IndexMetaData imd : this.metadata) { imd.insertionIdsRemoved(insertionIds); } } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { if (!this.metadata.isEmpty()) { final InsertionIds insertionIds = DataStoreUtils.keysToInsertionIds(rows); for (final IndexMetaData imd : this.metadata) { imd.insertionIdsAdded(insertionIds); } } } @Override public List getValue() { return metadata; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(metadata); } @SuppressWarnings({"unchecked", "rawtypes"}) @Override public void fromBinary(byte[] bytes) { metadata = (List) PersistenceUtils.fromBinaryAsList(bytes); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/IndexStatisticType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Statistic type for index statistics. Generally used for type checking. */ public class IndexStatisticType> extends StatisticType { private static final long serialVersionUID = 1L; public IndexStatisticType(final String id) { super(id); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/MaxDuplicatesStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; /** * Maintains the maximum number of duplicates that a single entry in the data set contains. 
*/ public class MaxDuplicatesStatistic extends IndexStatistic { public static final IndexStatisticType STATS_TYPE = new IndexStatisticType<>("MAX_DUPLICATES"); public MaxDuplicatesStatistic() { super(STATS_TYPE); } public MaxDuplicatesStatistic(final String indexName) { super(STATS_TYPE, indexName); } @Override public String getDescription() { return "Maintains the maximum number of duplicates for an entry in the data set."; } @Override public MaxDuplicatesValue createEmpty() { return new MaxDuplicatesValue(this); } public static class MaxDuplicatesValue extends StatisticValue implements StatisticsIngestCallback { public MaxDuplicatesValue() { this(null); } public MaxDuplicatesValue(Statistic statistic) { super(statistic); } private int maxDuplicates = 0; public int getEntriesWithDifferingFieldVisibilities() { return maxDuplicates; } @Override public void merge(Mergeable merge) { if (merge != null && merge instanceof MaxDuplicatesValue) { maxDuplicates = Math.max(maxDuplicates, ((MaxDuplicatesValue) merge).getValue()); } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { for (final GeoWaveRow kv : rows) { maxDuplicates = Math.max(maxDuplicates, kv.getNumberOfDuplicates()); } } @Override public Integer getValue() { return maxDuplicates; } @Override public byte[] toBinary() { return VarintUtils.writeUnsignedInt(maxDuplicates); } @Override public void fromBinary(byte[] bytes) { maxDuplicates = VarintUtils.readUnsignedInt(ByteBuffer.wrap(bytes)); } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/PartitionsStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; /** * This class is responsible for maintaining all unique Partition IDs that are being used within a * data set. 
*/ public class PartitionsStatistic extends IndexStatistic { public static final IndexStatisticType STATS_TYPE = new IndexStatisticType<>("PARTITIONS"); public PartitionsStatistic() { super(STATS_TYPE); } public PartitionsStatistic(final String indexName) { super(STATS_TYPE, indexName); } @Override public String getDescription() { return "Maintains a set of all unique partition IDs."; } @Override public PartitionsValue createEmpty() { return new PartitionsValue(this); } public static class PartitionsValue extends StatisticValue> implements StatisticsIngestCallback { private Set partitions = new HashSet<>(); public PartitionsValue() { this(null); } public PartitionsValue(Statistic statistic) { super(statistic); } @Override public void merge(Mergeable merge) { if (merge instanceof PartitionsValue) { partitions.addAll(((PartitionsValue) merge).partitions); } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { for (final GeoWaveRow kv : rows) { partitions.add(getPartitionKey(kv.getPartitionKey())); } } @Override public Set getValue() { return partitions; } @Override public byte[] toBinary() { if (!partitions.isEmpty()) { // we know each partition is constant size, so start with the size // of the partition keys final ByteArray first = partitions.iterator().next(); if ((first != null) && (first.getBytes() != null)) { final ByteBuffer buffer = ByteBuffer.allocate((first.getBytes().length * partitions.size()) + 1); buffer.put((byte) first.getBytes().length); for (final ByteArray e : partitions) { buffer.put(e.getBytes()); } return buffer.array(); } } return new byte[0]; } @Override public void fromBinary(byte[] bytes) { final ByteBuffer buffer = ByteBuffer.wrap(bytes); partitions = new HashSet<>(); if (buffer.remaining() > 0) { final int partitionKeySize = unsignedToBytes(buffer.get()); if (partitionKeySize > 0) { final int numPartitions = buffer.remaining() / partitionKeySize; for (int i = 0; i < numPartitions; i++) { final 
byte[] partition = ByteArrayUtils.safeRead(buffer, partitionKeySize); partitions.add(new ByteArray(partition)); } } } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("["); for (ByteArray partition : partitions) { sb.append(Arrays.toString(partition.getBytes())).append(","); } if (partitions.size() > 0) { // Remove last comma sb.deleteCharAt(sb.length() - 1); } sb.append("]"); return sb.toString(); } } protected static ByteArray getPartitionKey(final byte[] partitionBytes) { return ((partitionBytes == null) || (partitionBytes.length == 0)) ? null : new ByteArray(partitionBytes); } public static int unsignedToBytes(final byte b) { return b & 0xFF; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/index/RowRangeHistogramStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.store.adapter.statistics.histogram.ByteUtils; import org.locationtech.geowave.core.store.adapter.statistics.histogram.NumericHistogram; import org.locationtech.geowave.core.store.adapter.statistics.histogram.TDigestNumericHistogram; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; /** * Dynamic histogram provide very high accuracy for CDF and quantiles over the a numeric attribute. 
*/
public class RowRangeHistogramStatistic extends IndexStatistic<RowRangeHistogramValue> {
  public static final IndexStatisticType<RowRangeHistogramValue> STATS_TYPE =
      new IndexStatisticType<>("ROW_RANGE_HISTOGRAM");

  /** No-arg constructor required for persistence/serialization. */
  public RowRangeHistogramStatistic() {
    super(STATS_TYPE);
  }

  public RowRangeHistogramStatistic(final String indexName) {
    super(STATS_TYPE, indexName);
  }

  @Override
  public String getDescription() {
    return "Provides a histogram of row ranges.";
  }

  @Override
  public RowRangeHistogramValue createEmpty() {
    return new RowRangeHistogramValue(this);
  }

  /**
   * Statistic value backed by a dynamic (t-digest) histogram over the numeric projection of row
   * sort keys. Supports CDF, quantile, and range-cardinality estimates.
   */
  public static class RowRangeHistogramValue extends StatisticValue<NumericHistogram> implements
      StatisticsIngestCallback {
    private NumericHistogram histogram;

    public RowRangeHistogramValue() {
      this(null);
    }

    public RowRangeHistogramValue(final Statistic<?> statistic) {
      super(statistic);
      histogram = createHistogram();
    }

    /**
     * Estimates the number of rows whose sort keys fall within the given range.
     *
     * @param start inclusive start key, or null to count from the beginning
     * @param end end key (treated as a prefix), or null to count to the end
     * @return the estimated row count within the range
     */
    public double cardinality(final byte[] start, final byte[] end) {
      final double startSum = start == null ? 0 : histogram.sum(ByteUtils.toDouble(start), true);
      final double endSum =
          end == null ? histogram.getTotalCount()
              : histogram.sum(ByteUtils.toDoubleAsNextPrefix(end), true);
      return endSum - startSum;
    }

    /**
     * Computes evenly spaced quantiles: the values at 1/bins, 2/bins, ..., 1.0.
     *
     * @param bins the number of quantile bins
     * @return one quantile value per bin
     */
    public double[] quantile(final int bins) {
      final double[] result = new double[bins];
      final double binSize = 1.0 / bins;
      for (int bin = 0; bin < bins; bin++) {
        result[bin] = quantile(binSize * (bin + 1));
      }
      return result;
    }

    /** Cumulative distribution at the numeric projection of the given key. */
    public double cdf(final byte[] id) {
      return histogram.cdf(ByteUtils.toDouble(id));
    }

    public double quantile(final double percentage) {
      return histogram.quantile(percentage);
    }

    /** Fraction of the total population with sort keys between start and stop. */
    public double percentPopulationOverRange(final byte[] start, final byte[] stop) {
      return cdf(stop) - cdf(start);
    }

    public long getTotalCount() {
      return histogram.getTotalCount();
    }

    @Override
    public void merge(final Mergeable merge) {
      if (merge instanceof RowRangeHistogramValue) {
        final NumericHistogram otherHistogram = ((RowRangeHistogramValue) merge).histogram;
        if (histogram == null) {
          histogram = otherHistogram;
        } else if (otherHistogram != null) {
          histogram.merge(otherHistogram);
        }
      }
    }

    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      // each row's sort key contributes one sample to the histogram
      for (final GeoWaveRow kv : rows) {
        final byte[] idBytes = kv.getSortKey();
        histogram.add(ByteUtils.toDouble(idBytes));
      }
    }

    @Override
    public NumericHistogram getValue() {
      return histogram;
    }

    @Override
    public byte[] toBinary() {
      final ByteBuffer buffer = ByteBuffer.allocate(histogram.bufferSize());
      histogram.toBinary(buffer);
      return buffer.array();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buffer = ByteBuffer.wrap(bytes);
      // an empty payload leaves the freshly-created (empty) histogram in place
      if (buffer.hasRemaining()) {
        histogram.fromBinary(buffer);
      }
    }
  }

  private static NumericHistogram createHistogram() {
    return new TDigestNumericHistogram();
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/AbstractStatisticQuery.java
================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.BinConstraints; import org.locationtech.geowave.core.store.api.StatisticQuery; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Base statistic query implementation. */ public abstract class AbstractStatisticQuery, R> implements StatisticQuery { private final StatisticType statisticType; private final String tag; private final BinConstraints binConstraints; private final String[] authorizations; public AbstractStatisticQuery( final StatisticType statisticType, final String tag, final BinConstraints binConstraints, final String[] authorizations) { this.statisticType = statisticType; this.tag = tag; this.binConstraints = binConstraints; this.authorizations = authorizations; } @Override public StatisticType statisticType() { return statisticType; } @Override public String tag() { return tag; } @Override public BinConstraints binConstraints() { return binConstraints; } @Override public String[] authorizations() { return authorizations; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/AbstractStatisticQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.query;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.locationtech.geowave.core.store.api.BinConstraints;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticQueryBuilder;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.statistics.StatisticType;

/**
 * Base statistic query builder implementation.
 *
 * @param <V> the statistic value type
 * @param <R> the raw value type held by the statistic value
 * @param <B> the concrete builder type, for fluent chaining
 */
@SuppressWarnings("unchecked")
public abstract class AbstractStatisticQueryBuilder<V extends StatisticValue<R>, R, B extends StatisticQueryBuilder<V, R, B>> implements
    StatisticQueryBuilder<V, R, B> {

  protected final StatisticType<V> statisticType;
  protected String tag = null;
  protected BinConstraints binConstraints = null;
  protected List<String> authorizations = new ArrayList<>();

  public AbstractStatisticQueryBuilder(final StatisticType<V> statisticType) {
    this.statisticType = statisticType;
  }

  @Override
  public B binConstraints(final BinConstraints binConstraints) {
    this.binConstraints = binConstraints;
    return (B) this;
  }

  @Override
  public B tag(final String tag) {
    this.tag = tag;
    return (B) this;
  }

  @Override
  public B internal() {
    this.tag = Statistic.INTERNAL_TAG;
    return (B) this;
  }

  @Override
  public B addAuthorization(final String authorization) {
    authorizations.add(authorization);
    return (B) this;
  }

  @Override
  public B authorizations(final String[] authorizations) {
    if (authorizations != null) {
      // copy into a mutable list: Arrays.asList returns a fixed-size view, which would make a
      // subsequent addAuthorization call throw UnsupportedOperationException
      this.authorizations = new ArrayList<>(Arrays.asList(authorizations));
    } else {
      this.authorizations.clear();
    }
    return (B) this;
  }
}
================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/BinConstraintsImpl.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.store.api.BinConstraints; import org.locationtech.geowave.core.store.api.Statistic; /** * The basic implementations for BinConstraints */ public class BinConstraintsImpl implements BinConstraints { private ByteArrayConstraints constraints; private Object object; public BinConstraintsImpl() { super(); constraints = new ExplicitConstraints(); } public BinConstraintsImpl(final boolean allBins) { super(); constraints = new ExplicitConstraints(allBins); } public BinConstraintsImpl(final ByteArray[] bins, final boolean isPrefix) { super(); constraints = new ExplicitConstraints(bins, isPrefix); } public BinConstraintsImpl(final ByteArrayRange[] binRanges) { super(); constraints = new ExplicitConstraints(binRanges); } public BinConstraintsImpl(final Object object) { super(); this.object = object; } @Override public ByteArrayConstraints constraints(final Statistic stat) { if (constraints != null) { return constraints; } else if ((stat != null) && (stat.getBinningStrategy() != null) && (object != null)) { constraints = stat.getBinningStrategy().constraints(object); } else { constraints = new ExplicitConstraints(); } return constraints; } public static class ExplicitConstraints implements ByteArrayConstraints { private final ByteArray[] bins; private final ByteArrayRange[] binRanges; private final boolean isPrefix; private final boolean isAllBins; public ExplicitConstraints() { // empty constraints this(false); } public 
ExplicitConstraints(final boolean allBins) { // empty constraints this(new ByteArray[0], false, allBins); } public ExplicitConstraints(final ByteArray[] bins) { this(bins, false); } public ExplicitConstraints(final ByteArrayRange[] binRanges) { this(new ByteArray[0], binRanges, false, false); } public ExplicitConstraints(final ByteArray[] bins, final boolean isPrefix) { this(bins, isPrefix, false); } public ExplicitConstraints( final ByteArray[] bins, final boolean isPrefix, final boolean isAllBins) { this(bins, new ByteArrayRange[0], isPrefix, isAllBins); } public ExplicitConstraints( final ByteArray[] bins, final ByteArrayRange[] binRanges, final boolean isPrefix, final boolean isAllBins) { this.bins = bins; this.binRanges = binRanges; this.isPrefix = isPrefix; this.isAllBins = isAllBins; } @Override public ByteArray[] getBins() { return bins; } @Override public boolean isPrefix() { return isPrefix; } @Override public boolean isAllBins() { return isAllBins; } @Override public ByteArrayRange[] getBinRanges() { return binRanges; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/DataTypeStatisticQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.BinConstraints; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Statistic query implementation for data type statistics. */ public class DataTypeStatisticQuery, R> extends AbstractStatisticQuery { private final String typeName; public DataTypeStatisticQuery( final StatisticType statisticType, final String typeName, final String tag, final BinConstraints binConstraints, final String[] authorizations) { super(statisticType, tag, binConstraints, authorizations); this.typeName = typeName; } public String typeName() { return typeName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/DataTypeStatisticQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; /** * Statistic query builder implementation for data type statistics. */ public class DataTypeStatisticQueryBuilder, R> extends AbstractStatisticQueryBuilder> { protected String typeName = null; public DataTypeStatisticQueryBuilder(final DataTypeStatisticType type) { super(type); } public DataTypeStatisticQueryBuilder typeName(final String typeName) { this.typeName = typeName; return this; } @Override public AbstractStatisticQuery build() { final String[] authorizationsArray = authorizations.toArray(new String[authorizations.size()]); return new DataTypeStatisticQuery<>( statisticType, typeName, tag, binConstraints, authorizationsArray); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/FieldStatisticQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.BinConstraints; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Statistic query implementation for field statistics. */ public class FieldStatisticQuery, R> extends AbstractStatisticQuery { private final String typeName; private final String fieldName; public FieldStatisticQuery( final StatisticType statisticType, final String typeName, final String fieldName, final String tag, final BinConstraints binConstraints, final String[] authorizations) { super(statisticType, tag, binConstraints, authorizations); this.typeName = typeName; this.fieldName = fieldName; } public String typeName() { return typeName; } public String fieldName() { return fieldName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/FieldStatisticQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType; /** * Statistic query builder implementation for field statistics. */ public class FieldStatisticQueryBuilder, R> extends AbstractStatisticQueryBuilder> { protected String typeName = null; protected String fieldName = null; public FieldStatisticQueryBuilder(final FieldStatisticType type) { super(type); } public FieldStatisticQueryBuilder typeName(final String typeName) { this.typeName = typeName; return this; } public FieldStatisticQueryBuilder fieldName(final String fieldName) { this.fieldName = fieldName; return this; } @Override public AbstractStatisticQuery build() { final String[] authorizationsArray = authorizations.toArray(new String[authorizations.size()]); return new FieldStatisticQuery<>( statisticType, typeName, fieldName, tag, binConstraints, authorizationsArray); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/IndexStatisticQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.BinConstraints; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.StatisticType; /** * Statistic query implementation for index statistics. */ public class IndexStatisticQuery, R> extends AbstractStatisticQuery { private final String indexName; public IndexStatisticQuery( final StatisticType statisticType, final String indexName, final String tag, final BinConstraints binConstraints, final String[] authorizations) { super(statisticType, tag, binConstraints, authorizations); this.indexName = indexName; } public String indexName() { return indexName; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/query/IndexStatisticQueryBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.query; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.index.IndexStatisticType; /** * Statistic query builder implementation for index statistics. */ public class IndexStatisticQueryBuilder, R> extends AbstractStatisticQueryBuilder> { protected String indexName = null; public IndexStatisticQueryBuilder(final IndexStatisticType type) { super(type); } public IndexStatisticQueryBuilder indexName(final String indexName) { this.indexName = indexName; return this; } @Override public AbstractStatisticQuery build() { final String[] authorizationsArray = authorizations.toArray(new String[authorizations.size()]); return new IndexStatisticQuery<>( statisticType, indexName, tag, binConstraints, authorizationsArray); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/visibility/DefaultStatisticVisibility.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.visibility; import org.locationtech.geowave.core.store.EntryVisibilityHandler; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.flatten.BitmaskUtils; /** * This assigns the visibility of the key-value with the most-significant field bitmask (the first * fields in the bitmask are the indexed fields, and all indexed fields should be the default * visibility which should be the minimal set of visibility constraints of any field) */ public class DefaultStatisticVisibility implements EntryVisibilityHandler { @Override public byte[] getVisibility(final T entry, final GeoWaveRow... kvs) { if (kvs.length == 1 && kvs[0].getFieldValues().length == 1) { return kvs[0].getFieldValues()[0].getVisibility(); } int lowestOrdinal = Integer.MAX_VALUE; byte[] lowestOrdinalVisibility = null; for (final GeoWaveRow kv : kvs) { for (final GeoWaveValue v : kv.getFieldValues()) { final int pos = BitmaskUtils.getLowestFieldPosition(v.getFieldMask()); if (pos == 0) { return v.getVisibility(); } if (pos <= lowestOrdinal) { lowestOrdinal = pos; lowestOrdinalVisibility = v.getVisibility(); } } } return lowestOrdinalVisibility; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/statistics/visibility/FieldDependentStatisticVisibility.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.visibility; import java.util.Arrays; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.stream.Collectors; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.EntryVisibilityHandler; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.flatten.BitmaskUtils; import org.locationtech.geowave.core.store.index.CommonIndexModel; import com.google.common.collect.Sets; /** * Supplies visibility for a given field based on the bit position of that field in the index model. */ public class FieldDependentStatisticVisibility implements EntryVisibilityHandler { private final byte[] fieldBitmask; public FieldDependentStatisticVisibility( final CommonIndexModel model, final InternalDataAdapter adapter, final String... fieldNames) { final SortedSet bitPositions = Arrays.stream(fieldNames).map( field -> adapter.getPositionOfOrderedField(model, field)).collect( Collectors.toCollection(TreeSet::new)); this.fieldBitmask = BitmaskUtils.generateCompositeBitmask(bitPositions); } @Override public byte[] getVisibility(final T entry, final GeoWaveRow... 
kvs) { if ((kvs.length == 1) && (kvs[0].getFieldValues().length == 1)) { return kvs[0].getFieldValues()[0].getVisibility(); } final Set visibilities = Sets.newHashSet(); for (final GeoWaveRow r : kvs) { for (final GeoWaveValue v : r.getFieldValues()) { if ((v.getFieldMask() != null) && (v.getFieldMask().length > 0)) { if (BitmaskUtils.bitmaskOverlaps(v.getFieldMask(), fieldBitmask)) { visibilities.add(StringUtils.stringFromBinary(v.getVisibility())); } } } } if (visibilities.size() == 1) { return StringUtils.stringToBinary(visibilities.iterator().next()); } else if (visibilities.size() > 1) { // This will combine all different visibilities using an AND operator. For example a // visibility of A and B will result in (A)&(B). Each token is wrapped in parentheses to // account for more complex visibility expressions. return StringUtils.stringToBinary( visibilities.stream().map(token -> "(" + token + ")").collect(Collectors.joining("&"))); } return new byte[0]; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/AsyncNativeEntryIteratorWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.util;

import java.util.Iterator;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrieval;
import org.locationtech.geowave.core.store.base.dataidx.BatchDataIndexRetrievalIteratorHelper;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * Native entry iterator that retrieves data-index values asynchronously in batches, delegating
 * batch bookkeeping to a {@link BatchDataIndexRetrievalIteratorHelper}.
 *
 * @param <T> the entry type produced by the iterator
 */
public class AsyncNativeEntryIteratorWrapper<T> extends NativeEntryIteratorWrapper<T> {
  // NOTE(review): generic parameters reconstructed from context — confirm against
  // BatchDataIndexRetrievalIteratorHelper's declaration
  private final BatchDataIndexRetrievalIteratorHelper<T, T> batchHelper;

  public AsyncNativeEntryIteratorWrapper(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final Index index,
      final Iterator<GeoWaveRow> scannerIt,
      final QueryFilter[] clientFilters,
      final ScanCallback<T, ? extends GeoWaveRow> scanCallback,
      final byte[] fieldSubsetBitmask,
      final double[] maxResolutionSubsamplingPerDimension,
      final boolean decodePersistenceEncoding,
      final BatchDataIndexRetrieval dataIndexRetrieval) {
    super(
        adapterStore,
        mappingStore,
        index,
        scannerIt,
        clientFilters,
        scanCallback,
        fieldSubsetBitmask,
        maxResolutionSubsamplingPerDimension,
        decodePersistenceEncoding,
        dataIndexRetrieval);
    batchHelper = new BatchDataIndexRetrievalIteratorHelper<>(dataIndexRetrieval);
  }

  @Override
  protected T decodeRow(
      final GeoWaveRow row,
      final QueryFilter[] clientFilters,
      final Index index) {
    // let the batch helper intercept decoded rows so async retrievals can be registered
    final T retVal = super.decodeRow(row, clientFilters, index);
    return batchHelper.postDecodeRow(retVal);
  }

  @Override
  public boolean hasNext() {
    batchHelper.preHasNext();
    return super.hasNext();
  }

  @Override
  protected void findNext() {
    super.findNext();
    final boolean hasNextValue = (nextValue != null);
    // if the scanner produced nothing, a previously-registered async batch may still
    // supply the next value
    final T batchNextValue = batchHelper.postFindNext(hasNextValue, hasNextScannedResult());
    if (!hasNextValue) {
      nextValue = batchNextValue;
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/ClasspathUtils.java
================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.io.File; import java.io.FileFilter; import java.io.FileOutputStream; import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.jar.Attributes; import java.util.jar.JarOutputStream; import java.util.jar.Manifest; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.impl.VFSClassLoader; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.store.spi.ClassLoaderTransformerSpi; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ClasspathUtils { private static final Logger LOGGER = LoggerFactory.getLogger(ClasspathUtils.class); private static List transformerList = null; public static String setupPathingJarClassPath( final File dir, final Class context, final URL... additionalClasspathUrls) throws IOException { return setupPathingJarClassPath( new File(dir.getParentFile().getAbsolutePath() + File.separator + "pathing", "pathing.jar"), null, context, additionalClasspathUrls); } public static String setupPathingJarClassPath( final File jarFile, final String mainClass, final Class context, final URL... 
additionalClasspathUrls) throws IOException { final File jarDir = jarFile.getParentFile(); final String classpath = getClasspath(context, additionalClasspathUrls); if (!jarDir.exists()) { try { jarDir.mkdirs(); } catch (final Exception e) { LOGGER.error("Failed to create pathing jar directory: " + e); return null; } } if (jarFile.exists()) { try { jarFile.delete(); } catch (final Exception e) { LOGGER.error("Failed to delete old pathing jar: " + e); return null; } } // build jar final Manifest manifest = new Manifest(); manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classpath); if (mainClass != null) { manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, mainClass); } // HP Fortify "Improper Resource Shutdown or Release" false positive // target is inside try-as-resource clause (and is auto-closeable) and // the FileOutputStream // is closed implicitly by target.close() try (final JarOutputStream target = new JarOutputStream(new FileOutputStream(jarFile), manifest)) { target.close(); } return jarFile.getAbsolutePath(); } private static String getClasspath(final Class context, final URL... 
additionalUrls) throws IOException { try { final ArrayList classloaders = new ArrayList<>(); ClassLoader cl = context.getClassLoader(); while (cl != null) { classloaders.add(cl); cl = cl.getParent(); } Collections.reverse(classloaders); final StringBuilder classpathBuilder = new StringBuilder(); for (final URL u : additionalUrls) { append(classpathBuilder, u); } // assume 0 is the system classloader and skip it for (int i = 0; i < classloaders.size(); i++) { final ClassLoader classLoader = classloaders.get(i); if (classLoader instanceof URLClassLoader) { for (final URL u : ((URLClassLoader) classLoader).getURLs()) { append(classpathBuilder, u); } } else if (classLoader instanceof VFSClassLoader) { final VFSClassLoader vcl = (VFSClassLoader) classLoader; for (final FileObject f : vcl.getFileObjects()) { append(classpathBuilder, f.getURL()); } } else { throw new IllegalArgumentException( "Unknown classloader type : " + classLoader.getClass().getName()); } } classpathBuilder.deleteCharAt(0); return classpathBuilder.toString(); } catch (final URISyntaxException e) { throw new IOException(e); } } private static void append(final StringBuilder classpathBuilder, final URL url) throws URISyntaxException { final File file = new File(url.toURI()); // do not include dirs containing hadoop or accumulo site files if (!containsSiteFile(file)) { final int index = file.getAbsolutePath().indexOf(":\\"); String windowsFriendlyPath; if (index > 0) { windowsFriendlyPath = "file:/" + file.getAbsolutePath().substring(0, index) + ":/" + file.getAbsolutePath().substring(index + 2); } else { windowsFriendlyPath = file.getAbsolutePath(); } classpathBuilder.append(" ").append(windowsFriendlyPath.replace("\\", "/")); if (file.isDirectory()) { classpathBuilder.append("/"); } } } private static boolean containsSiteFile(final File f) { if (f.isDirectory()) { final File[] sitefile = f.listFiles(new FileFilter() { @Override public boolean accept(final File pathname) { return 
pathname.getName().endsWith("site.xml"); } }); return (sitefile != null) && (sitefile.length > 0); } return false; } public static synchronized ClassLoader transformClassLoader(final ClassLoader classLoader) { if (transformerList == null) { final Iterator transformers = new SPIServiceRegistry(ClassLoaderTransformerSpi.class).load( ClassLoaderTransformerSpi.class); transformerList = new ArrayList<>(); while (transformers.hasNext()) { transformerList.add(transformers.next()); } } for (final ClassLoaderTransformerSpi t : transformerList) { final ClassLoader cl = t.transform(classLoader); if (cl != null) { return cl; } } return null; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/CompoundHierarchicalIndexStrategyWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.ArrayList; import java.util.List; import org.locationtech.geowave.core.index.CompoundIndexStrategy; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.PartitionIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.index.simple.RoundRobinKeyIndexStrategy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class wraps the first occurrence of a hierarchical index within a compound index such that * sub strategies within the hierarchy are replaced maintaining the rest of the structure of the * compound index */ public class CompoundHierarchicalIndexStrategyWrapper implements HierarchicalNumericIndexStrategy { private static final Logger LOGGER = LoggerFactory.getLogger(CompoundHierarchicalIndexStrategyWrapper.class); private List parentStrategies; private HierarchicalNumericIndexStrategy firstHierarchicalStrategy; public CompoundHierarchicalIndexStrategyWrapper( final List 
parentStrategies, final HierarchicalNumericIndexStrategy firstHierarchicalStrategy) { this.parentStrategies = parentStrategies; this.firstHierarchicalStrategy = firstHierarchicalStrategy; } public CompoundHierarchicalIndexStrategyWrapper() { super(); } @Override public SubStrategy[] getSubStrategies() { // for these substrategies we need to replace the last parent strategy's // hierarchical index strategy with the underlying substrategy index // strategy final SubStrategy[] subStrategies = firstHierarchicalStrategy.getSubStrategies(); final SubStrategy[] retVal = new SubStrategy[subStrategies.length]; for (int i = 0; i < subStrategies.length; i++) { NumericIndexStrategy currentStrategyToBeReplaced = firstHierarchicalStrategy; NumericIndexStrategy currentStrategyReplacement = subStrategies[i].getIndexStrategy(); for (int j = parentStrategies.size() - 1; j >= 0; j--) { // traverse parents in reverse order final CompoundIndexStrategy parent = parentStrategies.get(j); if (parent.getPrimarySubStrategy().equals(currentStrategyToBeReplaced)) { // replace primary currentStrategyReplacement = new CompoundIndexStrategy( currentStrategyReplacement, parent.getSecondarySubStrategy()); } else { // replace secondary currentStrategyReplacement = new CompoundIndexStrategy(parent.getPrimarySubStrategy(), currentStrategyReplacement); } currentStrategyToBeReplaced = parent; } retVal[i] = new SubStrategy(currentStrategyReplacement, subStrategies[i].getPrefix()); } return retVal; } @Override public byte[] toBinary() { return PersistenceUtils.toBinary(parentStrategies.get(0)); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final IndexMetaData... 
hints) { return parentStrategies.get(0).getQueryRanges(indexedRange, hints); } @Override public void fromBinary(final byte[] bytes) { final CompoundIndexStrategy rootStrategy = (CompoundIndexStrategy) PersistenceUtils.fromBinary(bytes); parentStrategies = new ArrayList<>(); // discover hierarchy firstHierarchicalStrategy = findHierarchicalStrategy(rootStrategy, parentStrategies); } @Override public QueryRanges getQueryRanges( final MultiDimensionalNumericData indexedRange, final int maxEstimatedRangeDecomposition, final IndexMetaData... hints) { return parentStrategies.get(0).getQueryRanges( indexedRange, maxEstimatedRangeDecomposition, hints); } @Override public NumericDimensionDefinition[] getOrderedDimensionDefinitions() { return parentStrategies.get(0).getOrderedDimensionDefinitions(); } @Override public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) { return parentStrategies.get(0).getInsertionIds(indexedData); } @Override public double[] getHighestPrecisionIdRangePerDimension() { return parentStrategies.get(0).getHighestPrecisionIdRangePerDimension(); } @Override public int getPartitionKeyLength() { return parentStrategies.get(0).getPartitionKeyLength(); } @Override public InsertionIds getInsertionIds( final MultiDimensionalNumericData indexedData, final int maxEstimatedDuplicateIds) { return parentStrategies.get(0).getInsertionIds(indexedData, maxEstimatedDuplicateIds); } @Override public MultiDimensionalNumericData getRangeForId( final byte[] partitionKey, final byte[] sortKey) { return parentStrategies.get(0).getRangeForId(partitionKey, sortKey); } @Override public String getId() { return parentStrategies.get(0).getId(); } @Override public List createMetaData() { return parentStrategies.get(0).createMetaData(); } public static HierarchicalNumericIndexStrategy findHierarchicalStrategy( final NumericIndexStrategy indexStrategy) { final List parentStrategies = new ArrayList<>(); final HierarchicalNumericIndexStrategy 
firstHierarchicalStrategy = findHierarchicalStrategy(indexStrategy, parentStrategies); if (firstHierarchicalStrategy == null) { return null; } else if (parentStrategies.isEmpty()) { return firstHierarchicalStrategy; } else { return new CompoundHierarchicalIndexStrategyWrapper( parentStrategies, firstHierarchicalStrategy); } } public static HierarchicalNumericIndexStrategy findHierarchicalStrategy( final NumericIndexStrategy indexStrategy, final List parentStrategies) { if (indexStrategy instanceof HierarchicalNumericIndexStrategy) { return (HierarchicalNumericIndexStrategy) indexStrategy; } if (indexStrategy instanceof CompoundIndexStrategy) { final PartitionIndexStrategy primaryIndex = ((CompoundIndexStrategy) indexStrategy).getPrimarySubStrategy(); final NumericIndexStrategy secondaryIndex = ((CompoundIndexStrategy) indexStrategy).getSecondarySubStrategy(); // warn if round robin is used if (primaryIndex instanceof RoundRobinKeyIndexStrategy) { LOGGER.warn("Round Robin partitioning won't work correctly with raster merge strategies"); } else if (secondaryIndex instanceof RoundRobinKeyIndexStrategy) { LOGGER.warn("Round Robin partitioning won't work correctly with raster merge strategies"); } final HierarchicalNumericIndexStrategy secondary = findHierarchicalStrategy(secondaryIndex); if (secondary != null) { // add it to beginning because we are recursing back from the // leaf strategy up to the parent parentStrategies.add(0, (CompoundIndexStrategy) indexStrategy); return secondary; } } return null; } @Override public MultiDimensionalCoordinates getCoordinatesPerDimension( final byte[] partitionKey, final byte[] sortKey) { return parentStrategies.get(0).getCoordinatesPerDimension(partitionKey, sortKey); } @Override public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension( final MultiDimensionalNumericData dataRange, final IndexMetaData... 
hints) { return parentStrategies.get(0).getCoordinateRangesPerDimension(dataRange, hints); } @Override public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) { return parentStrategies.get(0).getInsertionPartitionKeys(insertionData); } @Override public byte[][] getQueryPartitionKeys( final MultiDimensionalNumericData queryData, final IndexMetaData... hints) { return parentStrategies.get(0).getQueryPartitionKeys(queryData, hints); } @Override public byte[][] getPredefinedSplits() { return parentStrategies.get(0).getPredefinedSplits(); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/DataAdapterAndIndexCache.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; public class DataAdapterAndIndexCache { private static Map CACHE_MAP = new HashMap<>(); public static synchronized DataAdapterAndIndexCache getInstance( final String cacheId, final String gwNamespace, final String storeType) { final String qualifiedId = (((gwNamespace != null) && !gwNamespace.isEmpty()) ? cacheId + "_" + gwNamespace : cacheId) + "_" + storeType; DataAdapterAndIndexCache instance = CACHE_MAP.get(qualifiedId); if (instance == null) { instance = new DataAdapterAndIndexCache(); CACHE_MAP.put(qualifiedId, instance); } return instance; } private final Set cache = new HashSet<>(); // TODO: there should techinically be a notion of geowave datastore in here, // as multiple different datastores (perhaps simply different gwNamespaces) // could use the same adapter and index public synchronized boolean add(final short internalAdapterId, final String indexId) { if (cache.contains(new DataAdapterAndIndex(internalAdapterId, indexId))) { return true; } else { cache.add(new DataAdapterAndIndex(internalAdapterId, indexId)); return false; } } public synchronized void deleteIndex(final String indexId) { final Iterator it = cache.iterator(); while (it.hasNext()) { if (indexId.equals(it.next().indexId)) { it.remove(); } } } public synchronized void deleteAll() { cache.clear(); } private static class DataAdapterAndIndex { private final short internalAdapterId; private final String indexId; public DataAdapterAndIndex(final short internalAdapterId, final 
String indexId) { this.internalAdapterId = internalAdapterId; this.indexId = indexId; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((indexId == null) ? 0 : indexId.hashCode()); result = (prime * result) + internalAdapterId; return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final DataAdapterAndIndex other = (DataAdapterAndIndex) obj; if (indexId == null) { if (other.indexId != null) { return false; } } else if (!indexId.equals(other.indexId)) { return false; } if (internalAdapterId != other.internalAdapterId) { return false; } return true; } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/DataStoreUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.io.File; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.UUID; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import 
org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.entities.GeoWaveKey; import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl; import org.locationtech.geowave.core.store.entities.GeoWaveMetadata; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.entities.GeoWaveValueImpl; import org.locationtech.geowave.core.store.flatten.BitmaskUtils; import org.locationtech.geowave.core.store.flatten.FlattenedDataSet; import org.locationtech.geowave.core.store.flatten.FlattenedFieldInfo; import org.locationtech.geowave.core.store.flatten.FlattenedUnreadData; import org.locationtech.geowave.core.store.flatten.FlattenedUnreadDataSingleRow; import 
org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.DataStoreOperations; import org.locationtech.geowave.core.store.operations.MetadataDeleter; import org.locationtech.geowave.core.store.operations.MetadataQuery; import org.locationtech.geowave.core.store.operations.MetadataReader; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.core.store.operations.RangeReaderParams; import org.locationtech.geowave.core.store.operations.ReaderParamsBuilder; import org.locationtech.geowave.core.store.operations.RowDeleter; import org.locationtech.geowave.core.store.operations.RowReader; import org.locationtech.geowave.core.store.operations.RowWriter; import org.locationtech.geowave.core.store.query.constraints.CustomQueryConstraints.InternalCustomConstraints; import org.locationtech.geowave.core.store.query.options.CommonQueryOptions.HintKey; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.ParameterException; import com.clearspring.analytics.util.Lists; import com.google.common.collect.Maps; /* */ public class DataStoreUtils { private static final Logger LOGGER = LoggerFactory.getLogger(DataStoreUtils.class); public static String DEFAULT_GEOWAVE_DIRECTORY = 
System.getProperty("user.home") + File.separator + "geowave"; public static HintKey MAX_RESOLUTION_SUBSAMPLING_PER_DIMENSION = new HintKey<>(double[].class); public static HintKey MAX_RANGE_DECOMPOSITION = new HintKey<>(Integer.class); public static HintKey TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX = new HintKey<>(double[].class); // we append a 0 byte, 8 bytes of timestamp, and 16 bytes of UUID public static final int UNIQUE_ADDED_BYTES = 1 + 8 + 16; public static final byte UNIQUE_ID_DELIMITER = 0; public static final VisibilityHandler UNCONSTRAINED_VISIBILITY = new UnconstrainedVisibilityHandler(); public static final byte[] EMTPY_VISIBILITY = new byte[] {}; public static DataTypeAdapter getDataAdapter( final DataStorePluginOptions dataStore, final String typeName) { final Short adapterId = dataStore.createInternalAdapterStore().getAdapterId(typeName); if (adapterId == null) { return null; } final DataTypeAdapter adapter = dataStore.createAdapterStore().getAdapter(adapterId); if (adapter == null) { return null; } return adapter; } public static FlattenedUnreadData aggregateFieldData( final GeoWaveKey key, final GeoWaveValue value, final PersistentDataset commonData, final CommonIndexModel model, final List commonIndexFieldIds) { final byte[] fieldMask = value.getFieldMask(); final byte[] valueBytes = value.getValue(); final FlattenedDataSet dataSet = DataStoreUtils.decomposeFlattenedFields( fieldMask, valueBytes, value.getVisibility(), commonIndexFieldIds.size() - 1); final List fieldInfos = dataSet.getFieldsRead(); for (final FlattenedFieldInfo fieldInfo : fieldInfos) { final int ordinal = fieldInfo.getFieldPosition(); if (ordinal < commonIndexFieldIds.size()) { final String commonIndexFieldName = commonIndexFieldIds.get(ordinal); final FieldReader reader = model.getReader(commonIndexFieldName); if (reader != null) { final Object fieldValue = reader.readField(fieldInfo.getValue()); commonData.addValue(commonIndexFieldName, fieldValue); } else { 
LOGGER.error("Could not find reader for common index field: " + commonIndexFieldName); } } } return dataSet.getFieldsDeferred(); } public static boolean startsWithIfPrefix( final byte[] source, final byte[] match, final boolean prefix) { if (!prefix) { if (match.length != (source.length)) { return false; } } else if (match.length > (source.length)) { return false; } return ByteArrayUtils.startsWith(source, match); } public static List getUniqueDimensionFields(final CommonIndexModel model) { final List dimensionFieldIds = new ArrayList<>(); for (final NumericDimensionField dimension : model.getDimensions()) { if (!dimensionFieldIds.contains(dimension.getFieldName())) { dimensionFieldIds.add(dimension.getFieldName()); } } return dimensionFieldIds; } public static long cardinality( final DataStatisticsStore statisticsStore, final RowRangeHistogramStatistic rowRangeHistogramStatistic, final DataTypeAdapter adapter, final Index index, final QueryRanges queryRanges) { long count = 0; for (final SinglePartitionQueryRanges partitionRange : queryRanges.getPartitionQueryRanges()) { final RowRangeHistogramValue value = statisticsStore.getStatisticValue( rowRangeHistogramStatistic, CompositeBinningStrategy.getBin( DataTypeBinningStrategy.getBin(adapter), PartitionBinningStrategy.getBin(partitionRange.getPartitionKey()))); if (value == null) { return Long.MAX_VALUE - 1; } for (final ByteArrayRange range : partitionRange.getSortKeyRanges()) { count += value.cardinality(range.getStart(), range.getEnd()); } } return count; } @SuppressWarnings({"rawtypes", "unchecked"}) public static InsertionIds getInsertionIdsForEntry( final T entry, final InternalDataAdapter adapter, final AdapterToIndexMapping indexMapping, final Index index) { if (index instanceof CustomIndexStrategy) { return ((CustomIndexStrategy) index).getInsertionIds(entry); } else { final AdapterPersistenceEncoding encoding = adapter.encode(entry, indexMapping, index); return encoding.getInsertionIds(index); } } public 
static InsertionIds keysToInsertionIds(final GeoWaveKey... geoWaveKeys) { final Map> sortKeysPerPartition = new HashMap<>(); for (final GeoWaveKey key : geoWaveKeys) { final ByteArray partitionKey = new ByteArray(key.getPartitionKey()); List sortKeys = sortKeysPerPartition.get(partitionKey); if (sortKeys == null) { sortKeys = new ArrayList<>(); sortKeysPerPartition.put(partitionKey, sortKeys); } sortKeys.add(key.getSortKey()); } final Set insertionIds = new HashSet<>(); for (final Entry> e : sortKeysPerPartition.entrySet()) { insertionIds.add(new SinglePartitionInsertionIds(e.getKey().getBytes(), e.getValue())); } return new InsertionIds(insertionIds); } public static boolean rowIdsMatch(final GeoWaveKey rowId1, final GeoWaveKey rowId2) { if (!Arrays.equals(rowId1.getPartitionKey(), rowId2.getPartitionKey()) || !Arrays.equals(rowId1.getSortKey(), rowId2.getSortKey()) || (rowId1.getAdapterId() != rowId2.getAdapterId())) { return false; } if (Arrays.equals(rowId1.getDataId(), rowId2.getDataId())) { return true; } return Arrays.equals(rowId1.getDataId(), rowId2.getDataId()); } public static byte[] removeUniqueId(byte[] dataId) { if ((dataId.length < UNIQUE_ADDED_BYTES) || (dataId[dataId.length - UNIQUE_ADDED_BYTES] != UNIQUE_ID_DELIMITER)) { return dataId; } dataId = Arrays.copyOfRange(dataId, 0, dataId.length - UNIQUE_ADDED_BYTES); return dataId; } /** * Takes a byte array representing a serialized composite group of FieldInfos sharing a common * visibility and returns a List of the individual FieldInfos * * @param bitmask the composite bitmask representing the fields contained within the * flattenedValue * @param flattenedValue the serialized composite FieldInfo * @param commonVisibility the shared visibility * @param maxFieldPosition can short-circuit read and defer decomposition of fields after a given * position * @return the dataset that has been read */ public static FlattenedDataSet decomposeFlattenedFields( final byte[] bitmask, final byte[] flattenedValue, 
final byte[] commonVisibility, final int maxFieldPosition) { final List fieldInfoList = new LinkedList<>(); if ((flattenedValue != null) && (flattenedValue.length > 0)) { if ((bitmask != null) && (bitmask.length > 0)) { final List fieldPositions = BitmaskUtils.getFieldPositions(bitmask); final boolean sharedVisibility = fieldPositions.size() > 1; if (sharedVisibility) { final ByteBuffer input = ByteBuffer.wrap(flattenedValue); for (int i = 0; i < fieldPositions.size(); i++) { final Integer fieldPosition = fieldPositions.get(i); if ((maxFieldPosition > -2) && (fieldPosition > maxFieldPosition)) { return new FlattenedDataSet( fieldInfoList, new FlattenedUnreadDataSingleRow(input, i, fieldPositions)); } final int fieldLength = VarintUtils.readUnsignedInt(input); final byte[] fieldValueBytes = ByteArrayUtils.safeRead(input, fieldLength); fieldInfoList.add(new FlattenedFieldInfo(fieldPosition, fieldValueBytes)); } } else { fieldInfoList.add(new FlattenedFieldInfo(fieldPositions.get(0), flattenedValue)); } } else { // assume fields are in positional order final ByteBuffer input = ByteBuffer.wrap(flattenedValue); for (int i = 0; input.hasRemaining(); i++) { final Integer fieldPosition = i; final int fieldLength = VarintUtils.readUnsignedInt(input); final byte[] fieldValueBytes = ByteArrayUtils.safeRead(input, fieldLength); fieldInfoList.add(new FlattenedFieldInfo(fieldPosition, fieldValueBytes)); } } } return new FlattenedDataSet(fieldInfoList, null); } public static QueryRanges constraintsToQueryRanges( final List constraints, final Index index, final double[] targetResolutionPerDimensionForHierarchicalIndex, final int maxRanges, final IndexMetaData... 
hints) { if ((index instanceof CustomIndex) && (constraints != null) && (constraints.size() == 1) && (constraints.get(0) instanceof InternalCustomConstraints)) { return ((CustomIndex) index).getQueryRanges( ((InternalCustomConstraints) constraints.get(0)).getCustomConstraints()); } NumericIndexStrategy indexStrategy = index.getIndexStrategy(); SubStrategy targetIndexStrategy = null; if ((targetResolutionPerDimensionForHierarchicalIndex != null) && (targetResolutionPerDimensionForHierarchicalIndex.length == indexStrategy.getOrderedDimensionDefinitions().length)) { // determine the correct tier to query for the given resolution final HierarchicalNumericIndexStrategy strategy = CompoundHierarchicalIndexStrategyWrapper.findHierarchicalStrategy(indexStrategy); if (strategy != null) { final TreeMap sortedStrategies = new TreeMap<>(); for (final SubStrategy subStrategy : strategy.getSubStrategies()) { final double[] idRangePerDimension = subStrategy.getIndexStrategy().getHighestPrecisionIdRangePerDimension(); double rangeSum = 0; for (final double range : idRangePerDimension) { rangeSum += range; } // sort by the sum of the range in each dimension sortedStrategies.put(rangeSum, subStrategy); } for (final SubStrategy subStrategy : sortedStrategies.descendingMap().values()) { final double[] highestPrecisionIdRangePerDimension = subStrategy.getIndexStrategy().getHighestPrecisionIdRangePerDimension(); // if the id range is less than or equal to the target // resolution in each dimension, use this substrategy boolean withinTargetResolution = true; for (int d = 0; d < highestPrecisionIdRangePerDimension.length; d++) { if (highestPrecisionIdRangePerDimension[d] > targetResolutionPerDimensionForHierarchicalIndex[d]) { withinTargetResolution = false; break; } } if (withinTargetResolution) { targetIndexStrategy = subStrategy; break; } } if (targetIndexStrategy == null) { // if there is not a substrategy that is within the target // resolution, use the first substrategy (the lowest 
range // per dimension, which is the highest precision) targetIndexStrategy = sortedStrategies.firstEntry().getValue(); } indexStrategy = targetIndexStrategy.getIndexStrategy(); } } if ((constraints == null) || constraints.isEmpty()) { if (targetIndexStrategy != null) { // at least use the prefix of a substrategy if chosen return new QueryRanges(new byte[][] {targetIndexStrategy.getPrefix()}); } return new QueryRanges(); // implies in negative and // positive infinity } else { final List ranges = new ArrayList<>(constraints.size()); for (final MultiDimensionalNumericData nd : constraints) { ranges.add(indexStrategy.getQueryRanges(nd, maxRanges, hints)); } return ranges.size() > 1 ? new QueryRanges(ranges) : ranges.get(0); } } public static String getQualifiedTableName( final String tableNamespace, final String unqualifiedTableName) { return ((tableNamespace == null) || tableNamespace.isEmpty()) ? unqualifiedTableName : tableNamespace + "_" + unqualifiedTableName; } public static ByteArray ensureUniqueId(final byte[] id, final boolean hasMetadata) { final ByteBuffer buf = ByteBuffer.allocate(id.length + UNIQUE_ADDED_BYTES); byte[] metadata = null; byte[] dataId; if (hasMetadata) { final int metadataStartIdx = id.length - 12; final byte[] lengths = Arrays.copyOfRange(id, metadataStartIdx, id.length); final ByteBuffer lengthsBuf = ByteBuffer.wrap(lengths); final int adapterIdLength = lengthsBuf.getInt(); int dataIdLength = lengthsBuf.getInt(); dataIdLength += UNIQUE_ADDED_BYTES; final int duplicates = lengthsBuf.getInt(); final ByteBuffer newLengths = ByteBuffer.allocate(12); newLengths.putInt(adapterIdLength); newLengths.putInt(dataIdLength); newLengths.putInt(duplicates); newLengths.rewind(); metadata = newLengths.array(); dataId = Arrays.copyOfRange(id, 0, metadataStartIdx); } else { dataId = id; } buf.put(dataId); final long timestamp = System.currentTimeMillis(); buf.put(new byte[] {UNIQUE_ID_DELIMITER}); final UUID uuid = UUID.randomUUID(); 
buf.putLong(timestamp); buf.putLong(uuid.getLeastSignificantBits()); buf.putLong(uuid.getMostSignificantBits()); if (hasMetadata) { buf.put(metadata); } return new ByteArray(buf.array()); } private static final byte[] OPEN_PAREN_BYTE = "(".getBytes(StringUtils.getGeoWaveCharset()); private static final byte[] MERGE_VIS_BYTES = ")&(".getBytes(StringUtils.getGeoWaveCharset()); private static final byte[] CLOSE_PAREN_BYTE = ")".getBytes(StringUtils.getGeoWaveCharset()); public static byte[] mergeVisibilities(final byte vis1[], final byte vis2[]) { if ((vis1 == null) || (vis1.length == 0)) { return vis2; } else if ((vis2 == null) || (vis2.length == 0)) { return vis1; } else if (Arrays.equals(vis1, vis2)) { return vis1; } final ByteBuffer buffer = ByteBuffer.allocate( vis1.length + OPEN_PAREN_BYTE.length + MERGE_VIS_BYTES.length + CLOSE_PAREN_BYTE.length + vis2.length); buffer.put(OPEN_PAREN_BYTE); buffer.put(vis1); buffer.put(MERGE_VIS_BYTES); buffer.put(vis2); buffer.put(CLOSE_PAREN_BYTE); return buffer.array(); } public static GeoWaveRow mergeSingleRowValues( final GeoWaveRow singleRow, final RowTransform rowTransform) { if (singleRow.getFieldValues().length < 2) { return singleRow; } // merge all values into a single value Mergeable merged = null; for (final GeoWaveValue fieldValue : singleRow.getFieldValues()) { final Mergeable mergeable = rowTransform.getRowAsMergeableObject( singleRow.getAdapterId(), new ByteArray(fieldValue.getFieldMask()), fieldValue.getValue()); if (merged == null) { merged = mergeable; } else { merged.merge(mergeable); } } final GeoWaveValue[] mergedFieldValues = new GeoWaveValue[] { new GeoWaveValueImpl( singleRow.getFieldValues()[0].getFieldMask(), singleRow.getFieldValues()[0].getVisibility(), rowTransform.getBinaryFromMergedObject(merged))}; return new GeoWaveRowImpl( new GeoWaveKeyImpl( singleRow.getDataId(), singleRow.getAdapterId(), singleRow.getPartitionKey(), singleRow.getSortKey(), singleRow.getNumberOfDuplicates()), 
mergedFieldValues); } @SuppressWarnings({"rawtypes", "unchecked"}) public static boolean mergeData( final DataStoreOperations operations, final Integer maxRangeDecomposition, final Index index, final PersistentAdapterStore adapterStore, final InternalAdapterStore internalAdapterStore, final AdapterIndexMappingStore adapterIndexMappingStore) { final RowDeleter deleter = operations.createRowDeleter(index.getName(), adapterStore, internalAdapterStore); try { final Map mergingAdapters = new HashMap<>(); final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter adapter : adapters) { if ((adapter.getAdapter() instanceof RowMergingDataAdapter) && (((RowMergingDataAdapter) adapter.getAdapter()).getTransform() != null)) { mergingAdapters.put(adapter.getAdapterId(), adapter); } } final ReaderParamsBuilder paramsBuilder = new ReaderParamsBuilder<>( index, adapterStore, adapterIndexMappingStore, internalAdapterStore, GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER).isClientsideRowMerging( true).maxRangeDecomposition(maxRangeDecomposition); final short[] adapterIds = new short[1]; for (final Entry adapter : mergingAdapters.entrySet()) { adapterIds[0] = adapter.getKey(); paramsBuilder.adapterIds(adapterIds); try (final RowWriter writer = operations.createWriter(index, adapter.getValue()); final RowReader reader = operations.createReader(paramsBuilder.build())) { final RewritingMergingEntryIterator iterator = new RewritingMergingEntryIterator( adapterStore, adapterIndexMappingStore, index, reader, Maps.transformValues(mergingAdapters, v -> v.getAdapter()), writer, deleter); while (iterator.hasNext()) { iterator.next(); } } catch (final Exception e) { LOGGER.error("Exception occurred while merging data.", e); throw new RuntimeException(e); } } } finally { try { deleter.close(); } catch (final Exception e) { LOGGER.warn("Exception occurred when closing deleter.", e); } } return true; } public static boolean isMergingIteratorRequired( final 
RangeReaderParams readerParams, final boolean visibilityEnabled) { return readerParams.isClientsideRowMerging() || (readerParams.isMixedVisibility() && visibilityEnabled); } public static List loadIndices(final IndexStore indexStore, final String indexNames) { final List loadedIndices = Lists.newArrayList(); // Is there a comma? final String[] indices = indexNames.split(","); for (final String idxName : indices) { final Index index = indexStore.getIndex(idxName); if (index == null) { throw new ParameterException("Unable to find index with name: " + idxName); } loadedIndices.add(index); } return Collections.unmodifiableList(loadedIndices); } public static List loadIndices(final DataStore dataStore, final String indexNames) { final List loadedIndices = Lists.newArrayList(); // Is there a comma? final String[] indices = indexNames.split(","); final Index[] dataStoreIndices = dataStore.getIndices(); for (final String idxName : indices) { boolean found = false; for (final Index index : dataStoreIndices) { if (index.getName().equals(idxName)) { loadedIndices.add(index); found = true; break; } } if (!found) { throw new ParameterException("Unable to find index with name: " + idxName); } } return Collections.unmodifiableList(loadedIndices); } public static void safeMetadataDelete( final MetadataDeleter deleter, final DataStoreOperations operations, final MetadataType metadataType, final MetadataQuery query) { // we need to respect visibilities although this may be much slower final MetadataReader reader = operations.createMetadataReader(metadataType); try (final CloseableIterator it = reader.query(query)) { while (it.hasNext()) { final GeoWaveMetadata entry = it.next(); deleter.delete( new MetadataQuery( entry.getPrimaryId(), entry.getSecondaryId(), query.getAuthorizations())); } } } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/GenericTypeResolver.java 
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /* * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.locationtech.geowave.core.store.util; import java.lang.ref.Reference; import java.lang.ref.WeakReference; import java.lang.reflect.Array; import java.lang.reflect.GenericArrayType; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.WeakHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is a derivative from hte Spring Framework library. Helper class for resolving generic * types against type variables. * *

Mainly intended for usage within the framework, resolving method parameter types even when * they are declared generically. * * @author Juergen Hoeller * @author Rob Harrop * @author Roy Clarkson * @since 1.0 */ public abstract class GenericTypeResolver { private static final Logger LOGGER = LoggerFactory.getLogger(GenericTypeResolver.class); /** Cache from Class to TypeVariable Map */ private static final Map, Reference, Type>>> typeVariableCache = Collections.synchronizedMap( new WeakHashMap, Reference, Type>>>()); /** * Resolve the single type argument of the given generic interface against the given target class * which is assumed to implement the generic interface and possibly declare a concrete type for * its type variable. * * @param clazz the target class to check against * @param genericIfc the generic interface or superclass to resolve the type argument from * @return the resolved type of the argument, or null if not resolvable */ public static Class resolveTypeArgument(final Class clazz, final Class genericIfc) { final Class[] typeArgs = resolveTypeArguments(clazz, genericIfc); if (typeArgs == null) { return null; } if (typeArgs.length != 1) { throw new IllegalArgumentException( "Expected 1 type argument on generic interface [" + genericIfc.getName() + "] but found " + typeArgs.length); } return typeArgs[0]; } /** * Resolve the type arguments of the given generic interface against the given target class which * is assumed to implement the generic interface and possibly declare concrete types for its type * variables. 
* * @param clazz the target class to check against * @param genericIfc the generic interface or superclass to resolve the type argument from * @return the resolved type of each argument, with the array size matching the number of actual * type arguments, or null if not resolvable */ public static Class[] resolveTypeArguments(final Class clazz, final Class genericIfc) { return doResolveTypeArguments(clazz, clazz, genericIfc); } private static Class[] doResolveTypeArguments( final Class ownerClass, Class classToIntrospect, final Class genericIfc) { while (classToIntrospect != null) { if (genericIfc.isInterface()) { final Type[] ifcs = classToIntrospect.getGenericInterfaces(); for (final Type ifc : ifcs) { final Class[] result = doResolveTypeArguments(ownerClass, ifc, genericIfc); if (result != null) { return result; } } } else { final Class[] result = doResolveTypeArguments( ownerClass, classToIntrospect.getGenericSuperclass(), genericIfc); if (result != null) { return result; } } classToIntrospect = classToIntrospect.getSuperclass(); } return null; } private static Class[] doResolveTypeArguments( final Class ownerClass, final Type ifc, final Class genericIfc) { if (ifc instanceof ParameterizedType) { final ParameterizedType paramIfc = (ParameterizedType) ifc; final Type rawType = paramIfc.getRawType(); if (genericIfc.equals(rawType)) { final Type[] typeArgs = paramIfc.getActualTypeArguments(); final Class[] result = new Class[typeArgs.length]; for (int i = 0; i < typeArgs.length; i++) { final Type arg = typeArgs[i]; result[i] = extractClass(ownerClass, arg); } return result; } else if (genericIfc.isAssignableFrom((Class) rawType)) { return doResolveTypeArguments(ownerClass, (Class) rawType, genericIfc); } } else if ((ifc != null) && genericIfc.isAssignableFrom((Class) ifc)) { return doResolveTypeArguments(ownerClass, (Class) ifc, genericIfc); } return null; } /** Extract a class instance from given Type. 
*/ private static Class extractClass(final Class ownerClass, Type arg) { if (arg instanceof ParameterizedType) { return extractClass(ownerClass, ((ParameterizedType) arg).getRawType()); } else if (arg instanceof GenericArrayType) { final GenericArrayType gat = (GenericArrayType) arg; final Type gt = gat.getGenericComponentType(); final Class componentClass = extractClass(ownerClass, gt); return Array.newInstance(componentClass, 0).getClass(); } else if (arg instanceof TypeVariable) { final TypeVariable tv = (TypeVariable) arg; arg = getTypeVariableMap(ownerClass).get(tv); if (arg == null) { arg = extractBoundForTypeVariable(tv); } else { arg = extractClass(ownerClass, arg); } } return (arg instanceof Class ? (Class) arg : Object.class); } /** * Resolve the specified generic type against the given TypeVariable map. * * @param genericType the generic type to resolve * @param typeVariableMap the TypeVariable Map to resolved against * @return the type if it resolves to a Class, or Object.class otherwise */ public static Class resolveType( final Type genericType, final Map, Type> typeVariableMap) { final Type rawType = getRawType(genericType, typeVariableMap); return (rawType instanceof Class ? (Class) rawType : Object.class); } /** * Determine the raw type for the given generic parameter type. 
* * @param genericType the generic type to resolve * @param typeVariableMap the TypeVariable Map to resolved against * @return the resolved raw type */ static Type getRawType(final Type genericType, final Map, Type> typeVariableMap) { Type resolvedType = genericType; if (genericType instanceof TypeVariable) { final TypeVariable tv = (TypeVariable) genericType; resolvedType = typeVariableMap.get(tv); if (resolvedType == null) { resolvedType = extractBoundForTypeVariable(tv); } } if (resolvedType instanceof ParameterizedType) { return ((ParameterizedType) resolvedType).getRawType(); } else { return resolvedType; } } /** * Build a mapping of {@link TypeVariable#getName TypeVariable names} to concrete {@link Class} * for the specified {@link Class}. Searches all super types, enclosing types and interfaces. */ public static Map, Type> getTypeVariableMap(final Class clazz) { final Reference, Type>> ref = typeVariableCache.get(clazz); Map, Type> typeVariableMap = (ref != null ? ref.get() : null); if (clazz == null) { throw new IllegalArgumentException("clazz can not be null"); } if (typeVariableMap == null) { typeVariableMap = new HashMap<>(); // interfaces extractTypeVariablesFromGenericInterfaces(clazz.getGenericInterfaces(), typeVariableMap); // super class Type genericType = clazz.getGenericSuperclass(); Class type = clazz.getSuperclass(); while ((type != null) && !Object.class.equals(type)) { if (genericType instanceof ParameterizedType) { final ParameterizedType pt = (ParameterizedType) genericType; populateTypeMapFromParameterizedType(pt, typeVariableMap); } extractTypeVariablesFromGenericInterfaces(type.getGenericInterfaces(), typeVariableMap); genericType = type.getGenericSuperclass(); type = type.getSuperclass(); } // enclosing class type = clazz; while (type.isMemberClass()) { genericType = type.getGenericSuperclass(); if (genericType instanceof ParameterizedType) { final ParameterizedType pt = (ParameterizedType) genericType; 
populateTypeMapFromParameterizedType(pt, typeVariableMap); } type = type.getEnclosingClass(); if (type == null) { LOGGER.error("type.getEnclosingClass() returned null"); return null; } } typeVariableCache.put(clazz, new WeakReference<>(typeVariableMap)); } return typeVariableMap; } /** Extracts the bound Type for a given {@link TypeVariable}. */ static Type extractBoundForTypeVariable(final TypeVariable typeVariable) { final Type[] bounds = typeVariable.getBounds(); if (bounds.length == 0) { return Object.class; } Type bound = bounds[0]; if (bound instanceof TypeVariable) { bound = extractBoundForTypeVariable((TypeVariable) bound); } return bound; } private static void extractTypeVariablesFromGenericInterfaces( final Type[] genericInterfaces, final Map, Type> typeVariableMap) { for (final Type genericInterface : genericInterfaces) { if (genericInterface instanceof ParameterizedType) { final ParameterizedType pt = (ParameterizedType) genericInterface; populateTypeMapFromParameterizedType(pt, typeVariableMap); if (pt.getRawType() instanceof Class) { extractTypeVariablesFromGenericInterfaces( ((Class) pt.getRawType()).getGenericInterfaces(), typeVariableMap); } } else if (genericInterface instanceof Class) { extractTypeVariablesFromGenericInterfaces( ((Class) genericInterface).getGenericInterfaces(), typeVariableMap); } } } /** * Read the {@link TypeVariable TypeVariables} from the supplied {@link ParameterizedType} and add * mappings corresponding to the {@link TypeVariable#getName TypeVariable name} -> concrete type * to the supplied {@link Map}. * *

Consider this case: * *

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.util;

import java.util.Iterator;
import java.util.NoSuchElementException;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.IndexUtils;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.adapter.exceptions.AdapterException;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.BaseDataStoreUtils;
import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval;
import org.locationtech.geowave.core.store.callback.ScanCallback;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;

/**
 * An {@link Iterator} that lazily transforms raw {@link GeoWaveRow}s from an underlying scanner
 * into decoded native entries of type {@code T}. Client-side query filters are applied during
 * decoding, and rows may additionally be skipped for resolution-based subsampling. Rows that do
 * not decode (filtered out, skipped, or belonging to an adapter that previously failed) are
 * consumed silently; {@link #hasNext()} only returns {@code true} once a row has decoded.
 */
public class NativeEntryIteratorWrapper<T> implements Iterator<T> {
  private final byte[] fieldSubsetBitmask;
  private final boolean decodePersistenceEncoding;
  /** Bit position on the sort key used for subsampling; {@code null} disables skip logic. */
  private Integer bitPosition = null;
  /** While subsampling, rows whose sort key is below this threshold are skipped. */
  private ByteArray skipUntilRow;
  /** Set once the skip logic has advanced past the end of the key space. */
  private boolean reachedEnd = false;
  /** Cleared after an {@link AdapterException}; further decode attempts would be pointless. */
  private boolean adapterValid = true;
  protected final DataIndexRetrieval dataIndexRetrieval;
  protected final PersistentAdapterStore adapterStore;
  protected final AdapterIndexMappingStore mappingStore;
  protected final Index index;
  protected final Iterator<GeoWaveRow> scannerIt;
  protected final QueryFilter[] clientFilters;
  protected final ScanCallback scanCallback;

  /** The next successfully decoded entry, or {@code null} when none is buffered. */
  protected T nextValue;

  /**
   * @param adapterStore used to look up adapters while decoding rows
   * @param mappingStore used to resolve adapter-to-index field mappings
   * @param index the index the raw rows were scanned from
   * @param scannerIt the underlying raw row iterator
   * @param clientFilters filters applied client-side during decoding; a row must pass to be
   *        returned
   * @param scanCallback notified during row decoding
   * @param fieldSubsetBitmask optional bitmask limiting which fields are decoded
   * @param maxResolutionSubsamplingPerDimension when non-null and non-empty, enables sort-key
   *        skipping so only one row per subsampling cell is decoded
   * @param decodePersistenceEncoding whether to fully decode the persistence encoding
   * @param dataIndexRetrieval optional retrieval of values from the data index
   */
  public NativeEntryIteratorWrapper(
      final PersistentAdapterStore adapterStore,
      final AdapterIndexMappingStore mappingStore,
      final Index index,
      final Iterator<GeoWaveRow> scannerIt,
      final QueryFilter[] clientFilters,
      final ScanCallback scanCallback,
      final byte[] fieldSubsetBitmask,
      final double[] maxResolutionSubsamplingPerDimension,
      final boolean decodePersistenceEncoding,
      final DataIndexRetrieval dataIndexRetrieval) {
    this.adapterStore = adapterStore;
    this.mappingStore = mappingStore;
    this.index = index;
    this.scannerIt = scannerIt;
    this.clientFilters = clientFilters;
    this.scanCallback = scanCallback;
    this.fieldSubsetBitmask = fieldSubsetBitmask;
    this.decodePersistenceEncoding = decodePersistenceEncoding;
    this.dataIndexRetrieval = dataIndexRetrieval;
    initializeBitPosition(maxResolutionSubsamplingPerDimension);
  }

  /** Advances the scanner until a row decodes successfully or the scanner is exhausted. */
  protected void findNext() {
    while ((nextValue == null) && hasNextScannedResult()) {
      final GeoWaveRow row = getNextEncodedResult();
      final T decodedValue = decodeRow(row, clientFilters, index);
      if (decodedValue != null) {
        nextValue = decodedValue;
        return;
      }
    }
  }

  protected boolean hasNextScannedResult() {
    return scannerIt.hasNext();
  }

  protected GeoWaveRow getNextEncodedResult() {
    return scannerIt.next();
  }

  @Override
  public boolean hasNext() {
    findNext();
    return nextValue != null;
  }

  @Override
  public T next() throws NoSuchElementException {
    if (nextValue == null) {
      findNext();
    }
    final T previousNext = nextValue;
    if (nextValue == null) {
      throw new NoSuchElementException();
    }
    // Clear the buffer so the next call advances the underlying scanner.
    nextValue = null;
    return previousNext;
  }

  @Override
  public void remove() {
    scannerIt.remove();
  }

  /**
   * Decodes a single raw row, returning {@code null} when the row is filtered out, skipped for
   * subsampling, or the adapter has previously failed to decode.
   */
  @SuppressWarnings("unchecked")
  protected T decodeRow(
      final GeoWaveRow row,
      final QueryFilter[] clientFilters,
      final Index index) {
    Object decodedRow = null;
    if (adapterValid && ((bitPosition == null) || passesSkipFilter(row))) {
      try {
        decodedRow =
            BaseDataStoreUtils.decodeRow(
                row,
                clientFilters,
                null,
                null,
                adapterStore,
                mappingStore,
                index,
                scanCallback,
                fieldSubsetBitmask,
                decodePersistenceEncoding,
                dataIndexRetrieval);

        if (decodedRow != null) {
          // Only advance the skip threshold for rows that actually produced a value.
          incrementSkipRow(row);
        }
      } catch (final AdapterException e) {
        // Attempting to decode future rows with the same adapter is pointless.
        adapterValid = false;
      }
    }
    return (T) decodedRow;
  }

  // NOTE(review): appears unused within this class; retained because it is package-visible and
  // outside callers cannot be ruled out from this view — TODO confirm and remove.
  boolean first = false;

  /**
   * @return {@code true} when the row's sort key has reached the current skip threshold (or no
   *         threshold is active); {@code false} once the key space is exhausted or the row sorts
   *         before the threshold
   */
  private boolean passesSkipFilter(final GeoWaveRow row) {
    return !reachedEnd
        && ((skipUntilRow == null)
            || (skipUntilRow.compareTo(new ByteArray(row.getSortKey())) <= 0));
  }

  /** After a successful decode, advances the skip threshold past the current subsampling cell. */
  private void incrementSkipRow(final GeoWaveRow row) {
    if (bitPosition != null) {
      final byte[] nextRow = IndexUtils.getNextRowForSkip(row.getSortKey(), bitPosition);
      if (nextRow == null) {
        // No further row is possible; every remaining row is skipped.
        reachedEnd = true;
      } else {
        skipUntilRow = new ByteArray(nextRow);
      }
    }
  }

  /** Derives the sort-key bit position for subsampling, when subsampling is requested. */
  private void initializeBitPosition(final double[] maxResolutionSubsamplingPerDimension) {
    if ((maxResolutionSubsamplingPerDimension != null)
        && (maxResolutionSubsamplingPerDimension.length > 0)) {
      bitPosition =
          IndexUtils.getBitPositionOnSortKeyFromSubsamplingArray(
              index.getIndexStrategy(),
              maxResolutionSubsamplingPerDimension);
    }
  }
}


================================================
FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/NativeEntryTransformer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * 

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.Iterator; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.dataidx.DataIndexRetrieval; import org.locationtech.geowave.core.store.callback.ScanCallback; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer; import org.locationtech.geowave.core.store.query.filter.QueryFilter; public class NativeEntryTransformer implements GeoWaveRowIteratorTransformer { private final PersistentAdapterStore adapterStore; private final AdapterIndexMappingStore mappingStore; private final Index index; private final QueryFilter[] clientFilters; private final ScanCallback scanCallback; private final byte[] fieldSubsetBitmask; private final double[] maxResolutionSubsamplingPerDimension; private final boolean decodePersistenceEncoding; private final DataIndexRetrieval dataIndexRetrieval; public NativeEntryTransformer( final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final Index index, final QueryFilter[] clientFilters, final ScanCallback scanCallback, final byte[] fieldSubsetBitmask, final double[] maxResolutionSubsamplingPerDimension, final boolean decodePersistenceEncoding, final DataIndexRetrieval dataIndexRetrieval) { this.adapterStore = adapterStore; this.mappingStore = mappingStore; this.index = index; this.clientFilters = clientFilters; 
this.scanCallback = scanCallback; this.fieldSubsetBitmask = fieldSubsetBitmask; this.decodePersistenceEncoding = decodePersistenceEncoding; this.maxResolutionSubsamplingPerDimension = maxResolutionSubsamplingPerDimension; this.dataIndexRetrieval = dataIndexRetrieval; } @Override public Iterator apply(final Iterator rowIter) { return GeoWaveRowIteratorFactory.iterator( adapterStore, mappingStore, index, rowIter, clientFilters, scanCallback, fieldSubsetBitmask, maxResolutionSubsamplingPerDimension, decodePersistenceEncoding, dataIndexRetrieval); } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/RewritingMergingEntryIterator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.Iterator; import java.util.Map; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.operations.RowDeleter; import org.locationtech.geowave.core.store.operations.RowWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RewritingMergingEntryIterator extends MergingEntryIterator { private static final Logger LOGGER = LoggerFactory.getLogger(RewritingMergingEntryIterator.class); private final RowWriter writer; private final RowDeleter deleter; public RewritingMergingEntryIterator( final PersistentAdapterStore adapterStore, final AdapterIndexMappingStore mappingStore, final Index index, final Iterator scannerIt, final Map mergingAdapters, final RowWriter writer, final RowDeleter deleter) { super(adapterStore, mappingStore, index, scannerIt, null, null, mergingAdapters, null, null); this.writer = writer; this.deleter = deleter; } @Override protected GeoWaveRow mergeSingleRowValues( final GeoWaveRow singleRow, final RowTransform rowTransform) { if (singleRow.getFieldValues().length < 2) { return singleRow; } deleter.delete(singleRow); deleter.flush(); final GeoWaveRow merged = super.mergeSingleRowValues(singleRow, 
rowTransform); writer.write(merged); return merged; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/RowConsumer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.concurrent.BlockingQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RowConsumer implements Iterator { private static final Logger LOGGER = LoggerFactory.getLogger(RowConsumer.class); public static final Object POISON = new Object(); private Object nextRow = null; private final BlockingQueue blockingQueue; public RowConsumer(final BlockingQueue blockingQueue) { this.blockingQueue = blockingQueue; } @Override public boolean hasNext() { if (nextRow != null) { return true; } else { try { nextRow = blockingQueue.take(); } catch (final InterruptedException e) { LOGGER.warn("Interrupted while waiting on hasNext", e); return false; } } if (!nextRow.equals(POISON)) { return true; } else { try { blockingQueue.put(POISON); } catch (final InterruptedException e) { LOGGER.warn("Interrupted while finishing consuming from queue", e); } nextRow = null; return false; } } int count = 0; @Override public T next() { final T retVal = (T) nextRow; if (retVal == null) { throw new NoSuchElementException("No more rows"); } nextRow = null; return retVal; } } ================================================ FILE: core/store/src/main/java/org/locationtech/geowave/core/store/util/SecondaryIndexEntryIteratorWrapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.Iterator; import java.util.NoSuchElementException; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; public abstract class SecondaryIndexEntryIteratorWrapper implements Iterator, CloseableIterator { private final Iterator scanIterator; protected final InternalDataAdapter adapter; private RowType nextValue; public SecondaryIndexEntryIteratorWrapper( final Iterator scanIterator, final InternalDataAdapter adapter) { super(); this.scanIterator = scanIterator; this.adapter = adapter; } @Override public boolean hasNext() { findNext(); return nextValue != null; } @Override public RowType next() { if (nextValue == null) { findNext(); } final RowType previousNext = nextValue; if (nextValue == null) { throw new NoSuchElementException(); } nextValue = null; return previousNext; } @Override public void remove() { scanIterator.remove(); } private void findNext() { while ((nextValue == null) && scanIterator.hasNext()) { final Object row = scanIterator.next(); final RowType decodedValue = decodeRow(row); if (decodedValue != null) { nextValue = decodedValue; return; } } } protected abstract RowType decodeRow(Object row); } ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.core.store.cli.store.StoreOperationProvider org.locationtech.geowave.core.store.cli.index.IndexOperationProvider 
org.locationtech.geowave.core.store.cli.stats.StatsOperationProvider org.locationtech.geowave.core.store.cli.query.QueryOperationProvider org.locationtech.geowave.core.store.cli.type.TypeOperationProvider ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.DefaultConfigProviderSpi ================================================ org.locationtech.geowave.core.store.operations.config.IndexDefaultConfigProvider ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.core.store.StorePersistableRegistry org.locationtech.geowave.core.store.statistics.StatisticsPersistableRegistry org.locationtech.geowave.core.store.index.IndexFieldMapperPersistableRegistry ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi ================================================ org.locationtech.geowave.core.store.cli.query.ConsoleQueryOutputFormat org.locationtech.geowave.core.store.cli.query.CSVQueryOutputFormat ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.data.field.FieldSerializationProviderSpi ================================================ org.locationtech.geowave.core.store.data.field.base.BigDecimalArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.BigDecimalSerializationProvider org.locationtech.geowave.core.store.data.field.base.BigIntegerArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.BigIntegerSerializationProvider org.locationtech.geowave.core.store.data.field.base.BooleanArraySerializationProvider 
org.locationtech.geowave.core.store.data.field.base.BooleanSerializationProvider org.locationtech.geowave.core.store.data.field.base.ByteArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.ByteSerializationProvider org.locationtech.geowave.core.store.data.field.base.DoubleArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.DoubleSerializationProvider org.locationtech.geowave.core.store.data.field.base.FloatArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.FloatSerializationProvider org.locationtech.geowave.core.store.data.field.base.IntegerArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.IntegerSerializationProvider org.locationtech.geowave.core.store.data.field.base.LongArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.LongSerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveBooleanArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveByteArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveDoubleArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveFloatArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveIntArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveLongArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.PrimitiveShortArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.ShortArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.ShortSerializationProvider org.locationtech.geowave.core.store.data.field.base.StringArraySerializationProvider org.locationtech.geowave.core.store.data.field.base.StringSerializationProvider ================================================ FILE: 
core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.AttributeIndexProviderSpi ================================================ org.locationtech.geowave.core.store.index.NumericAttributeIndexProvider org.locationtech.geowave.core.store.index.TextAttributeIndexProvider ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI ================================================ org.locationtech.geowave.core.store.index.CoreRegisteredIndexFieldMappers ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.query.gwql.GWQLExtensionRegistrySpi ================================================ org.locationtech.geowave.core.store.query.gwql.GWQLCoreExtensions ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.spi.DimensionalityTypeProviderSpi ================================================ org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider ================================================ FILE: core/store/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI ================================================ org.locationtech.geowave.core.store.statistics.CoreRegisteredStatistics ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/DataStorePropertyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler; public class DataStorePropertyTest { @Test public void testSerialization() { DataStoreProperty property = new DataStoreProperty("key", 15L); assertEquals("key", property.getKey()); assertEquals(15L, property.getValue()); byte[] serialized = PersistenceUtils.toBinary(property); property = (DataStoreProperty) PersistenceUtils.fromBinary(serialized); assertEquals("key", property.getKey()); assertEquals(15L, property.getValue()); property = new DataStoreProperty("key", "some value"); assertEquals("key", property.getKey()); assertEquals("some value", property.getValue()); serialized = PersistenceUtils.toBinary(property); property = (DataStoreProperty) PersistenceUtils.fromBinary(serialized); assertEquals("key", property.getKey()); assertEquals("some value", property.getValue()); // You should be able to store persistables as well property = new DataStoreProperty("key", new GlobalVisibilityHandler("a")); assertEquals("key", property.getKey()); assertTrue(property.getValue() instanceof GlobalVisibilityHandler); assertEquals( "a", ((GlobalVisibilityHandler) property.getValue()).getVisibility(null, null, null)); serialized = PersistenceUtils.toBinary(property); property = (DataStoreProperty) PersistenceUtils.fromBinary(serialized); assertEquals("key", property.getKey()); assertTrue(property.getValue() instanceof 
GlobalVisibilityHandler); assertEquals( "a", ((GlobalVisibilityHandler) property.getValue()).getVisibility(null, null, null)); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/TestStorePersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; import org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestTypeBasicDataAdapter; import org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestTypeBasicDataAdapterSeparateDataID; import org.locationtech.geowave.core.store.adapter.MockComponents.MockAbstractDataAdapter; import org.locationtech.geowave.core.store.adapter.MockComponents.MockIndexStrategy; import org.locationtech.geowave.core.store.adapter.MockComponents.TestDimensionField; import org.locationtech.geowave.core.store.adapter.MockComponents.TestIndexModel; import org.locationtech.geowave.core.store.query.BasicQueryByClassTest.ExampleDimensionOne; import org.locationtech.geowave.core.store.query.BasicQueryByClassTest.ExampleNumericIndexStrategy; public class TestStorePersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return new PersistableIdAndConstructor[] { new PersistableIdAndConstructor((short) 10200, MockAbstractDataAdapter::new), new PersistableIdAndConstructor((short) 10201, TestDimensionField::new), new PersistableIdAndConstructor((short) 10202, MockIndexStrategy::new), new PersistableIdAndConstructor((short) 10203, TestIndexModel::new), new PersistableIdAndConstructor((short) 10204, ExampleNumericIndexStrategy::new), new PersistableIdAndConstructor((short) 10205, ExampleDimensionOne::new), new 
PersistableIdAndConstructor((short) 10206, TestTypeBasicDataAdapter::new), new PersistableIdAndConstructor( (short) 10207, TestTypeBasicDataAdapterSeparateDataID::new)}; } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/AbstractDataTypeAdapterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.persist.PersistenceUtils; public class AbstractDataTypeAdapterTest { @Test public void testBasicDataTypeAdapter() { AbstractDataTypeAdapter adapter = new TestTypeBasicDataAdapter("myType"); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertEquals("name", adapter.getFieldDescriptors()[0].fieldName()); assertEquals("doubleField", adapter.getFieldDescriptors()[1].fieldName()); assertEquals("intField", adapter.getFieldDescriptors()[2].fieldName()); assertEquals("boolField", adapter.getFieldDescriptors()[3].fieldName()); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (AbstractDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertEquals("name", adapter.getFieldDescriptors()[0].fieldName()); assertEquals("doubleField", adapter.getFieldDescriptors()[1].fieldName()); assertEquals("intField", adapter.getFieldDescriptors()[2].fieldName()); assertEquals("boolField", adapter.getFieldDescriptors()[3].fieldName()); final TestType testEntry = new TestType("id1", 2.5, 8, true); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); assertEquals(2.5, (double) 
adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); final TestType builtEntry = adapter.buildObject("id1", new Object[] {"id1", 2.5, 8, true}); assertEquals("id1", builtEntry.name); assertEquals(2.5, builtEntry.doubleField, 0.001); assertEquals((Integer) 8, builtEntry.intField); assertTrue(builtEntry.boolField); } @Test public void testBasicDataTypeAdapterSeparateDataId() { AbstractDataTypeAdapter adapter = new TestTypeBasicDataAdapterSeparateDataID("myType"); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(3, adapter.getFieldDescriptors().length); assertEquals("name", adapter.getDataIDFieldDescriptor().fieldName()); assertEquals("doubleField", adapter.getFieldDescriptors()[0].fieldName()); assertEquals("intField", adapter.getFieldDescriptors()[1].fieldName()); assertEquals("boolField", adapter.getFieldDescriptors()[2].fieldName()); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (AbstractDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(3, adapter.getFieldDescriptors().length); assertEquals("name", adapter.getDataIDFieldDescriptor().fieldName()); assertEquals("doubleField", adapter.getFieldDescriptors()[0].fieldName()); assertEquals("intField", adapter.getFieldDescriptors()[1].fieldName()); assertEquals("boolField", adapter.getFieldDescriptors()[2].fieldName()); final TestType testEntry = new TestType("id1", 2.5, 8, true); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); final TestType 
builtEntry = adapter.buildObject("id1", new Object[] {2.5, 8, true}); assertEquals("id1", builtEntry.name); assertEquals(2.5, builtEntry.doubleField, 0.001); assertEquals((Integer) 8, builtEntry.intField); assertTrue(builtEntry.boolField); } public static class TestType { public String name; public Double doubleField; public Integer intField; public Boolean boolField; public TestType( final String name, final Double doubleField, final Integer intField, final Boolean boolField) { this.name = name; this.doubleField = doubleField; this.intField = intField; this.boolField = boolField; } } public static class TestTypeBasicDataAdapter extends AbstractDataTypeAdapter { static final FieldDescriptor[] fields = new FieldDescriptor[] { new FieldDescriptorBuilder<>(String.class).fieldName("name").build(), new FieldDescriptorBuilder<>(Double.class).fieldName("doubleField").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("intField").indexHint( new IndexDimensionHint("test")).build(), new FieldDescriptorBuilder<>(Boolean.class).fieldName("boolField").build()}; public TestTypeBasicDataAdapter() {} public TestTypeBasicDataAdapter(final String typeName) { super(typeName, fields, fields[0]); } @Override public Object getFieldValue(TestType entry, String fieldName) { switch (fieldName) { case "name": return entry.name; case "doubleField": return entry.doubleField; case "intField": return entry.intField; case "boolField": return entry.boolField; } return null; } @Override public TestType buildObject(final Object dataId, Object[] fieldValues) { return new TestType( (String) fieldValues[0], (Double) fieldValues[1], (Integer) fieldValues[2], (Boolean) fieldValues[3]); } } public static class TestTypeBasicDataAdapterSeparateDataID extends AbstractDataTypeAdapter { static final FieldDescriptor dataIDField = new FieldDescriptorBuilder<>(String.class).fieldName("name").build(); static final FieldDescriptor[] fields = new FieldDescriptor[] { new 
FieldDescriptorBuilder<>(Double.class).fieldName("doubleField").build(), new FieldDescriptorBuilder<>(Integer.class).fieldName("intField").indexHint( new IndexDimensionHint("test")).build(), new FieldDescriptorBuilder<>(Boolean.class).fieldName("boolField").build()}; public TestTypeBasicDataAdapterSeparateDataID() {} public TestTypeBasicDataAdapterSeparateDataID(final String typeName) { super(typeName, fields, dataIDField); } @Override public Object getFieldValue(TestType entry, String fieldName) { switch (fieldName) { case "name": return entry.name; case "doubleField": return entry.doubleField; case "intField": return entry.intField; case "boolField": return entry.boolField; } return null; } @Override public TestType buildObject(final Object dataId, Object[] fieldValues) { return new TestType( (String) dataId, (Double) fieldValues[0], (Integer) fieldValues[1], (Boolean) fieldValues[2]); } } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/BasicDataTypeAdapterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.adapter; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.locationtech.geowave.core.index.IndexDimensionHint; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; public class BasicDataTypeAdapterTest { @Test public void testObjectBasedDataAdapter() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", TestType.class, "name"); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("name")); assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor("name").bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); 
assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("name")); assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor("name").bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); final TestType testEntry = new TestType("id1", 2.5, 8, true); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); final Object[] fields = new Object[4]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "name": fields[i] = "id1"; break; case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; } } final TestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); } @Test public void testInheritedObjectBasedDataAdapter() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", InheritedTestType.class, "name"); assertEquals("myType", adapter.getTypeName()); 
assertEquals(InheritedTestType.class, adapter.getDataClass()); assertEquals(5, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("name")); assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor("name").bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedTestType.class, adapter.getDataClass()); assertEquals(5, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("name")); assertTrue(String.class.isAssignableFrom(adapter.getFieldDescriptor("name").bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final InheritedTestType 
testEntry = new InheritedTestType("id1", 2.5, 8, true, "extra"); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); assertEquals("extra", adapter.getFieldValue(testEntry, "extraField")); final Object[] fields = new Object[5]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "name": fields[i] = "id1"; break; case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; case "extraField": fields[i] = "extra"; break; } } final InheritedTestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); assertEquals("extra", adapter.getFieldValue(builtEntry, "extraField")); } @Test public void testAnnotatedObjectBasedDataAdapter() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", AnnotatedTestType.class, "alternateName"); assertEquals("myType", adapter.getTypeName()); assertEquals(AnnotatedTestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("alternateName")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("alternateName").bindingClass())); assertTrue( adapter.getFieldDescriptor("alternateName").indexHints().contains( new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); 
assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(AnnotatedTestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("alternateName")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("alternateName").bindingClass())); assertTrue( adapter.getFieldDescriptor("alternateName").indexHints().contains( new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); final 
AnnotatedTestType testEntry = new AnnotatedTestType("id1", 2.5, 8, true, "ignored"); assertEquals("id1", adapter.getFieldValue(testEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); final Object[] fields = new Object[4]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "alternateName": fields[i] = "id1"; break; case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; } } final AnnotatedTestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); assertNull(builtEntry.ignoredField); } @Test public void testInheritedAnnotatedObjectBasedDataAdapter() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter( "myType", InheritedAnnotatedTestType.class, "alternateName"); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass()); assertEquals(5, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("alternateName")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("alternateName").bindingClass())); assertTrue( adapter.getFieldDescriptor("alternateName").indexHints().contains( new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( 
Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass()); assertEquals(5, adapter.getFieldDescriptors().length); assertNotNull(adapter.getFieldDescriptor("alternateName")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("alternateName").bindingClass())); assertTrue( adapter.getFieldDescriptor("alternateName").indexHints().contains( new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( 
adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final InheritedAnnotatedTestType testEntry = new InheritedAnnotatedTestType("id1", 2.5, 8, true, "ignored", 5.3); assertEquals("id1", adapter.getFieldValue(testEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); assertEquals(5.3, (double) adapter.getFieldValue(testEntry, "extraField"), 0.001); final Object[] fields = new Object[5]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "alternateName": fields[i] = "id1"; break; case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; case "extraField": fields[i] = 5.3; break; } } final InheritedAnnotatedTestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); assertEquals(5.3, (double) adapter.getFieldValue(builtEntry, "extraField"), 0.001); assertNull(builtEntry.ignoredField); } @Test public void testObjectBasedDataAdapterSeparateDataID() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", TestType.class, "name", true); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(3, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("name")); 
assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(TestType.class, adapter.getDataClass()); assertEquals(3, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("name")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); final TestType testEntry = new TestType("id1", 2.5, 8, true); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); final Object[] fields = new Object[3]; for (int i = 0; i < fields.length; i++) { 
switch (adapter.getFieldDescriptors()[i].fieldName()) { case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; } } final TestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); } @Test public void testInheritedObjectBasedDataAdapterSeparateDataID() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", InheritedTestType.class, "name", true); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedTestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("name")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedTestType.class, adapter.getDataClass()); assertEquals(4, 
adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("name")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( String.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final InheritedTestType testEntry = new InheritedTestType("id1", 2.5, 8, true, "extra"); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); assertEquals("extra", adapter.getFieldValue(testEntry, "extraField")); final Object[] fields = new Object[4]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; case "extraField": fields[i] = "extra"; break; } } final InheritedTestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "name")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); assertEquals("extra", 
adapter.getFieldValue(builtEntry, "extraField")); } @Test public void testAnnotatedObjectBasedDataAdapterSeparateDataID() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", AnnotatedTestType.class, "alternateName", true); assertEquals("myType", adapter.getTypeName()); assertEquals(AnnotatedTestType.class, adapter.getDataClass()); assertEquals(3, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("alternateName")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertTrue( adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(AnnotatedTestType.class, adapter.getDataClass()); assertEquals(3, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("alternateName")); assertNotNull(adapter.getDataIDFieldDescriptor()); 
assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertTrue( adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); final AnnotatedTestType testEntry = new AnnotatedTestType("id1", 2.5, 8, true, "ignored"); assertEquals("id1", adapter.getFieldValue(testEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); final Object[] fields = new Object[3]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; } } final AnnotatedTestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); 
assertNull(builtEntry.ignoredField); } @Test public void testInheritedAnnotatedObjectBasedDataAdapterSeparateDataID() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter( "myType", InheritedAnnotatedTestType.class, "alternateName", true); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("alternateName")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertTrue( adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(InheritedAnnotatedTestType.class, adapter.getDataClass()); assertEquals(4, adapter.getFieldDescriptors().length); 
assertNull(adapter.getFieldDescriptor("alternateName")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); assertTrue( adapter.getDataIDFieldDescriptor().indexHints().contains(new IndexDimensionHint("a"))); assertNotNull(adapter.getFieldDescriptor("doubleField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("doubleField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("intField")); assertTrue( Integer.class.isAssignableFrom(adapter.getFieldDescriptor("intField").bindingClass())); assertNotNull(adapter.getFieldDescriptor("boolField")); assertTrue( Boolean.class.isAssignableFrom(adapter.getFieldDescriptor("boolField").bindingClass())); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("a"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("b"))); assertTrue( adapter.getFieldDescriptor("boolField").indexHints().contains(new IndexDimensionHint("c"))); assertNotNull(adapter.getFieldDescriptor("extraField")); assertTrue( Double.class.isAssignableFrom(adapter.getFieldDescriptor("extraField").bindingClass())); final InheritedAnnotatedTestType testEntry = new InheritedAnnotatedTestType("id1", 2.5, 8, true, "ignored", 5.3); assertEquals("id1", adapter.getFieldValue(testEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(testEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(testEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(testEntry, "boolField")); assertEquals(5.3, (double) adapter.getFieldValue(testEntry, "extraField"), 0.001); final Object[] fields = new Object[4]; for (int i = 0; i < fields.length; i++) { switch (adapter.getFieldDescriptors()[i].fieldName()) { case "doubleField": fields[i] = 2.5; break; case "intField": fields[i] = 8; break; case "boolField": fields[i] = true; break; case 
"extraField": fields[i] = 5.3; break; } } final InheritedAnnotatedTestType builtEntry = adapter.buildObject("id1", fields); assertEquals("id1", adapter.getFieldValue(builtEntry, "alternateName")); assertEquals(2.5, (double) adapter.getFieldValue(builtEntry, "doubleField"), 0.001); assertEquals(8, adapter.getFieldValue(builtEntry, "intField")); assertTrue((boolean) adapter.getFieldValue(builtEntry, "boolField")); assertEquals(5.3, (double) adapter.getFieldValue(builtEntry, "extraField"), 0.001); assertNull(builtEntry.ignoredField); } @Test public void testSingleFieldDataAdapterSeparateDataID() { BasicDataTypeAdapter adapter = BasicDataTypeAdapter.newAdapter("myType", SingleFieldTestType.class, "name", true); assertEquals("myType", adapter.getTypeName()); assertEquals(SingleFieldTestType.class, adapter.getDataClass()); assertEquals(0, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("name")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); final byte[] adapterBytes = PersistenceUtils.toBinary(adapter); adapter = (BasicDataTypeAdapter) PersistenceUtils.fromBinary(adapterBytes); assertEquals("myType", adapter.getTypeName()); assertEquals(SingleFieldTestType.class, adapter.getDataClass()); assertEquals(0, adapter.getFieldDescriptors().length); assertNull(adapter.getFieldDescriptor("name")); assertNotNull(adapter.getDataIDFieldDescriptor()); assertTrue(String.class.isAssignableFrom(adapter.getDataIDFieldDescriptor().bindingClass())); final SingleFieldTestType testEntry = new SingleFieldTestType("id1"); assertEquals("id1", adapter.getFieldValue(testEntry, "name")); final SingleFieldTestType builtEntry = adapter.buildObject("id1", new Object[0]); assertEquals("id1", adapter.getFieldValue(builtEntry, "name")); } public static class TestType { private String name; private double doubleField; public int intField; public boolean boolField; protected 
TestType() {} public TestType( final String name, final Double doubleField, final Integer intField, final Boolean boolField) { this.name = name; this.doubleField = doubleField; this.intField = intField; this.boolField = boolField; } public void setName(final String name) { this.name = name; } public String getName() { return name; } public void setDoubleField(final double doubleField) { this.doubleField = doubleField; } public double getDoubleField() { return doubleField; } } public static class InheritedTestType extends TestType { public String extraField; public InheritedTestType() { super(); } public InheritedTestType( final String name, final Double doubleField, final Integer intField, final Boolean boolField, final String extraField) { super(name, doubleField, intField, boolField); this.extraField = extraField; } } @GeoWaveDataType public static class AnnotatedTestType { @GeoWaveField(name = "alternateName", indexHints = "a") private String name; @GeoWaveField() private double doubleField; @GeoWaveField() private int intField; @GeoWaveField(indexHints = {"a", "b", "c"}) private boolean boolField; protected String ignoredField; protected AnnotatedTestType() {} public AnnotatedTestType( final String name, final double doubleField, final int intField, final boolean boolField, final String ignoredField) { this.name = name; this.doubleField = doubleField; this.intField = intField; this.boolField = boolField; this.ignoredField = ignoredField; } } @GeoWaveDataType public static class InheritedAnnotatedTestType extends AnnotatedTestType { @GeoWaveField() private Double extraField; protected InheritedAnnotatedTestType() { super(); } public InheritedAnnotatedTestType( final String name, final Double doubleField, final Integer intField, final Boolean boolField, final String ignoredField, final Double extraField) { super(name, doubleField, intField, boolField, ignoredField); this.extraField = extraField; } } public static class SingleFieldTestType { private String name; 
protected SingleFieldTestType() {} public SingleFieldTestType(final String name) { this.name = name; } public void setName(final String name) { this.name = name; } public String getName() { return name; } } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/FieldDescriptorTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.locationtech.geowave.core.index.IndexDimensionHint;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;

/**
 * Unit test for {@link FieldDescriptor}: verifies that a descriptor built via
 * {@link FieldDescriptorBuilder} reports its field name, binding class, and index hints correctly,
 * and that all of those properties survive a {@link PersistenceUtils} serialization round trip.
 */
public class FieldDescriptorTest {
  @Test
  public void testFieldDescriptor() {
    // Build a String-typed descriptor carrying a single index dimension hint.
    final FieldDescriptor testDescriptor =
        new FieldDescriptorBuilder<>(String.class).fieldName("testFieldName").indexHint(
            new IndexDimensionHint("testDimensionHint")).build();
    assertEquals("testFieldName", testDescriptor.fieldName());
    assertEquals(String.class, testDescriptor.bindingClass());
    assertEquals(1, testDescriptor.indexHints().size());
    assertTrue(testDescriptor.indexHints().contains(new IndexDimensionHint("testDimensionHint")));
    // Round-trip through binary persistence and confirm nothing is lost.
    final byte[] fieldDescriptorBytes = PersistenceUtils.toBinary(testDescriptor);
    final FieldDescriptor deserialized =
        (FieldDescriptor) PersistenceUtils.fromBinary(fieldDescriptorBytes);
    assertEquals("testFieldName", deserialized.fieldName());
    assertEquals(String.class, deserialized.bindingClass());
    assertEquals(1, deserialized.indexHints().size());
    assertTrue(deserialized.indexHints().contains(new IndexDimensionHint("testDimensionHint")));
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/IndexFieldMapperTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import static org.junit.Assert.assertEquals;
import java.util.Collections;
import java.util.Map;
import org.junit.Test;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.index.NoOpIndexFieldMapper;
import com.google.common.collect.Lists;

/**
 * Unit test for {@link NoOpIndexFieldMapper}: verifies that the mapper passes a single adapter
 * field value straight through in both directions (adapter -> index via {@code toIndex}, index ->
 * adapter via {@code toAdapter}) and that its configuration survives a persistence round trip.
 * NOTE(review): generic type parameters appear to have been stripped from this listing by
 * extraction; the raw types below are preserved as-is.
 */
public class IndexFieldMapperTest {
  @Test
  public void testNoOpIndexFieldMapper() {
    final NoOpIndexFieldMapper mapper = new NoOpIndexFieldMapper<>(Integer.class);
    FieldDescriptor testField =
        new FieldDescriptorBuilder<>(Integer.class).fieldName("testField").build();
    // Bind the mapper to one index field name and one adapter field.
    mapper.init("testIndexField", Lists.newArrayList(testField), null);
    assertEquals("testIndexField", mapper.indexFieldName());
    assertEquals(Integer.class, mapper.indexFieldType());
    assertEquals(Integer.class, mapper.adapterFieldType());
    assertEquals("testField", mapper.getAdapterFields()[0]);
    assertEquals(1, mapper.adapterFieldCount());
    // Index value 42 should land unchanged in the adapter row under "testField".
    final MapRowBuilder rowBuilder = new MapRowBuilder();
    mapper.toAdapter(42, rowBuilder);
    Map row = rowBuilder.buildRow(null);
    assertEquals(1, row.size());
    assertEquals((int) 42, (int) row.get("testField"));
    // And an adapter value of 43 should map to the index unchanged.
    assertEquals((int) 43, (int) mapper.toIndex(Collections.singletonList(43)));
    // Serialize/deserialize and repeat the same checks on the restored mapper.
    final byte[] mapperBytes = PersistenceUtils.toBinary(mapper);
    final NoOpIndexFieldMapper deserialized =
        (NoOpIndexFieldMapper) PersistenceUtils.fromBinary(mapperBytes);
    assertEquals("testIndexField", deserialized.indexFieldName());
    assertEquals(Integer.class, deserialized.indexFieldType());
    assertEquals(Integer.class, deserialized.adapterFieldType());
    assertEquals("testField", deserialized.getAdapterFields()[0]);
    assertEquals(1, deserialized.adapterFieldCount());
    deserialized.toAdapter(42, rowBuilder);
    row = rowBuilder.buildRow(null);
    assertEquals(1, row.size());
    assertEquals((int) 42, (int) row.get("testField"));
    assertEquals((int) 43, (int) deserialized.toIndex(Collections.singletonList(43)));
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/MockComponents.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.locationtech.geowave.core.index.Coordinate;
import org.locationtech.geowave.core.index.CoordinateRange;
import org.locationtech.geowave.core.index.IndexDimensionHint;
import org.locationtech.geowave.core.index.IndexMetaData;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinates;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.numeric.NumericValue;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.IndexFieldMapper;
import org.locationtech.geowave.core.store.api.RowBuilder;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldUtils;
import org.locationtech.geowave.core.store.data.field.FieldWriter;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.primitives.Bytes;

/**
 * Collection of mock store components (data adapter, index field type/mapper, dimension field,
 * index strategy, index model, and field reader/writer) shared by the core-store unit tests.
 * NOTE(review): generic type parameters appear to have been stripped from this listing by
 * extraction (e.g. {@code List>>}); the raw forms below are preserved as-is.
 */
public class MockComponents {
  // Mock class instantiating abstract class so we can test logic
  // contained in abstract class.
  public static class MockAbstractDataAdapter implements DefaultStatisticsProvider, DataTypeAdapter {
    // Adapter/type name; defaults to ID and is the only serialized state.
    private String id = ID;

    public MockAbstractDataAdapter() {
      this(ID);
    }

    public MockAbstractDataAdapter(final String id) {
      super();
      this.id = id;
      // final List> handlers =
      // new ArrayList<>();
      // handlers.add(new TestIndexFieldHandler());
      // super.init(handlers, null);
    }

    public static final String INTEGER = "TestInteger";
    public static final String ID = "TestIntegerAdapter";

    // Two fields: the integer value (carrying the test dimension hint) and a string id field.
    private static final FieldDescriptor[] FIELDS =
        new FieldDescriptor[] {
            new FieldDescriptorBuilder<>(Integer.class).indexHint(
                TestDimensionField.TEST_DIMENSION_HINT).fieldName(INTEGER).build(),
            new FieldDescriptorBuilder<>(String.class).fieldName(ID).build()};

    @Override
    public String getTypeName() {
      return id;
    }

    @Override
    public byte[] getDataId(final Integer entry) {
      // Data ID is the literal prefix "DataID" followed by the entry's decimal form.
      return StringUtils.stringToBinary("DataID" + entry.toString());
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public FieldReader getReader(final String fieldId) {
      if (fieldId.equals(INTEGER)) {
        return FieldUtils.getDefaultReaderForClass(Integer.class);
      } else if (fieldId.equals(ID)) {
        return FieldUtils.getDefaultReaderForClass(String.class);
      }
      return null;
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public FieldWriter getWriter(final String fieldId) {
      if (fieldId.equals(INTEGER)) {
        return FieldUtils.getDefaultWriterForClass(Integer.class);
      } else if (fieldId.equals(ID)) {
        return FieldUtils.getDefaultWriterForClass(String.class);
      }
      return null;
    }

    // Equality is based solely on the adapter id (matches hashCode below).
    @Override
    public boolean equals(final Object o) {
      if (this == o) {
        return true;
      }
      if ((o == null) || (getClass() != o.getClass())) {
        return false;
      }
      final MockAbstractDataAdapter that = (MockAbstractDataAdapter) o;
      return Objects.equals(id, that.id);
    }

    @Override
    public int hashCode() {
      return Objects.hash(id);
    }

    @Override
    public byte[] toBinary() {
      // Serialized form: 4-byte big-endian length prefix followed by the id bytes.
      final byte[] idBinary = StringUtils.stringToBinary(id);
      return Bytes.concat(ByteBuffer.allocate(4).putInt(idBinary.length).array(), idBinary);
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final byte[] idBinary = new byte[buf.getInt()];
      buf.get(idBinary);
      id = StringUtils.stringFromBinary(idBinary);
    }

    @Override
    public RowBuilder newRowBuilder(final FieldDescriptor[] outputFieldDescriptors) {
      // Anonymous builder that captures the two known fields; the row itself is just the
      // integer value (the string field is accepted but unused in the built row).
      return new RowBuilder() {
        @SuppressWarnings("unused")
        private String myid;
        private Integer intValue;

        @Override
        public void setField(final String id, final Object fieldValue) {
          if (id.equals(INTEGER)) {
            intValue = (Integer) fieldValue;
          } else if (id.equals(ID)) {
            myid = (String) fieldValue;
          }
        }

        @Override
        public void setFields(final Map values) {
          if (values.containsKey(INTEGER)) {
            intValue = (Integer) values.get(INTEGER);
          }
          if (values.containsKey(ID)) {
            myid = (String) values.get(ID);
          }
        }

        @Override
        public Integer buildRow(final byte[] dataId) {
          return new Integer(intValue);
        }
      };
    }

    @Override
    public Class getDataClass() {
      return Integer.class;
    }

    @Override
    public List>> getDefaultStatistics() {
      // Single default statistic: an internal count for this type.
      final List>> statistics = Lists.newArrayList();
      final CountStatistic count = new CountStatistic(getTypeName());
      count.setInternal();
      statistics.add(count);
      return statistics;
    }

    @Override
    public Object getFieldValue(final Integer entry, final String fieldName) {
      switch (fieldName) {
        case INTEGER:
          return entry;
        case ID:
          return entry.toString();
        default:
          break;
      }
      return null;
    }

    @Override
    public FieldDescriptor[] getFieldDescriptors() {
      return FIELDS;
    }

    @Override
    public FieldDescriptor getFieldDescriptor(final String fieldName) {
      switch (fieldName) {
        case INTEGER:
          return FIELDS[0];
        case ID:
          return FIELDS[1];
        default:
          break;
      }
      return null;
    }
  } // class MockAbstractDataAdapter

  // *************************************************************************
  //
  // Test index field type for dimension.
  //
  // *************************************************************************
  public static class TestIndexFieldType {
    // Wrapped integer value; read directly by the sibling mocks below.
    private final Integer indexValue;

    public TestIndexFieldType(final Integer _indexValue) {
      indexValue = _indexValue;
    }
  }

  // Pass-through mapper between an Integer adapter field and TestIndexFieldType.
  public static class TestIndexFieldTypeMapper extends IndexFieldMapper {
    @Override
    public TestIndexFieldType toIndex(List nativeFieldValues) {
      return new TestIndexFieldType(nativeFieldValues.get(0));
    }

    @Override
    public void toAdapter(TestIndexFieldType indexFieldValue, RowBuilder rowBuilder) {
      rowBuilder.setField(adapterFields[0], indexFieldValue.indexValue);
    }

    @Override
    public Class indexFieldType() {
      return TestIndexFieldType.class;
    }

    @Override
    public Class adapterFieldType() {
      return Integer.class;
    }

    @Override
    public short adapterFieldCount() {
      return 1;
    }
  }

  // *************************************************************************
  //
  // Test implementation on interface DimensionField for use by
  // TestIndexModel.
  //
  // *************************************************************************
  public static class TestDimensionField implements NumericDimensionField {
    final String fieldName;
    public static String FIELD = "TestDimensionField1";
    public static IndexDimensionHint TEST_DIMENSION_HINT = new IndexDimensionHint("TEST_DIMENSION");

    public TestDimensionField() {
      fieldName = FIELD;
    }

    // Most numeric operations are stubbed to 0/null; only the identity pieces matter to tests.
    @Override
    public double normalize(final double value) {
      return 0;
    }

    @Override
    public BinRange[] getNormalizedRanges(final NumericData range) {
      return null;
    }

    @Override
    public byte[] toBinary() {
      return new byte[0];
    }

    @Override
    public void fromBinary(final byte[] bytes) {}

    @Override
    public NumericData getNumericData(final TestIndexFieldType dataElement) {
      return new NumericValue(dataElement.indexValue);
    }

    @Override
    public String getFieldName() {
      return fieldName;
    }

    @Override
    public FieldWriter getWriter() {
      return new IntegerWriter();
    }

    @Override
    public FieldReader getReader() {
      return new IntegerReader();
    }

    @Override
    public NumericDimensionDefinition getBaseDefinition() {
      return new TestDimensionField();
    }

    @Override
    public boolean isCompatibleWith(final Class clazz) {
      return TestIndexFieldType.class.isAssignableFrom(clazz);
    }

    @Override
    public double getRange() {
      return 0;
    }

    @Override
    public double denormalize(final double value) {
      return 0;
    }

    @Override
    public NumericRange getDenormalizedRange(final BinRange range) {
      return null;
    }

    @Override
    public int getFixedBinIdSize() {
      return 0;
    }

    @Override
    public NumericRange getBounds() {
      return null;
    }

    @Override
    public NumericData getFullRange() {
      return null;
    }

    @Override
    public Class getFieldClass() {
      return TestIndexFieldType.class;
    }

    @Override
    public Set getDimensionHints() {
      return Sets.newHashSet(TEST_DIMENSION_HINT);
    }
  }

  // Minimal index strategy: insertion ids are the string form of each dimension's centroid.
  public static class MockIndexStrategy implements NumericIndexStrategy {
    @Override
    public byte[] toBinary() {
      return new byte[] {};
    }

    @Override
    public void fromBinary(final byte[] bytes) {}

    @Override
    public QueryRanges getQueryRanges(
        final MultiDimensionalNumericData indexedRange,
        final IndexMetaData... hints) {
      // Delegate to the bounded overload with "no limit" (-1).
      return getQueryRanges(indexedRange, -1, hints);
    }

    @Override
    public QueryRanges getQueryRanges(
        final MultiDimensionalNumericData indexedRange,
        final int maxEstimatedRangeDecomposition,
        final IndexMetaData... hints) {
      return new QueryRanges();
    }

    @Override
    public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
      final List ids = new ArrayList<>();
      for (final NumericData data : indexedData.getDataPerDimension()) {
        ids.add(Double.toString(data.getCentroid()).getBytes());
      }
      return new InsertionIds(ids);
    }

    @Override
    public InsertionIds getInsertionIds(
        final MultiDimensionalNumericData indexedData,
        final int maxEstimatedDuplicateIds) {
      return this.getInsertionIds(indexedData);
    }

    @Override
    public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {
      return null;
    }

    @Override
    public String getId() {
      return "Test";
    }

    @Override
    public double[] getHighestPrecisionIdRangePerDimension() {
      return new double[] {Integer.MAX_VALUE};
    }

    @Override
    public List createMetaData() {
      return Collections.emptyList();
    }

    @Override
    public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
        final MultiDimensionalNumericData dataRange,
        final IndexMetaData... hints) {
      // One coordinate range per dimension; note it always reads index [0] of the
      // min/max arrays regardless of d — presumably intentional for a 1-D test strategy.
      final CoordinateRange[][] coordinateRangesPerDimension =
          new CoordinateRange[dataRange.getDimensionCount()][];
      for (int d = 0; d < coordinateRangesPerDimension.length; d++) {
        coordinateRangesPerDimension[d] = new CoordinateRange[1];
        coordinateRangesPerDimension[d][0] =
            new CoordinateRange(
                dataRange.getMinValuesPerDimension()[0].longValue(),
                dataRange.getMaxValuesPerDimension()[0].longValue(),
                new byte[] {});
      }
      return new MultiDimensionalCoordinateRanges[] {
          new MultiDimensionalCoordinateRanges(new byte[] {}, coordinateRangesPerDimension)};
    }

    @Override
    public MultiDimensionalNumericData getRangeForId(
        final byte[] partitionKey,
        final byte[] sortKey) {
      return null;
    }

    @Override
    public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
      return null;
    }

    @Override
    public byte[][] getQueryPartitionKeys(
        final MultiDimensionalNumericData queryData,
        final IndexMetaData... hints) {
      // TODO Auto-generated method stub
      return null;
    }

    @Override
    public MultiDimensionalCoordinates getCoordinatesPerDimension(
        final byte[] partitionKey,
        final byte[] sortKey) {
      // Inverse of getInsertionIds: parse the sort key back into a numeric coordinate.
      return new MultiDimensionalCoordinates(
          new byte[] {},
          new Coordinate[] {
              new Coordinate((long) Double.parseDouble(new String(sortKey)), new byte[] {})});
    }

    @Override
    public int getPartitionKeyLength() {
      return 0;
    }
  }

  // *************************************************************************
  //
  // Test index model class for use in testing encoding by
  // AbstractDataAdapter.
  //
  // *************************************************************************
  public static class TestIndexModel implements CommonIndexModel {
    // Single-dimension model; reader/writer lookups ignore the field name argument.
    private final TestDimensionField[] dimensionFields;
    private String id = "testmodel";

    public TestIndexModel() {
      dimensionFields = new TestDimensionField[1];
      dimensionFields[0] = new TestDimensionField();
    }

    public TestIndexModel(final String id) {
      dimensionFields = new TestDimensionField[1];
      dimensionFields[0] = new TestDimensionField();
      this.id = id;
    }

    @Override
    public FieldReader getReader(final String fieldName) {
      final FieldReader reader = dimensionFields[0].getReader();
      return (FieldReader) reader;
    }

    @Override
    public FieldWriter getWriter(final String fieldName) {
      final FieldWriter writer = dimensionFields[0].getWriter();
      return (FieldWriter) writer;
    }

    @Override
    public byte[] toBinary() {
      return new byte[] {};
    }

    @Override
    public void fromBinary(final byte[] bytes) {}

    @Override
    public TestDimensionField[] getDimensions() {
      return dimensionFields;
    }

    @Override
    public String getId() {
      return id;
    }
  }

  // Reads a TestIndexFieldType from its decimal-string byte encoding.
  public static class IntegerReader implements FieldReader {
    @Override
    public TestIndexFieldType readField(final byte[] fieldData) {
      return new TestIndexFieldType(Integer.parseInt(new String(fieldData)));
    }
  }

  // Writes a TestIndexFieldType as its decimal-string byte encoding (inverse of IntegerReader).
  public static class IntegerWriter implements FieldWriter {
    @Override
    public byte[] writeField(final TestIndexFieldType fieldValue) {
      return Integer.toString(fieldValue.indexValue).getBytes();
    }
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/MockRegisteredIndexFieldMappers.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter;

import org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI;

/**
 * SPI registration for the mock {@code TestIndexFieldTypeMapper} used by the core-store tests, so
 * the mapper can be looked up by its persistable id during serialization round trips.
 */
public class MockRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI {
  @Override
  public RegisteredFieldMapper[] getRegisteredFieldMappers() {
    // 10250 is the persistable id assigned to the test mapper.
    return new RegisteredFieldMapper[] {
        new RegisteredFieldMapper(MockComponents.TestIndexFieldTypeMapper::new, (short) 10250)};
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/ByteUtilsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter.statistics.histogram;

import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import org.junit.Test;

/**
 * Unit test for {@link ByteUtils}: verifies that the byte-array-to-double mapping preserves the
 * lexicographic ordering of the underlying byte arrays, that {@code toBytes} inverts
 * {@code toDouble} (up to padding), and that ordering is monotonic across the full unsigned byte
 * range.
 */
public class ByteUtilsTest {
  @Test
  public void test() {
    // Lexicographic order: "12" sorts after "112" (byte-wise), so its double must be larger.
    final double oneTwo = ByteUtils.toDouble("12".getBytes());
    final double oneOneTwo = ByteUtils.toDouble("112".getBytes());
    final double oneThree = ByteUtils.toDouble("13".getBytes());
    final double oneOneThree = ByteUtils.toDouble("113".getBytes());
    assertTrue(oneTwo > oneOneTwo);
    assertTrue(oneThree > oneTwo);
    assertTrue(oneOneTwo < oneOneThree);
    // toBytes should reproduce the padded form of the original bytes.
    assertTrue(
        Arrays.equals(ByteUtils.toPaddedBytes("113".getBytes()), ByteUtils.toBytes(oneOneThree)));
    // Single-byte values must be ordered as unsigned bytes (0x00 < 0x8F < 0xFF).
    final double min = ByteUtils.toDouble(new byte[] {(byte) 0x00});
    final double mid = ByteUtils.toDouble(new byte[] {(byte) 0x8F});
    final double max = ByteUtils.toDouble(new byte[] {(byte) 0xFF});
    assertTrue(min < mid);
    assertTrue(mid < max);
    // Strict monotonicity over all 256 repeated-byte arrays of length 8.
    Double last = null;
    for (int i = 0; i < 256; i++) {
      final double current =
          ByteUtils.toDouble(
              new byte[] {
                  (byte) i, (byte) i, (byte) i, (byte) i, (byte) i, (byte) i, (byte) i, (byte) i});
      if (last != null) {
        assertTrue(current > last);
      }
      last = current;
    }
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/adapter/statistics/histogram/NumericHistogramTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.adapter.statistics.histogram;

import static org.junit.Assert.assertEquals;
import java.util.Random;
import org.junit.Test;

/**
 * Unit test exercising two histogram implementations ({@link MinimalBinDistanceHistogram} and
 * {@link FixedBinNumericHistogram}) against the same uniform random stream, checking CDF endpoints
 * and approximate quantiles. The seed is fixed so the test is deterministic.
 */
public class NumericHistogramTest {
  Random r = new Random(347);
  MinimalBinDistanceHistogram stats = new MinimalBinDistanceHistogram();
  FixedBinNumericHistogram stats2 = new FixedBinNumericHistogram();

  @Test
  public void testIngest() {
    // Feed 10k uniform samples over (2500, 102498) into both histograms.
    for (long i = 0; i < 10000; i++) {
      final double v = 2500 + (r.nextDouble() * 99998.0);
      stats.add(v);
      stats2.add(v);
    }
    // CDF must be 0 at the lower bound, 1 above the upper bound, and ~0.5 at the midpoint.
    assertEquals(0, stats.cdf(2500), 0.001);
    assertEquals(1.0, stats.cdf(102500), 0.001);
    assertEquals(0.5, stats.cdf(52500), 0.02);
    assertEquals(0, stats2.cdf(2500), 0.001);
    assertEquals(1.0, stats2.cdf(102500), 0.001);
    assertEquals(0.5, stats2.cdf(52500), 0.02);
    // Quantiles should roughly track the uniform distribution (tolerances in thousands).
    assertEquals(27, stats.quantile(0.25) / 1000.0, 0.1);
    assertEquals(52, stats.quantile(0.5) / 1000.0, 0.3);
    assertEquals(78, stats.quantile(0.75) / 1000.0, 0.3);
    assertEquals(55, stats2.quantile(0.5) / 1000.0, 1.0);
    assertEquals(81, stats2.quantile(0.75) / 1000.0, 0.1);
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/api/DataStoreAddTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.core.store.api;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.core.store.adapter.MockComponents.MockAbstractDataAdapter;
import org.locationtech.geowave.core.store.index.NullIndex;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;

/**
 * Tests for {@code DataStore.addType}, {@code DataStore.addIndex}, and
 * {@code DataStore.createWriter} against an in-memory store. Each test starts from a fresh store.
 */
public class DataStoreAddTest {
  // Type-name constants are shared by all tests and never vary per instance.
  private static final String MOCK_DATA_TYPE_1 = "Some Data Type";
  private static final String MOCK_DATA_TYPE_2 = "Another Data Type";

  private DataStore dataStore;

  @Before
  public void createStore() {
    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());
  }

  @After
  public void tearDown() {
    dataStore.deleteAll();
  }

  @Test
  public void addIndex_Basic() {
    final NullIndex index1 = new NullIndex("index1");
    final NullIndex index2 = new NullIndex("index2");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index1);
    dataStore.addIndex(MOCK_DATA_TYPE_1, index2);
    assertEquals(2, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
  }

  @Test
  public void addIndex_MultiIndexAdd() {
    final NullIndex index1 = new NullIndex("index1");
    final NullIndex index2 = new NullIndex("index2");
    final NullIndex index3 = new NullIndex("index3");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index1);
    dataStore.addIndex(MOCK_DATA_TYPE_1, index2, index3);
    assertEquals(3, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
  }

  @Test
  public void addIndex_SameIndexVarArgs() {
    // Duplicate indices in the varargs list should be collapsed to a single index.
    final NullIndex index1 = new NullIndex("index1");
    final NullIndex index2 = new NullIndex("index2");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index1);
    dataStore.addIndex(MOCK_DATA_TYPE_1, index2, index2, index2);
    assertEquals(2, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
  }

  @Test
  public void addIndex_IndexAlreadyAdded() {
    // Re-adding an existing index should be a no-op.
    final NullIndex index1 = new NullIndex("index1");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index1);
    dataStore.addIndex(MOCK_DATA_TYPE_1, index1);
    assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
  }

  @Test
  public void addType_Basic() {
    final NullIndex index = new NullIndex("myIndex");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index);
    final DataTypeAdapter[] registeredTypes = dataStore.getTypes();
    assertEquals(1, registeredTypes.length);
    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);
  }

  @Test
  public void addType_MultiIndex() {
    final NullIndex index1 = new NullIndex("index1");
    final NullIndex index2 = new NullIndex("index2");
    final NullIndex index3 = new NullIndex("index3");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index1, index2, index3);
    final DataTypeAdapter[] registeredTypes = dataStore.getTypes();
    assertEquals(1, registeredTypes.length);
    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);
    assertEquals(3, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
  }

  @Test
  public void addType_SameIndexVarArgs() {
    // Duplicate indices in the varargs list should be collapsed to a single index.
    final NullIndex index1 = new NullIndex("index1");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index1, index1, index1);
    final DataTypeAdapter[] registeredTypes = dataStore.getTypes();
    assertEquals(1, registeredTypes.length);
    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);
    assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
  }

  @Test
  public void addType_MultiIndexAndMultiTypeSameAdapter() {
    // Two types backed by the same adapter class must keep independent index lists.
    final NullIndex mockType1Index1 = new NullIndex("mock1index1");
    final NullIndex mockType1Index2 = new NullIndex("mock1index2");
    final NullIndex mockType1Index3 = new NullIndex("mock1index3");
    final MockAbstractDataAdapter adapter1 = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter1, mockType1Index1, mockType1Index2, mockType1Index3);
    final NullIndex mockType2Index1 = new NullIndex("mock2index1");
    final NullIndex mockType2Index2 = new NullIndex("mock2index2");
    final MockAbstractDataAdapter adapter2 = new MockAbstractDataAdapter(MOCK_DATA_TYPE_2);
    dataStore.addType(adapter2, mockType2Index1, mockType2Index2);
    final DataTypeAdapter[] registeredTypes = dataStore.getTypes();
    assertEquals(2, registeredTypes.length);
    assertTrue(registeredTypes[0] instanceof MockAbstractDataAdapter);
    assertTrue(registeredTypes[1] instanceof MockAbstractDataAdapter);
    assertEquals(3, dataStore.getIndices(MOCK_DATA_TYPE_1).length);
    assertEquals(2, dataStore.getIndices(MOCK_DATA_TYPE_2).length);
  }

  @Test
  public void createWriter_NonNullForSeenType() {
    final NullIndex index = new NullIndex("myIndex");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index);
    final Writer writer = dataStore.createWriter(MOCK_DATA_TYPE_1);
    assertNotNull(writer);
    // Close the writer so the test does not leak the resource.
    writer.close();
  }

  @Test
  public void createWriter_SeenTypeWriteNoError() {
    final NullIndex index = new NullIndex("myIndex");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index);
    final Writer writer = dataStore.createWriter(MOCK_DATA_TYPE_1);
    // try/finally ensures the writer is closed even if a write throws.
    try {
      writer.write(15);
      writer.write(0);
    } finally {
      writer.close();
    }
  }

  @Test
  public void createWriter_NullForUnseenType() {
    // No type registered at all: createWriter should return null.
    final Writer writer = dataStore.createWriter(MOCK_DATA_TYPE_1);
    assertNull(writer);
  }

  @Test
  public void createWriter_NullForUnseenType2() {
    // A different type is registered, but not the requested one: still null.
    final NullIndex index = new NullIndex("myIndex");
    final MockAbstractDataAdapter adapter = new MockAbstractDataAdapter(MOCK_DATA_TYPE_1);
    dataStore.addType(adapter, index);
    final Writer writer = dataStore.createWriter(MOCK_DATA_TYPE_2);
    assertNull(writer);
  }
}


================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/api/DataStoreRemoveTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.api; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.locationtech.geowave.core.store.adapter.MockComponents; import org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; public class DataStoreRemoveTest { private static int counter = 0; private static final String MOCK_DATA_TYPE_1 = "Some Data Type"; private static final String MOCK_DATA_TYPE_2 = "Another Data Type"; private DataStore dataStore; @Before public void createStore() { dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); final Index index = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("test1")); final DataTypeAdapter adapter = new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_1); dataStore.addType(adapter, index); counter++; } @After public void tearDown() { dataStore.deleteAll(); } @Test public void testRemoveType() { // given final DataTypeAdapter adapter2 = new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_2); final Index index2 = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("test2")); dataStore.addType(adapter2, index2); // when dataStore.removeType(adapter2.getTypeName()); // then Assert.assertEquals(1, dataStore.getTypes().length); Assert.assertEquals(MOCK_DATA_TYPE_1, dataStore.getTypes()[0].getTypeName()); } @Test public void testRemoveInvalidType() { // given // when dataStore.removeType("Adapter 2"); // then 
Assert.assertEquals(1, dataStore.getTypes().length); } /* * Untestable code: baseOperations.deleteAll(indexName, typeName, adapterId); just returns false * and does not actually delete anything. src: MemoryDataStoreOperations#deleteAll(table, type, * adapter, args) */ @Ignore @Test public void testDelete() { // given // when dataStore.delete(QueryBuilder.newBuilder().addTypeName(MOCK_DATA_TYPE_1).build()); // then Assert.assertEquals(0, dataStore.getTypes().length); } @Test public void testDeleteAll() { // given // when dataStore.deleteAll(); // then Assert.assertEquals(0, dataStore.getTypes().length); Assert.assertEquals(0, dataStore.getIndices().length); } @Test public void testRemoveIndexSingle() { // given final Index index2 = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("test2")); final Index index3 = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("test3")); final DataTypeAdapter adapter2 = new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_2); dataStore.addType(adapter2, index2, index3); dataStore.addIndex(MOCK_DATA_TYPE_1, index2); // when dataStore.removeIndex(index2.getName()); // then Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length); Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_2).length); } @Test(expected = IllegalStateException.class) public void testRemoveIndexSingleFinal() { // given // when dataStore.removeIndex("Test_test1"); // then throw new AssertionError("Last index should thrown an IllegalStateException"); } @Test public void testRemoveIndexSingleInvalid() { // given // when dataStore.removeIndex("Test_test2"); // then Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length); } /* * Untestable code: baseOperations.deleteAll(indexName, typeName, adapterId); just returns false * and does not actually delete anything. 
src: BaseDataStore#removeIndex(type, index) -> * MemoryDataStoreOperations#deleteAll(table, type, adapter, args) Also has the error that it * tries to delete from all adapters. Not just targeted one. */ @Ignore @Test public void testRemoveIndexDouble() { // given final DataTypeAdapter adapter2 = new MockComponents.MockAbstractDataAdapter(MOCK_DATA_TYPE_2); final Index index2 = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("test2")); dataStore.addIndex(MOCK_DATA_TYPE_1, index2); dataStore.addType(adapter2, index2); // when dataStore.removeIndex(MOCK_DATA_TYPE_1, index2.getName()); // then Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_1).length); Assert.assertEquals(1, dataStore.getIndices(MOCK_DATA_TYPE_2).length); } @Test(expected = IllegalStateException.class) public void testRemoveIndexDoubleFinal() { // given // when dataStore.removeIndex(MOCK_DATA_TYPE_1, "Test_test1"); // then throw new AssertionError(); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/data/field/BasicReaderWriterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.data.field; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Arrays; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class BasicReaderWriterTest { private Boolean booleanExpected; private Boolean[] booleanArrayExpected; private boolean[] primBooleanArrayExpected; private Boolean booleanNullExpected; private Byte byteExpected; private Short shortExpected; private Short[] shortArrayExpected; private short[] primShortArrayExpected; private Float floatExpected; private Float[] floatArrayExpected; private float[] primFloatArrayExpected; private Double doubleExpected; private Double[] doubleArrayExpected; private double[] primDoubleArrayExpected; private BigDecimal bigDecimalExpected; private Integer integerExpected; private Integer[] intArrayExpected; private int[] primIntArrayExpected; private Long longExpected; private Long[] longArrayExpected; private long[] primLongArrayExpected; private BigInteger bigIntegerExpected; private String stringExpected; private String[] stringArrayExpected; private Byte[] byteArrayExpected; private byte[] primByteArrayExpected; private byte[] defaultNullExpected; public static void main(final String[] args) { final BasicReaderWriterTest tester = new BasicReaderWriterTest(); tester.init(); tester.testBasicReadWrite(); } @Before public void init() { booleanExpected = Boolean.TRUE; booleanArrayExpected = new Boolean[] {Boolean.TRUE, null, Boolean.FALSE, null}; primBooleanArrayExpected = new boolean[] {Boolean.TRUE, Boolean.FALSE}; booleanNullExpected = Boolean.FALSE; byteExpected = 
Byte.MIN_VALUE; shortExpected = Short.MIN_VALUE; shortArrayExpected = new Short[] {Short.MIN_VALUE, null, Short.MAX_VALUE, null}; primShortArrayExpected = new short[] {Short.MIN_VALUE, Short.MAX_VALUE}; floatExpected = Float.MIN_VALUE; floatArrayExpected = new Float[] {null, Float.MIN_VALUE, null, Float.MAX_VALUE}; primFloatArrayExpected = new float[] {Float.MIN_VALUE, Float.MAX_VALUE}; doubleExpected = Double.MIN_VALUE; doubleArrayExpected = new Double[] {Double.MIN_VALUE, null, Double.MAX_VALUE, null}; primDoubleArrayExpected = new double[] {Double.MIN_VALUE, Double.MAX_VALUE}; bigDecimalExpected = BigDecimal.TEN; integerExpected = Integer.MIN_VALUE; intArrayExpected = new Integer[] {null, Integer.MIN_VALUE, null, Integer.MAX_VALUE}; primIntArrayExpected = new int[] {Integer.MIN_VALUE, Integer.MAX_VALUE}; longExpected = Long.MIN_VALUE; longArrayExpected = new Long[] {Long.MIN_VALUE, null, Long.MAX_VALUE, null}; primLongArrayExpected = new long[] {Long.MIN_VALUE, Long.MAX_VALUE}; bigIntegerExpected = BigInteger.valueOf(Long.MAX_VALUE); stringExpected = this.getClass().getName(); stringArrayExpected = new String[] {null, this.getClass().getName(), null, String.class.getName()}; byteArrayExpected = new Byte[] {Byte.MIN_VALUE, Byte.valueOf((byte) 55), Byte.MAX_VALUE}; primByteArrayExpected = new byte[] {Byte.MIN_VALUE, (byte) 33, Byte.MAX_VALUE}; defaultNullExpected = new byte[] {}; } @Test public void testBasicReadWrite() { byte[] value; // test Boolean reader/writer value = FieldUtils.getDefaultWriterForClass(Boolean.class).writeField(booleanExpected); final Boolean booleanActual = FieldUtils.getDefaultReaderForClass(Boolean.class).readField(value); Assert.assertEquals( "FAILED test of Boolean reader/writer", booleanExpected.booleanValue(), booleanActual.booleanValue()); // test Boolean Array reader/writer value = FieldUtils.getDefaultWriterForClass(Boolean[].class).writeField(booleanArrayExpected); final Boolean[] booleanArrayActual = 
FieldUtils.getDefaultReaderForClass(Boolean[].class).readField(value); Assert.assertTrue( "FAILED test of Boolean Array reader/writer", Arrays.deepEquals(booleanArrayExpected, booleanArrayActual)); // test boolean Array reader/writer value = FieldUtils.getDefaultWriterForClass(boolean[].class).writeField(primBooleanArrayExpected); final boolean[] primBooleanArrayActual = FieldUtils.getDefaultReaderForClass(boolean[].class).readField(value); Assert.assertTrue( "FAILED test of boolean Array reader/writer", Arrays.equals(primBooleanArrayExpected, primBooleanArrayActual)); // test Byte reader/writer value = FieldUtils.getDefaultWriterForClass(Byte.class).writeField(byteExpected); final Byte byteActual = FieldUtils.getDefaultReaderForClass(Byte.class).readField(value); Assert.assertEquals( "FAILED test of Byte reader/writer", byteExpected.byteValue(), byteActual.byteValue()); // test Short reader/writer value = FieldUtils.getDefaultWriterForClass(Short.class).writeField(shortExpected); final Short shortActual = FieldUtils.getDefaultReaderForClass(Short.class).readField(value); Assert.assertEquals( "FAILED test of Short reader/writer", shortExpected.shortValue(), shortActual.shortValue()); // test Short Array reader/writer value = FieldUtils.getDefaultWriterForClass(Short[].class).writeField(shortArrayExpected); final Short[] shortArrayActual = FieldUtils.getDefaultReaderForClass(Short[].class).readField(value); Assert.assertTrue( "FAILED test of Short Array reader/writer", Arrays.deepEquals(shortArrayExpected, shortArrayActual)); // test short Array reader/writer value = FieldUtils.getDefaultWriterForClass(short[].class).writeField(primShortArrayExpected); final short[] primShortArrayActual = FieldUtils.getDefaultReaderForClass(short[].class).readField(value); Assert.assertTrue( "FAILED test of short Array reader/writer", Arrays.equals(primShortArrayExpected, primShortArrayActual)); // test Float reader/writer value = 
FieldUtils.getDefaultWriterForClass(Float.class).writeField(floatExpected); final Float floatActual = FieldUtils.getDefaultReaderForClass(Float.class).readField(value); Assert.assertEquals("FAILED test of Float reader/writer", floatExpected, floatActual); // test Float Array reader/writer value = FieldUtils.getDefaultWriterForClass(Float[].class).writeField(floatArrayExpected); final Float[] floatArrayActual = FieldUtils.getDefaultReaderForClass(Float[].class).readField(value); Assert.assertTrue( "FAILED test of Float Array reader/writer", Arrays.deepEquals(floatArrayExpected, floatArrayActual)); // test float Array reader/writer value = FieldUtils.getDefaultWriterForClass(float[].class).writeField(primFloatArrayExpected); final float[] primFloatArrayActual = FieldUtils.getDefaultReaderForClass(float[].class).readField(value); Assert.assertTrue( "FAILED test of float Array reader/writer", Arrays.equals(primFloatArrayExpected, primFloatArrayActual)); // test Double reader/writer value = FieldUtils.getDefaultWriterForClass(Double.class).writeField(doubleExpected); final Double doubleActual = FieldUtils.getDefaultReaderForClass(Double.class).readField(value); Assert.assertEquals("FAILED test of Double reader/writer", doubleExpected, doubleActual); // test Double Array reader/writer value = FieldUtils.getDefaultWriterForClass(Double[].class).writeField(doubleArrayExpected); final Double[] doubleArrayActual = FieldUtils.getDefaultReaderForClass(Double[].class).readField(value); Assert.assertTrue( "FAILED test of Double Array reader/writer", Arrays.deepEquals(doubleArrayExpected, doubleArrayActual)); // test double Array reader/writer value = FieldUtils.getDefaultWriterForClass(double[].class).writeField(primDoubleArrayExpected); final double[] primDoubleArrayActual = FieldUtils.getDefaultReaderForClass(double[].class).readField(value); Assert.assertTrue( "FAILED test of double Array reader/writer", Arrays.equals(primDoubleArrayExpected, primDoubleArrayActual)); // test 
BigDecimal reader/writer value = FieldUtils.getDefaultWriterForClass(BigDecimal.class).writeField(bigDecimalExpected); final BigDecimal bigDecimalActual = FieldUtils.getDefaultReaderForClass(BigDecimal.class).readField(value); Assert.assertEquals( "FAILED test of BigDecimal reader/writer", bigDecimalExpected, bigDecimalActual); // test Integer reader/writer value = FieldUtils.getDefaultWriterForClass(Integer.class).writeField(integerExpected); final Integer integerActual = FieldUtils.getDefaultReaderForClass(Integer.class).readField(value); Assert.assertEquals("FAILED test of Integer reader/writer", integerExpected, integerActual); // test Integer Array reader/writer value = FieldUtils.getDefaultWriterForClass(Integer[].class).writeField(intArrayExpected); final Integer[] intArrayActual = FieldUtils.getDefaultReaderForClass(Integer[].class).readField(value); Assert.assertTrue( "FAILED test of Integer Array reader/writer", Arrays.deepEquals(intArrayExpected, intArrayActual)); // test int Array reader/writer value = FieldUtils.getDefaultWriterForClass(int[].class).writeField(primIntArrayExpected); final int[] primIntArrayActual = FieldUtils.getDefaultReaderForClass(int[].class).readField(value); Assert.assertTrue( "FAILED test of int Array reader/writer", Arrays.equals(primIntArrayExpected, primIntArrayActual)); // test Long reader/writer value = FieldUtils.getDefaultWriterForClass(Long.class).writeField(longExpected); final Long longActual = FieldUtils.getDefaultReaderForClass(Long.class).readField(value); Assert.assertEquals("FAILED test of Long reader/writer", longExpected, longActual); // test Long Array reader/writer value = FieldUtils.getDefaultWriterForClass(Long[].class).writeField(longArrayExpected); final Long[] longArrayActual = FieldUtils.getDefaultReaderForClass(Long[].class).readField(value); Assert.assertTrue( "FAILED test of Long Array reader/writer", Arrays.deepEquals(longArrayExpected, longArrayActual)); // test long Array reader/writer value = 
FieldUtils.getDefaultWriterForClass(long[].class).writeField(primLongArrayExpected); final long[] primLongArrayActual = FieldUtils.getDefaultReaderForClass(long[].class).readField(value); Assert.assertTrue( "FAILED test of long Array reader/writer", Arrays.equals(primLongArrayExpected, primLongArrayActual)); // test BigInteger reader/writer value = FieldUtils.getDefaultWriterForClass(BigInteger.class).writeField(bigIntegerExpected); final BigInteger bigIntegerActual = FieldUtils.getDefaultReaderForClass(BigInteger.class).readField(value); Assert.assertEquals( "FAILED test of BigInteger reader/writer", bigIntegerExpected, bigIntegerActual); // test String reader/writer value = FieldUtils.getDefaultWriterForClass(String.class).writeField(stringExpected); final String stringActual = FieldUtils.getDefaultReaderForClass(String.class).readField(value); Assert.assertEquals("FAILED test of String reader/writer", stringExpected, stringActual); // test String Array reader/writer value = FieldUtils.getDefaultWriterForClass(String[].class).writeField(stringArrayExpected); final String[] stringArrayActual = FieldUtils.getDefaultReaderForClass(String[].class).readField(value); Assert.assertTrue( "FAILED test of String Array reader/writer", Arrays.deepEquals(stringArrayExpected, stringArrayActual)); // test Byte [] reader/writer value = FieldUtils.getDefaultWriterForClass(Byte[].class).writeField(byteArrayExpected); final Byte[] byteArrayActual = FieldUtils.getDefaultReaderForClass(Byte[].class).readField(value); Assert.assertTrue( "FAILED test of Byte [] reader/writer", Arrays.deepEquals(byteArrayExpected, byteArrayActual)); // test byte [] reader/writer value = FieldUtils.getDefaultWriterForClass(byte[].class).writeField(primByteArrayExpected); final byte[] primByteArrayActual = FieldUtils.getDefaultReaderForClass(byte[].class).readField(value); Assert.assertTrue( "FAILED test of byte [] reader/writer", Arrays.equals(primByteArrayExpected, primByteArrayActual)); } @Test public 
void testNullReadWrite() {
  // Verifies the null-handling contract of every default FieldUtils reader/writer pair:
  // writing null produces a well-known "null" encoding (defaultNullExpected.length bytes),
  // and reading that encoding back yields null (or, for Boolean, the documented default).
  byte[] value;
  // test Boolean reader/writer — Boolean is the one type that round-trips null to a
  // default value rather than to null itself
  value = FieldUtils.getDefaultWriterForClass(Boolean.class).writeField(null);
  final Boolean booleanNullActual =
      FieldUtils.getDefaultReaderForClass(Boolean.class).readField(value);
  Assert.assertEquals(
      "FAILED null test of Boolean field writer/reader",
      booleanNullExpected.booleanValue(),
      booleanNullActual.booleanValue());
  // test Byte reader/writer
  value = FieldUtils.getDefaultWriterForClass(Byte.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Byte field writer", defaultNullExpected.length, value.length);
  final Byte byteActual = FieldUtils.getDefaultReaderForClass(Byte.class).readField(value);
  Assert.assertEquals("FAILED null test of Byte field reader", null, byteActual);
  // test Short reader/writer
  value = FieldUtils.getDefaultWriterForClass(Short.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Short writer", defaultNullExpected.length, value.length);
  final Short shortActual = FieldUtils.getDefaultReaderForClass(Short.class).readField(value);
  Assert.assertEquals("FAILED null test of Short reader", null, shortActual);
  // test Short Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(Short[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Short Array writer", defaultNullExpected.length, value.length);
  final Short[] shortArrayActual =
      FieldUtils.getDefaultReaderForClass(Short[].class).readField(value);
  Assert.assertTrue(
      "FAILED test of Short Array reader", Arrays.deepEquals(null, shortArrayActual));
  // test short Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(short[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of short Array writer", defaultNullExpected.length, value.length);
  final short[] primShortArrayActual =
      FieldUtils.getDefaultReaderForClass(short[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of short Array reader", Arrays.equals(null, primShortArrayActual));
  // test Float reader/writer
  value = FieldUtils.getDefaultWriterForClass(Float.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Float writer", defaultNullExpected.length, value.length);
  final Float floatActual = FieldUtils.getDefaultReaderForClass(Float.class).readField(value);
  Assert.assertEquals("FAILED null test of Float Array reader", null, floatActual);
  // test Float Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(Float[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Float Array writer", defaultNullExpected.length, value.length);
  final Float[] floatArrayActual =
      FieldUtils.getDefaultReaderForClass(Float[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of Float Array reader", Arrays.deepEquals(null, floatArrayActual));
  // test float Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(float[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of float Array writer", defaultNullExpected.length, value.length);
  final float[] primFloatArrayActual =
      FieldUtils.getDefaultReaderForClass(float[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of float Array reader/writer", Arrays.equals(null, primFloatArrayActual));
  // test Double reader/writer
  value = FieldUtils.getDefaultWriterForClass(Double.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Double writer", defaultNullExpected.length, value.length);
  final Double doubleActual = FieldUtils.getDefaultReaderForClass(Double.class).readField(value);
  Assert.assertEquals("FAILED null test of Double reader", null, doubleActual);
  // test Double Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(Double[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Double Array writer", defaultNullExpected.length, value.length);
  final Double[] doubleArrayActual =
      FieldUtils.getDefaultReaderForClass(Double[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of Double Array reader", Arrays.deepEquals(null, doubleArrayActual));
  // test double Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(double[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of double Array writer", defaultNullExpected.length, value.length);
  final double[] primDoubleArrayActual =
      FieldUtils.getDefaultReaderForClass(double[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of double Array reader", Arrays.equals(null, primDoubleArrayActual));
  // test BigDecimal reader/writer
  value = FieldUtils.getDefaultWriterForClass(BigDecimal.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of BigDecimal writer", defaultNullExpected.length, value.length);
  final BigDecimal bigDecimalActual =
      FieldUtils.getDefaultReaderForClass(BigDecimal.class).readField(value);
  Assert.assertEquals("FAILED null test of BigDecimal reader", null, bigDecimalActual);
  // test Integer reader/writer
  value = FieldUtils.getDefaultWriterForClass(Integer.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Integer writer", defaultNullExpected.length, value.length);
  final Integer integerActual =
      FieldUtils.getDefaultReaderForClass(Integer.class).readField(value);
  Assert.assertEquals("FAILED test of Integer reader", null, integerActual);
  // test Integer Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(Integer[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Integer Array writer", defaultNullExpected.length, value.length);
  final Integer[] intArrayActual =
      FieldUtils.getDefaultReaderForClass(Integer[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of Integer Array reader", Arrays.deepEquals(null, intArrayActual));
  // test int Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(int[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of int Array writer", defaultNullExpected.length, value.length);
  final int[] primIntArrayActual =
      FieldUtils.getDefaultReaderForClass(int[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of int Array reader", Arrays.equals(null, primIntArrayActual));
  // test Long reader/writer
  value = FieldUtils.getDefaultWriterForClass(Long.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Long writer", defaultNullExpected.length, value.length);
  final Long longActual = FieldUtils.getDefaultReaderForClass(Long.class).readField(value);
  Assert.assertEquals("FAILED test of Long reader", null, longActual);
  // test Long Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(Long[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Long Array writer", defaultNullExpected.length, value.length);
  final Long[] longArrayActual =
      FieldUtils.getDefaultReaderForClass(Long[].class).readField(value);
  Assert.assertTrue("FAILED test of Long Array reader", Arrays.deepEquals(null, longArrayActual));
  // test long Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(long[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of long Array writer", defaultNullExpected.length, value.length);
  final long[] primLongArrayActual =
      FieldUtils.getDefaultReaderForClass(long[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of long Array reader/writer", Arrays.equals(null, primLongArrayActual));
  // test BigInteger reader/writer
  value = FieldUtils.getDefaultWriterForClass(BigInteger.class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of BigInteger writer", defaultNullExpected.length, value.length);
  final BigInteger bigIntegerActual =
      FieldUtils.getDefaultReaderForClass(BigInteger.class).readField(value);
  Assert.assertEquals("FAILED null test of BigInteger reader", null, bigIntegerActual);
  // test String Array reader/writer
  value = FieldUtils.getDefaultWriterForClass(String[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of String Array writer", defaultNullExpected.length, value.length);
  final String[] stringArrayActual =
      FieldUtils.getDefaultReaderForClass(String[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of String Array reader/writer",
      Arrays.deepEquals(null, stringArrayActual));
  // test Byte [] reader/writer
  value = FieldUtils.getDefaultWriterForClass(Byte[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of Byte [] writer", defaultNullExpected.length, value.length);
  final Byte[] byteArrayActual =
      FieldUtils.getDefaultReaderForClass(Byte[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of Byte [] reader", Arrays.deepEquals(null, byteArrayActual));
  // test byte [] reader/writer
  value = FieldUtils.getDefaultWriterForClass(byte[].class).writeField(null);
  Assert.assertEquals(
      "FAILED null test of byte [] writer", defaultNullExpected.length, value.length);
  final byte[] primByteArrayActual =
      FieldUtils.getDefaultReaderForClass(byte[].class).readField(value);
  Assert.assertTrue(
      "FAILED null test of byte [] reader/writer", Arrays.equals(null, primByteArrayActual));
}
}
================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/data/visibility/JsonFieldLevelVisibilityHandlerTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data.visibility;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.RowBuilder;
import org.locationtech.geowave.core.store.api.VisibilityHandler;
import com.beust.jcommander.internal.Maps;

/**
 * Tests {@link JsonFieldLevelVisibilityHandler}: per-field visibilities are read from a JSON
 * attribute (here named "vis") whose keys are field names or regular expressions and whose values
 * are the visibility expressions for matching fields.
 */
public class JsonFieldLevelVisibilityHandlerTest {
  // NOTE(review): the generic type arguments in this class were reconstructed — the extracted
  // source had them stripped. Confirm against the original file.
  DataTypeAdapter<Map<String, Object>> adapter;
  Object[] defaults;
  Map<String, Object> entry;
  final VisibilityHandler visHandler = new JsonFieldLevelVisibilityHandler("vis");

  @Before
  public void setup() {
    // We're not really using this as a full data adapter, so we can ignore most of the methods;
    // only getFieldValue needs a real implementation for the visibility handler to work.
    adapter = new DataTypeAdapter<Map<String, Object>>() {
      @Override
      public byte[] toBinary() {
        return null;
      }

      @Override
      public void fromBinary(final byte[] bytes) {}

      @Override
      public String getTypeName() {
        return null;
      }

      @Override
      public byte[] getDataId(final Map<String, Object> entry) {
        return null;
      }

      @Override
      public Object getFieldValue(final Map<String, Object> entry, final String fieldName) {
        return entry.get(fieldName);
      }

      @Override
      public Class<Map<String, Object>> getDataClass() {
        return null;
      }

      @Override
      public RowBuilder<Map<String, Object>> newRowBuilder(
          final FieldDescriptor<?>[] outputFieldDescriptors) {
        return null;
      }

      @Override
      public FieldDescriptor<?>[] getFieldDescriptors() {
        return null;
      }

      @Override
      public FieldDescriptor<?> getFieldDescriptor(final String fieldName) {
        return null;
      }
    };
    entry = Maps.newHashMap();
    entry.put("pop", "pop");
    entry.put("pid", "pid");
    entry.put("vis", "{\"pid\":\"TS\", \"geo.*\":\"S\"}");
    entry.put("geometry", "POINT(0, 0)");
  }

  @Test
  public void testPIDNonDefault() {
    // "pid" is named explicitly in the JSON visibility attribute
    assertEquals("TS", visHandler.getVisibility(adapter, entry, "pid"));
  }

  @Test
  public void testPOPNonDefault() {
    // "pop" matches no key in the JSON visibility attribute, so no visibility is assigned
    assertNull(visHandler.getVisibility(adapter, entry, "pop"));
  }

  @Test
  public void testGEORegexDefault() {
    // "geometry" matches the "geo.*" regex key
    assertEquals("S", visHandler.getVisibility(adapter, entry, "geometry"));
  }

  @Test
  public void testCatchAllRegexDefault() {
    // a ".*" key acts as a catch-all default for fields not matched elsewhere
    entry.put("vis", "{\"pid\":\"TS\", \".*\":\"U\"}");
    assertEquals("U", visHandler.getVisibility(adapter, entry, "pop"));
  }
}
================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/data/visibility/VisibilityExpressionTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.data.visibility;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.text.ParseException;
import org.junit.Test;
import com.google.common.collect.Sets;

/**
 * Tests evaluation of boolean visibility expressions ("&" and "|" over authorization tokens,
 * with parentheses) and composition/simplification of expressions via VisibilityComposer.
 */
public class VisibilityExpressionTest {
  @Test
  public void testValidVisibilityExpressions() {
    // Basic expression
    final String EXPRESSION1 = "(a&b)|c";
    assertTrue(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("a", "b")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("a", "b", "c")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("c")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("a")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("b")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("d")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet()));
    // More complex expression with white space
    final String EXPRESSION2 = "((a & b) | c) & (d | e)";
    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "b", "d")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "b", "e")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("c", "d")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("c", "e")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "c", "d")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("b", "c", "e")));
    assertTrue(
        VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "b", "c", "d", "e")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("b")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("c")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("d")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("e")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "b")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "d")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "e")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a", "b", "c")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet()));
    // Chained operators
    final String EXPRESSION3 = "(a&b&c)|d|e";
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a", "b", "c")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a", "b", "e")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("c", "d")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("c", "e")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a", "c", "d")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("d")));
    assertTrue(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("e")));
    assertTrue(
        VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a", "b", "c", "d", "e")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("b")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("c")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a", "b")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a", "c")));
    assertFalse(VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet()));
    // Empty expression — evaluates true for any authorization set
    final String EMPTY_EXPRESSION = "";
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet("a", "b")));
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet("a", "b", "c")));
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet("c")));
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet("a")));
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet("b")));
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet("d")));
    assertTrue(VisibilityExpression.evaluate(EMPTY_EXPRESSION, Sets.newHashSet()));
  }

  @Test
  public void testInvalidVisibilityExpressions() {
    // Each malformed expression should surface a ParseException as the cause with a
    // specific, user-facing message.
    // No matching right paren
    final String EXPRESSION1 = "(a&b|c";
    // No matching left paren
    final String EXPRESSION2 = "a&b)|c";
    // Multiple sequential operators
    final String EXPRESSION3 = "a&|b";
    // Multiple sequential operands
    final String EXPRESSION4 = "(a)(b)";
    // No left operand
    final String EXPRESSION5 = "&b";
    // No right operand
    final String EXPRESSION6 = "a&";
    try {
      VisibilityExpression.evaluate(EXPRESSION1, Sets.newHashSet("a"));
      fail();
    } catch (final Exception e) {
      // Expected
      assertTrue(e.getCause() instanceof ParseException);
      assertEquals(
          "Left parenthesis found with no matching right parenthesis.",
          e.getCause().getMessage());
    }
    try {
      VisibilityExpression.evaluate(EXPRESSION2, Sets.newHashSet("a"));
      fail();
    } catch (final Exception e) {
      // Expected
      assertTrue(e.getCause() instanceof ParseException);
      assertEquals(
          "Right parenthesis found with no matching left parenthesis.",
          e.getCause().getMessage());
    }
    try {
      VisibilityExpression.evaluate(EXPRESSION3, Sets.newHashSet("a"));
      fail();
    } catch (final Exception e) {
      // Expected
      assertTrue(e.getCause() instanceof ParseException);
      assertEquals("Multiple sequential operators.", e.getCause().getMessage());
    }
    try {
      VisibilityExpression.evaluate(EXPRESSION4, Sets.newHashSet("a"));
      fail();
    } catch (final Exception e) {
      // Expected
      assertTrue(e.getCause() instanceof ParseException);
      assertEquals("Multiple sequential operands with no operator.", e.getCause().getMessage());
    }
    try {
      VisibilityExpression.evaluate(EXPRESSION5, Sets.newHashSet("a"));
      fail();
    } catch (final Exception e) {
      // Expected
      assertTrue(e.getCause() instanceof ParseException);
      assertEquals("Operator found with no left operand.", e.getCause().getMessage());
    }
    try {
      VisibilityExpression.evaluate(EXPRESSION6, Sets.newHashSet("a"));
      fail();
    } catch (final Exception e) {
      // Expected
      assertTrue(e.getCause() instanceof ParseException);
      assertEquals("Operator found with no right operand.", e.getCause().getMessage());
    }
  }

  @Test
  public void testVisibiltyComposer() {
    VisibilityComposer composer = new VisibilityComposer();
    composer.addVisibility("a&b");
    assertEquals("a&b", composer.composeVisibility());
    // Adding "a" or "b" to the visibility shouldn't change it
    composer.addVisibility("a");
    assertEquals("a&b", composer.composeVisibility());
    composer.addVisibility("b");
    assertEquals("a&b", composer.composeVisibility());
    composer.addVisibility("a&b");
    assertEquals("a&b", composer.composeVisibility());
    // Adding "c" should update it
    composer.addVisibility("c");
    assertEquals("a&b&c", composer.composeVisibility());
    // Adding a complex AND-only visibility should deduplicate any tokens already present
    composer.addVisibility("(a&b)&(c&d)");
    assertEquals("a&b&c&d", composer.composeVisibility());
    // Any expression with an OR operator should be isolated
    composer.addVisibility("a&(e|(f&b))");
    assertEquals("(e|(f&b))&a&b&c&d", composer.composeVisibility());
    composer = new VisibilityComposer();
    // Adding a complex visibility that only uses AND operators should simplify the expression
    composer.addVisibility("a&((b&e)&(c&d))");
    assertEquals("a&b&c&d&e", composer.composeVisibility());
    composer = new VisibilityComposer();
    composer.addVisibility("a&b");
    assertEquals("a&b", composer.composeVisibility());
    final VisibilityComposer copy = new VisibilityComposer(composer);
    assertEquals("a&b", copy.composeVisibility());
    // Adding to the copy does not affect the original
    copy.addVisibility("c&d");
    assertEquals("a&b&c&d", copy.composeVisibility());
    assertEquals("a&b", composer.composeVisibility());
  }
}
================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/flatten/BitmaskUtilsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.flatten; import java.util.Arrays; import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.TreeSet; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.junit.Assert; import org.junit.Test; public class BitmaskUtilsTest { static final BitSet zeroth = new BitSet(); static final BitSet first = new BitSet(); static final BitSet second = new BitSet(); static final BitSet third = new BitSet(); static final BitSet fourth = new BitSet(); static final BitSet fifth = new BitSet(); static final BitSet sixth = new BitSet(); static final BitSet seventh = new BitSet(); static final BitSet eighth = new BitSet(); static final BitSet composite_0_1_2 = new BitSet(); // generate bitsets static { zeroth.set(0); first.set(1); second.set(2); third.set(3); fourth.set(4); fifth.set(5); sixth.set(6); seventh.set(7); eighth.set(8); composite_0_1_2.set(0); composite_0_1_2.set(1); composite_0_1_2.set(2); } @Test public void testGenerateBitSet() { Assert.assertTrue( Arrays.equals(zeroth.toByteArray(), BitmaskUtils.generateCompositeBitmask(0))); Assert.assertTrue( Arrays.equals(eighth.toByteArray(), BitmaskUtils.generateCompositeBitmask(8))); } @Test public void testByteSize() { // confirm bitmasks are of correct (minimal) byte length Assert.assertTrue(1 == zeroth.toByteArray().length); Assert.assertTrue(2 == eighth.toByteArray().length); } @Test public void testGetOrdinal() { List positions = BitmaskUtils.getFieldPositions(zeroth.toByteArray()); Assert.assertTrue(0 == positions.get(0)); Assert.assertTrue(1 == 
positions.size()); positions = BitmaskUtils.getFieldPositions(first.toByteArray()); Assert.assertTrue(1 == positions.get(0)); Assert.assertTrue(1 == positions.size()); positions = BitmaskUtils.getFieldPositions(eighth.toByteArray()); Assert.assertTrue(8 == positions.get(0)); Assert.assertTrue(1 == positions.size()); } @Test public void testCompositeBitmask() { // generate composite bitmask for 3 bitmasks and ensure correctness final byte[] bitmask = BitmaskUtils.generateCompositeBitmask(new TreeSet<>(Arrays.asList(0, 1, 2))); Assert.assertTrue(BitSet.valueOf(bitmask).equals(composite_0_1_2)); } @Test public void testDecompositionOfComposite() { // decompose composite bitmask and ensure correctness final List positions = BitmaskUtils.getFieldPositions(composite_0_1_2.toByteArray()); Assert.assertTrue(positions.size() == 3); Assert.assertTrue(0 == positions.get(0)); Assert.assertTrue(1 == positions.get(1)); Assert.assertTrue(2 == positions.get(2)); } @Test public void testCompositeSortOrder() { // generate meaningless fieldInfo to transform final Object original = new Object(); // clone original fieldInfo overwriting dataValue.id with bitmask final Pair field0 = new ImmutablePair(0, original); final Pair field1 = new ImmutablePair(1, original); final Pair field2 = new ImmutablePair(2, original); final Pair field3 = new ImmutablePair(3, original); final Pair field4 = new ImmutablePair(4, original); final Pair field5 = new ImmutablePair(5, original); final Pair field6 = new ImmutablePair(6, original); final Pair field7 = new ImmutablePair(7, original); final Pair field8 = new ImmutablePair(8, original); // construct list in wrong order final List> fieldInfoList = Arrays.asList(field8, field7, field6, field5, field4, field3, field2, field1, field0); // sort in place and ensure list sorts correctly Collections.sort(fieldInfoList, new BitmaskedPairComparator()); Assert.assertTrue(field0.equals(fieldInfoList.get(0))); Assert.assertTrue(field1.equals(fieldInfoList.get(1))); 
Assert.assertTrue(field2.equals(fieldInfoList.get(2))); Assert.assertTrue(field3.equals(fieldInfoList.get(3))); Assert.assertTrue(field4.equals(fieldInfoList.get(4))); Assert.assertTrue(field5.equals(fieldInfoList.get(5))); Assert.assertTrue(field6.equals(fieldInfoList.get(6))); Assert.assertTrue(field7.equals(fieldInfoList.get(7))); Assert.assertTrue(field8.equals(fieldInfoList.get(8))); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/memory/MemoryDataStoreTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.adapter.MockComponents; import org.locationtech.geowave.core.store.adapter.MockComponents.MockAbstractDataAdapter; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.IndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler; import 
org.locationtech.geowave.core.store.index.CommonIndexModel; import org.locationtech.geowave.core.store.index.IndexImpl; import org.locationtech.geowave.core.store.query.constraints.DataIdQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic; import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue; import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic; import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue; import com.clearspring.analytics.util.Lists; public class MemoryDataStoreTest { @Test public void test() throws IOException { final Index index = new IndexImpl(new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel()); final String namespace = "test_" + getClass().getName(); final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily(); final MemoryRequiredOptions reqOptions = new MemoryRequiredOptions(); reqOptions.setGeoWaveNamespace(namespace); final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(reqOptions); final DataStatisticsStore statsStore = storeFamily.getDataStatisticsStoreFactory().createStore(reqOptions); final DataTypeAdapter adapter = new MockComponents.MockAbstractDataAdapter(); final VisibilityHandler visHandler = new GlobalVisibilityHandler("aaa&bbb"); final List> statistics = Lists.newArrayList(); statistics.add(new CountStatistic(adapter.getTypeName())); statistics.add( new NumericRangeStatistic(adapter.getTypeName(), MockAbstractDataAdapter.INTEGER)); dataStore.addType(adapter, statistics, index); try (final Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) { indexWriter.write(new Integer(25), visHandler); 
indexWriter.flush(); indexWriter.write(new Integer(35), visHandler); indexWriter.flush(); } // authorization check try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").constraints( new TestQuery(23, 26)).build())) { assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 26)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(25), itemIt.next()); assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 36)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(25), itemIt.next()); assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); assertFalse(itemIt.hasNext()); } try (CloseableIterator>> statsIt = statsStore.getAllStatistics(null)) { try (CloseableIterator> statisticValues = statsStore.getStatisticValues(statsIt, null, "aaa", "bbb")) { assertTrue(checkStats(statisticValues, 2, new NumericRange(25, 35))); } } try (CloseableIterator>> statsIt = statsStore.getAllStatistics(null)) { try (CloseableIterator> statisticValues = statsStore.getStatisticValues(statsIt, null)) { assertTrue(checkStats(statisticValues, 0, null)); } } dataStore.delete( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 26)).build()); try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new 
TestQuery(23, 36)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 26)).build())) { assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new DataIdQuery(adapter.getDataId(new Integer(35)))).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); } } @Test public void testMultipleIndices() throws IOException { final Index index1 = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("tm1")); final Index index2 = new IndexImpl( new MockComponents.MockIndexStrategy(), new MockComponents.TestIndexModel("tm2")); final String namespace = "test2_" + getClass().getName(); final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily(); final MemoryRequiredOptions opts = new MemoryRequiredOptions(); opts.setGeoWaveNamespace(namespace); final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(opts); final DataStatisticsStore statsStore = storeFamily.getDataStatisticsStoreFactory().createStore(opts); final DataTypeAdapter adapter = new MockComponents.MockAbstractDataAdapter(); final VisibilityHandler visHandler = new GlobalVisibilityHandler("aaa&bbb"); final List> statistics = Lists.newArrayList(); statistics.add( new NumericRangeStatistic(adapter.getTypeName(), MockAbstractDataAdapter.INTEGER)); dataStore.addType(adapter, statistics, index1, index2); try (final Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) { indexWriter.write(new Integer(25), visHandler); indexWriter.flush(); indexWriter.write(new 
Integer(35), visHandler); indexWriter.flush(); } // authorization check try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index2.getName()).addAuthorization("aaa").constraints( new TestQuery(23, 26)).build())) { assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index1.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 26)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(25), itemIt.next()); assertFalse(itemIt.hasNext()); } // pick an index try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization( "aaa").addAuthorization("bbb").constraints(new TestQuery(23, 36)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(25), itemIt.next()); assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); assertFalse(itemIt.hasNext()); } try (CloseableIterator>> statsIt = statsStore.getAllStatistics(null)) { try (CloseableIterator> statisticValues = statsStore.getStatisticValues(statsIt, null, "aaa", "bbb")) { assertTrue(checkStats(statisticValues, 2, new NumericRange(25, 35))); } } try (CloseableIterator>> statsIt = statsStore.getAllStatistics(null)) { try (CloseableIterator> statisticValues = statsStore.getStatisticValues(statsIt, null)) { assertTrue(checkStats(statisticValues, 0, null)); } } dataStore.delete( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).addAuthorization( "aaa").addAuthorization("bbb").constraints(new TestQuery(23, 26)).build()); try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index1.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 36)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new 
Integer(35), itemIt.next()); assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index2.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 36)).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index1.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 26)).build())) { assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index2.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new TestQuery(23, 26)).build())) { assertFalse(itemIt.hasNext()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index1.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new DataIdQuery(adapter.getDataId(new Integer(35)))).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); } try (CloseableIterator itemIt = dataStore.query( QueryBuilder.newBuilder().addTypeName(adapter.getTypeName()).indexName( index2.getName()).addAuthorization("aaa").addAuthorization("bbb").constraints( new DataIdQuery(adapter.getDataId(new Integer(35)))).build())) { assertTrue(itemIt.hasNext()); assertEquals(new Integer(35), itemIt.next()); } } private boolean checkStats( final Iterator> statIt, final int count, final NumericRange range) { boolean countPassed = false; boolean rangePassed = false; while (statIt.hasNext()) { final StatisticValue stat = statIt.next(); if ((stat instanceof CountValue)) { countPassed = (((CountValue) stat).getValue() == count); } else if ((stat instanceof NumericRangeValue)) { 
rangePassed = range == null ? !((NumericRangeValue) stat).isSet() : ((((NumericRangeValue) stat).getMin() == range.getMin()) && (((NumericRangeValue) stat).getMax() == range.getMax())); } } return countPassed && rangePassed; } private class TestQueryFilter implements QueryFilter { final double min, max; public TestQueryFilter(final double min, final double max) { super(); this.min = min; this.max = max; } @Override public boolean accept( final CommonIndexModel indexModel, final IndexedPersistenceEncoding persistenceEncoding) { final double min = ((CommonIndexedPersistenceEncoding) persistenceEncoding).getNumericData( indexModel.getDimensions()).getDataPerDimension()[0].getMin(); final double max = ((CommonIndexedPersistenceEncoding) persistenceEncoding).getNumericData( indexModel.getDimensions()).getDataPerDimension()[0].getMax(); return !((this.max <= min) || (this.min > max)); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} } private class TestQuery implements QueryConstraints { final double min, max; public TestQuery(final double min, final double max) { super(); this.min = min; this.max = max; } @Override public List createFilters(final Index index) { return Arrays.asList((QueryFilter) new TestQueryFilter(min, max)); } @Override public List getIndexConstraints(final Index index) { return Collections.singletonList( new BasicNumericDataset(new NumericData[] {new NumericRange(min, max)})); } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/memory/MemoryStoreUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.memory; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Test; public class MemoryStoreUtilsTest { @Test public void testVisibility() { assertTrue( MemoryStoreUtils.isAuthorized("aaa&ccc".getBytes(), new String[] {"aaa", "bbb", "ccc"})); assertFalse(MemoryStoreUtils.isAuthorized("aaa&ccc".getBytes(), new String[] {"aaa", "bbb"})); assertTrue( MemoryStoreUtils.isAuthorized( "aaa&(ccc|eee)".getBytes(), new String[] {"aaa", "eee", "xxx"})); assertTrue( MemoryStoreUtils.isAuthorized( "aaa|(ccc&eee)".getBytes(), new String[] {"bbb", "eee", "ccc"})); assertFalse( MemoryStoreUtils.isAuthorized( "aaa|(ccc&eee)".getBytes(), new String[] {"bbb", "dddd", "ccc"})); assertTrue( MemoryStoreUtils.isAuthorized( "aaa|(ccc&eee)".getBytes(), new String[] {"aaa", "dddd", "ccc"})); assertTrue( MemoryStoreUtils.isAuthorized("aaa".getBytes(), new String[] {"aaa", "dddd", "ccc"})); assertFalse( MemoryStoreUtils.isAuthorized("xxx".getBytes(), new String[] {"aaa", "dddd", "ccc"})); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/BasicQueryByClassTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
import org.locationtech.geowave.core.index.IndexDimensionHint;
import org.locationtech.geowave.core.index.IndexMetaData;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges;
import org.locationtech.geowave.core.index.MultiDimensionalCoordinates;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding;
import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;
import org.locationtech.geowave.core.store.data.field.FieldReader;
import org.locationtech.geowave.core.store.data.field.FieldWriter;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.BasicIndexModel;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.locationtech.geowave.core.store.index.CustomNameIndex;
import org.locationtech.geowave.core.store.index.IndexImpl;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass;
import org.locationtech.geowave.core.store.query.filter.QueryFilter;
import com.beust.jcommander.internal.Sets;

// NOTE(review): generic type arguments appear to have been stripped from this file during
// extraction (raw List/Map/Set/FieldWriter usages below) — confirm against upstream source.
public class BasicQueryByClassTest {

  final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz");

  /**
   * Two constraint sets over different dimensions should intersect into a single combined
   * dataset, and that result must survive a toBinary/fromBinary round trip.
   */
  @Test
  public void testIntersectCasesWithPersistence() {
    final Index index =
        new CustomNameIndex(
            new ExampleNumericIndexStrategy(),
            new BasicIndexModel(
                new NumericDimensionField[] {new ExampleDimensionOne(), new ExampleDimensionTwo()}),
            "22");
    final List expectedResults = new ArrayList<>();
    // dimension one from cs1; dimension two is the union envelope of cs1 and cs2a
    expectedResults.add(
        new BasicNumericDataset(
            new NumericData[] {
                new ConstrainedIndexValue(0.3, 0.5),
                new ConstrainedIndexValue(0.1, 0.7)}));
    final ConstraintSet cs1 = new ConstraintSet();
    cs1.addConstraint(
        ExampleDimensionOne.class,
        new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));
    cs1.addConstraint(
        ExampleDimensionTwo.class,
        new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));
    final ConstraintSet cs2a = new ConstraintSet();
    cs2a.addConstraint(
        ExampleDimensionTwo.class,
        new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));
    final ConstraintsByClass constraints =
        new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1));
    assertEquals(
        expectedResults,
        constraints.getIndexConstraints(new IndexImpl(new ExampleNumericIndexStrategy(), null)));
    // persistence round trip must preserve the constraints
    final byte[] image = new BasicQueryByClass(constraints).toBinary();
    final BasicQueryByClass query = new BasicQueryByClass();
    query.fromBinary(image);
    assertEquals(expectedResults, query.getIndexConstraints(index));
  }

  /**
   * Disjoint constraint sets must produce multiple result datasets (an OR), the round-tripped
   * query must reproduce them, and the generated filter must accept/reject encodings
   * accordingly.
   */
  @Test
  public void testDisjointCasesWithPersistence() {
    final List expectedResults = new ArrayList<>();
    expectedResults.add(
        new BasicNumericDataset(
            new NumericData[] {
                new ConstrainedIndexValue(0.3, 0.7),
                new ConstrainedIndexValue(0.1, 2.3)}));
    expectedResults.add(
        new BasicNumericDataset(
            new NumericData[] {
                new ConstrainedIndexValue(0.3, 0.7),
                new ConstrainedIndexValue(3.4, 3.7)}));
    final ConstraintSet cs1 = new ConstraintSet();
    cs1.addConstraint(
        ExampleDimensionOne.class,
        new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));
    cs1.addConstraint(
        ExampleDimensionOne.class,
        new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));
    final ConstraintSet cs2a = new ConstraintSet();
    cs2a.addConstraint(
        ExampleDimensionTwo.class,
        new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));
    cs2a.addConstraint(
        ExampleDimensionTwo.class,
        new ConstraintData(new ConstrainedIndexValue(2.1, 2.3), true));
    final ConstraintSet cs2b = new ConstraintSet();
    cs2b.addConstraint(
        ExampleDimensionTwo.class,
        new ConstraintData(new ConstrainedIndexValue(3.4, 3.7), true));
    final ConstraintsByClass constraints =
        new ConstraintsByClass(Arrays.asList(cs2a, cs2b)).merge(Collections.singletonList(cs1));
    assertEquals(
        expectedResults,
        constraints.getIndexConstraints(new IndexImpl(new ExampleNumericIndexStrategy(), null)));
    final byte[] image = new BasicQueryByClass(constraints).toBinary();
    final BasicQueryByClass query = new BasicQueryByClass();
    query.fromBinary(image);
    final Index index =
        new CustomNameIndex(
            new ExampleNumericIndexStrategy(),
            new BasicIndexModel(
                new NumericDimensionField[] {new ExampleDimensionOne(), new ExampleDimensionTwo()}),
            "22");
    assertEquals(expectedResults, query.getIndexConstraints(index));
    final List filters = query.createFilters(index);
    assertEquals(1, filters.size());
    final Map fieldIdToValueMap = new HashMap<>();
    // point (0.4, 0.5) falls inside the first disjoint region -> accepted
    fieldIdToValueMap.put("one", new ConstrainedIndexValue(0.4, 0.4));
    fieldIdToValueMap.put("two", new ConstrainedIndexValue(0.5, 0.5));
    final CommonIndexModel model = null;
    assertTrue(
        filters.get(0).accept(
            model,
            new CommonIndexedPersistenceEncoding(
                (short) 1,
                StringUtils.stringToBinary("data"),
                StringUtils.stringToBinary("partition"),
                StringUtils.stringToBinary("sort"),
                1, // duplicate count
                new MultiFieldPersistentDataset(fieldIdToValueMap),
                null)));
    // dimension one out of range -> rejected
    fieldIdToValueMap.put("one", new ConstrainedIndexValue(0.1, 0.1));
    assertFalse(
        filters.get(0).accept(
            model,
            new CommonIndexedPersistenceEncoding(
                (short) 1,
                StringUtils.stringToBinary("data"),
                StringUtils.stringToBinary("partition"),
                StringUtils.stringToBinary("sort"),
                1, // duplicate count
                new MultiFieldPersistentDataset(fieldIdToValueMap),
                null)));
    // dimension two between the two regions -> rejected
    fieldIdToValueMap.put("one", new ConstrainedIndexValue(0.4, 0.4));
    fieldIdToValueMap.put("two", new ConstrainedIndexValue(5.0, 5.0));
    assertFalse(
        filters.get(0).accept(
            model,
            new CommonIndexedPersistenceEncoding(
                (short) 1,
                StringUtils.stringToBinary("data"),
                StringUtils.stringToBinary("partition"),
                StringUtils.stringToBinary("sort"),
                1, // duplicate count
                new MultiFieldPersistentDataset(fieldIdToValueMap),
                null)));
    /** Tests the 'OR' Case */
    // (0.4, 3.5) falls in the second disjoint region -> accepted
    fieldIdToValueMap.put("two", new ConstrainedIndexValue(3.5, 3.5));
    assertTrue(
        filters.get(0).accept(
            model,
            new CommonIndexedPersistenceEncoding(
                (short) 1,
                StringUtils.stringToBinary("data"),
                StringUtils.stringToBinary("partition"),
                StringUtils.stringToBinary("sort"),
                1, // duplicate count
                new MultiFieldPersistentDataset(fieldIdToValueMap),
                null)));
  }

  /**
   * Stub index strategy used only to supply the two example dimension definitions; all
   * range/insertion operations are unimplemented (return null/empty).
   */
  public static class ExampleNumericIndexStrategy implements NumericIndexStrategy {

    @Override
    public byte[] toBinary() {
      return null;
    }

    @Override
    public void fromBinary(final byte[] bytes) {}

    @Override
    public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {
      return new NumericDimensionDefinition[] {
          new ExampleDimensionOne(), new ExampleDimensionTwo()};
    }

    @Override
    public String getId() {
      return "test-bqt";
    }

    @Override
    public double[] getHighestPrecisionIdRangePerDimension() {
      return null;
    }

    @Override
    public List createMetaData() {
      return Collections.emptyList();
    }

    @Override
    public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
        final MultiDimensionalNumericData dataRange,
        final IndexMetaData... hints) {
      return null;
    }

    @Override
    public QueryRanges getQueryRanges(
        final MultiDimensionalNumericData indexedRange,
        final IndexMetaData... hints) {
      return null;
    }

    @Override
    public QueryRanges getQueryRanges(
        final MultiDimensionalNumericData indexedRange,
        final int maxEstimatedRangeDecomposition,
        final IndexMetaData... hints) {
      return null;
    }

    @Override
    public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
      return null;
    }

    @Override
    public InsertionIds getInsertionIds(
        final MultiDimensionalNumericData indexedData,
        final int maxEstimatedDuplicateIds) {
      return null;
    }

    @Override
    public MultiDimensionalNumericData getRangeForId(
        final byte[] partitionKey,
        final byte[] sortKey) {
      return null;
    }

    @Override
    public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
      return null;
    }

    @Override
    public byte[][] getQueryPartitionKeys(
        final MultiDimensionalNumericData queryData,
        final IndexMetaData... hints) {
      return null;
    }

    @Override
    public MultiDimensionalCoordinates getCoordinatesPerDimension(
        final byte[] partitionKey,
        final byte[] sortKey) {
      return null;
    }

    @Override
    public int getPartitionKeyLength() {
      return 0;
    }
  }

  /** A NumericRange that doubles as the indexed field value in these tests. */
  public static class ConstrainedIndexValue extends NumericRange {

    // required for Serializable NumericRange subclass
    private static final long serialVersionUID = 1L;

    public ConstrainedIndexValue(final double min, final double max) {
      super(min, max);
      //
    }
  }

  /** Identity-normalizing example dimension named "one" with full range [0, 10]. */
  public static class ExampleDimensionOne implements NumericDimensionField {

    public ExampleDimensionOne() {}

    @Override
    public double getRange() {
      return 10;
    }

    @Override
    public double normalize(final double value) {
      return value;
    }

    @Override
    public double denormalize(final double value) {
      return value;
    }

    @Override
    public BinRange[] getNormalizedRanges(final NumericData range) {
      return new BinRange[] {new BinRange(range.getMin(), range.getMax())};
    }

    @Override
    public NumericRange getDenormalizedRange(final BinRange range) {
      return new NumericRange(range.getNormalizedMin(), range.getNormalizedMax());
    }

    @Override
    public int getFixedBinIdSize() {
      return 0;
    }

    @Override
    public NumericRange getBounds() {
      return null;
    }

    @Override
    public NumericData getFullRange() {
      return new NumericRange(0, 10);
    }

    @Override
    public byte[] toBinary() {
      return new byte[0];
    }

    @Override
    public void fromBinary(final byte[] bytes) {}

    @Override
    public NumericData getNumericData(final ConstrainedIndexValue dataElement) {
      return dataElement;
    }

    @Override
    public String getFieldName() {
      return "one";
    }

    @Override
    public FieldWriter getWriter() {
      return null;
    }

    @Override
    public FieldReader getReader() {
      return null;
    }

    @Override
    public NumericDimensionDefinition getBaseDefinition() {
      return this;
    }

    @Override
    public boolean isCompatibleWith(final Class clazz) {
      return ConstrainedIndexValue.class.isAssignableFrom(clazz);
    }

    @Override
    public Class getFieldClass() {
      return ConstrainedIndexValue.class;
    }

    @Override
    public Set getDimensionHints() {
      return Sets.newHashSet();
    }
  }

  /** Same behavior as {@link ExampleDimensionOne}, but named "two". */
  public static class ExampleDimensionTwo extends ExampleDimensionOne {

    public ExampleDimensionTwo() {
      super();
    }

    @Override
    public String getFieldName() {
      return "two";
    }
  }

  /** Same behavior as {@link ExampleDimensionOne}, but named "three". */
  public static class ExampleDimensionThree extends ExampleDimensionOne {

    public ExampleDimensionThree() {
      super();
    }

    @Override
    public String getFieldName() {
      return "three";
    }
  }
}

================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/AbstractAggregationTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.util.List; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; public abstract class AbstractAggregationTest { /** * Aggregate given objects into a given aggregation. * * Internally, this splits the objectsToAggregate and gives it to separate aggregations, and * returns the merged results. * * @param aggregation The aggregation to give data to for testing. * @param objectsToAggregate The test data to feed into the aggregation * @return The results of aggregating the data. */ @SuppressWarnings({"unchecked", "rawtypes"}) protected

R aggregateObjects( final DataTypeAdapter adapter, final Aggregation aggregation, final List objectsToAggregate) { final byte[] aggregationBytes = PersistenceUtils.toBinary(aggregation); final byte[] aggregationParameters = PersistenceUtils.toBinary(aggregation.getParameters()); final Aggregation agg1 = (Aggregation) PersistenceUtils.fromBinary(aggregationBytes); final Aggregation agg2 = (Aggregation) PersistenceUtils.fromBinary(aggregationBytes); agg1.setParameters((P) PersistenceUtils.fromBinary(aggregationParameters)); agg2.setParameters((P) PersistenceUtils.fromBinary(aggregationParameters)); for (int i = 0; i < objectsToAggregate.size(); i++) { if ((i % 2) == 0) { agg1.aggregate(adapter, objectsToAggregate.get(i)); } else { agg2.aggregate(adapter, objectsToAggregate.get(i)); } } final byte[] agg1ResultBinary = agg1.resultToBinary(agg1.getResult()); final byte[] agg2ResultBinary = agg2.resultToBinary(agg2.getResult()); final R agg1Result = agg1.resultFromBinary(agg1ResultBinary); final R agg2Result = agg2.resultFromBinary(agg2ResultBinary); return aggregation.merge(agg1Result, agg2Result); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/AbstractCommonIndexAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import java.util.List; import org.locationtech.geowave.core.store.adapter.MockComponents; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset; import org.locationtech.geowave.core.store.data.PersistentDataset; import org.locationtech.geowave.core.store.data.SingleFieldPersistentDataset; import com.google.common.collect.Lists; public abstract class AbstractCommonIndexAggregationTest extends AbstractAggregationTest { public static List generateObjects(final int count) { final List objects = Lists.newArrayListWithCapacity(count); for (int i = 0; i < count; i++) { final String dataId = "entry" + i; final PersistentDataset commonData = new MultiFieldPersistentDataset<>(); commonData.addValue("value", new MockComponents.TestIndexFieldType(i)); objects.add( new CommonIndexedPersistenceEncoding( (short) 0, dataId.getBytes(), new byte[0], new byte[0], 0, commonData, new SingleFieldPersistentDataset())); } return objects; } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregationOptionsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.assertThat; import org.junit.Test; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.BinningStrategy; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveRow; public class BinningAggregationOptionsTest { @Test public void testSerialization() { BinningAggregationOptions opts = new BinningAggregationOptions<>(new byte[0], null, null, 1234); assertThat(opts.baseBytes, is(new byte[0])); assertThat(opts.baseParamBytes, is(nullValue())); assertThat(opts.binningStrategy, is(nullValue())); assertThat(opts.maxBins, is(1234)); byte[] serialized = PersistenceUtils.toBinary(opts); BinningAggregationOptions roundtripped = (BinningAggregationOptions) PersistenceUtils.fromBinary(serialized); assertThat(opts.baseBytes, is(roundtripped.baseBytes)); assertThat(opts.baseParamBytes, is(roundtripped.baseParamBytes)); assertThat(opts.binningStrategy, is(roundtripped.binningStrategy)); assertThat(opts.maxBins, is(roundtripped.maxBins)); final BinningStrategy blankStrategy = new BinningStrategy() { @Override public ByteArray[] getBins( final DataTypeAdapter type, final T entry, final GeoWaveRow... 
rows) { return new ByteArray[0]; } @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) { } }; opts = new BinningAggregationOptions<>( new byte[] {0xC, 0xA, 0xF, 0xE, 0xB, 0xA, 0xB, 0xE}, new byte[0], blankStrategy, Integer.MAX_VALUE); serialized = PersistenceUtils.toBinary(opts); roundtripped = (BinningAggregationOptions) PersistenceUtils.fromBinary(serialized); assertThat(opts.baseBytes, is(roundtripped.baseBytes)); assertThat(opts.baseParamBytes, is(notNullValue())); assertThat(opts.binningStrategy, is(notNullValue())); assertThat(opts.maxBins, is(roundtripped.maxBins)); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/BinningAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import static org.junit.Assert.assertEquals; import java.util.Map; import java.util.UUID; import org.junit.Test; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.BinningStrategy; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import com.google.common.collect.ImmutableMap; public class BinningAggregationTest extends AbstractAggregationTest { // place all entries into separate bins. private static final BinningStrategy randomBinStrategy = new BinningStrategy() { @Override public byte[] toBinary() { return new byte[0]; } @Override public void fromBinary(final byte[] bytes) {} @Override public ByteArray[] getBins( final DataTypeAdapter type, final T entry, final GeoWaveRow... 
rows) { return new ByteArray[] {new ByteArray(UUID.randomUUID().toString())}; } }; @Test public void testAggregate() { final BinningAggregation agg = new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1); agg.aggregate(null, null); Map result = agg.getResult(); assertEquals(1, result.size()); agg.aggregate(null, null); result = agg.getResult(); assertEquals(2, result.size()); agg.clearResult(); agg.aggregate(null, null); result = agg.getResult(); assertEquals(1, result.size()); } @Test public void testResultSerialization() { final BinningAggregation agg = new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1); agg.aggregate(null, null); agg.aggregate(null, null); final Map result = agg.getResult(); final byte[] serResult = agg.resultToBinary(result); final Map deserResult = agg.resultFromBinary(serResult); // must iterate through both in case one is simply a subset of the other. for (final Map.Entry resEntry : result.entrySet()) { assertEquals(resEntry.getValue(), deserResult.get(resEntry.getKey())); } for (final Map.Entry deserEntry : result.entrySet()) { assertEquals(deserEntry.getValue(), result.get(deserEntry.getKey())); } } @Test public void testMerge() { final BinningAggregation agg = new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1); final Map res1 = ImmutableMap.of(new ByteArray("0"), 3L, new ByteArray("1"), 2L); final Map res2 = ImmutableMap.of(new ByteArray("0"), 2L, new ByteArray("1"), 3L); // relies on CountAggregation#merge, which adds the values. 
final Map merged = agg.merge(res1, res2); assertEquals(5L, merged.get(new ByteArray("0")).longValue()); assertEquals(5L, merged.get(new ByteArray("1")).longValue()); } @Test public void testFullSerialization() { final BinningAggregation agg = new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1); final byte[] serialized = PersistenceUtils.toBinary(agg); final BinningAggregationOptions params = agg.getParameters(); final BinningAggregation roundtrip = (BinningAggregation) PersistenceUtils.fromBinary( serialized); roundtrip.setParameters(params); // ensure that roundtrip can still properly instantiate the objects that it needs to on the fly. final Map res1 = ImmutableMap.of(new ByteArray("0"), 3L, new ByteArray("1"), 2L); final Map res2 = ImmutableMap.of(new ByteArray("0"), 2L, new ByteArray("1"), 3L); final Map merged = roundtrip.merge(res1, res2); assertEquals(5L, merged.get(new ByteArray("0")).longValue()); assertEquals(5L, merged.get(new ByteArray("1")).longValue()); roundtrip.aggregate(null, null); roundtrip.aggregate(null, null); roundtrip.aggregate(null, null); assertEquals(3, roundtrip.getResult().size()); } @Test public void testMaxBins() { final BinningAggregation agg = new BinningAggregation<>(new CountAggregation(), randomBinStrategy, -1); for (int i = 0; i < 12336; i++) { agg.aggregate(null, null); } assertEquals(12336, agg.getResult().size()); final BinningAggregation boundedAgg = new BinningAggregation<>(new CountAggregation(), randomBinStrategy, 12); for (int i = 0; i < 2000; i++) { boundedAgg.aggregate(null, null); } assertEquals(12, boundedAgg.getResult().size()); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/aggregate/CountAggregationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.aggregate; import static org.junit.Assert.assertEquals; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.store.data.CommonIndexedPersistenceEncoding; public class CountAggregationTest extends AbstractCommonIndexAggregationTest { @Test public void testCountAggregation() { final Long expectedCount = 42L; final List encodings = generateObjects(expectedCount.intValue()); final Long result = aggregateObjects(null, new CountAggregation(), encodings); assertEquals(expectedCount, result); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/filter/DistributedQueryFilterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.query.filter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.locationtech.geowave.core.index.numeric.BasicNumericDataset;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.locationtech.geowave.core.index.numeric.NumericValue;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.query.BasicQueryByClassTest;
import org.locationtech.geowave.core.store.query.filter.BasicQueryFilter.BasicQueryCompareOperation;

/**
 * Verifies that FilterList round-trips through toBinary()/fromBinary(), preserving the logicalAnd
 * flag plus the compare operation and constraints of each contained BasicQueryFilter.
 */
public class DistributedQueryFilterTest {
  @Test
  public void test() {
    // NOTE(review): generic type arguments were stripped in this extract (raw List/FilterList) —
    // presumably List<QueryFilter>; confirm against the original source.
    List filters = new ArrayList<>();
    // Case 1: CONTAINS filter over a single point constraint (0.4) plus a dedupe filter,
    // combined with logical OR (logicalAnd = false).
    filters.add(
        new BasicQueryFilter(
            new BasicNumericDataset(new NumericData[] {new NumericValue(0.4)}),
            new NumericDimensionField[] {new BasicQueryByClassTest.ExampleDimensionOne()},
            BasicQueryCompareOperation.CONTAINS));
    filters.add(new DedupeFilter());
    FilterList list = new FilterList(false, filters);
    // Round-trip through the serialized form; all assertions below run on the decoded state.
    list.fromBinary(list.toBinary());
    assertFalse(list.logicalAnd);
    assertEquals(
        ((BasicQueryFilter) list.filters.get(0)).compareOp,
        BasicQueryCompareOperation.CONTAINS);
    // The point constraint NumericValue(0.4) is expected to round-trip as the degenerate
    // range [0.4, 0.4].
    assertEquals(
        ((BasicQueryFilter) list.filters.get(0)).constraints,
        new BasicNumericDataset(new NumericData[] {new NumericRange(0.4, 0.4)}));
    // Case 2: same round-trip with INTERSECTS and logical AND (logicalAnd = true).
    filters = new ArrayList<>();
    filters.add(
        new BasicQueryFilter(
            new BasicNumericDataset(new NumericData[] {new NumericValue(0.5)}),
            new NumericDimensionField[] {new BasicQueryByClassTest.ExampleDimensionOne()},
            BasicQueryCompareOperation.INTERSECTS));
    filters.add(new DedupeFilter());
    list = new FilterList(true, filters);
    list.fromBinary(list.toBinary());
    assertTrue(list.logicalAnd);
    assertEquals(
        ((BasicQueryFilter) list.filters.get(0)).compareOp,
        BasicQueryCompareOperation.INTERSECTS);
    assertEquals(
        ((BasicQueryFilter) list.filters.get(0)).constraints,
        new BasicNumericDataset(new NumericData[] {new NumericRange(0.5, 0.5)}));
  }
}



================================================
FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/filter/expression/FilterExpressionTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.net.URI; import java.net.URISyntaxException; import java.util.Date; import org.junit.Test; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestType; import org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapterTest.TestTypeBasicDataAdapter; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Abs; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Add; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract; import org.locationtech.geowave.core.store.query.filter.expression.text.Concat; import 
org.locationtech.geowave.core.store.query.filter.expression.text.Contains; import org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import com.google.common.collect.Lists; public class FilterExpressionTest { private static final double EPSILON = 0.0000001; @Test public void testNumericExpressions() { final DataTypeAdapter adapter = new TestTypeBasicDataAdapter(); final TestType entry = new TestType("test", 1.3, 5, true); final TestType entryNulls = new TestType(null, null, null, null); final NumericLiteral doubleLit = NumericLiteral.of(0.5); final NumericLiteral integerLit = NumericLiteral.of(1); final NumericFieldValue doubleField = NumericFieldValue.of("doubleField"); final NumericFieldValue intField = NumericFieldValue.of("intField"); assertEquals(1.3, (double) doubleField.evaluateValue(adapter, entry), EPSILON); assertEquals(5, intField.evaluateValue(adapter, entry).intValue()); // Test comparisons assertTrue(doubleLit.isLessThan(integerLit).evaluate(adapter, entry)); assertFalse(integerLit.isLessThan(doubleLit).evaluate(adapter, entry)); assertTrue(doubleField.isLessThan(1.5).evaluate(adapter, entry)); assertFalse(doubleField.isLessThan(doubleLit).evaluate(adapter, entry)); assertFalse(doubleField.isLessThan(integerLit).evaluate(adapter, entry)); assertTrue(doubleField.isLessThan(intField).evaluate(adapter, entry)); assertFalse(intField.isLessThan(doubleField).evaluate(adapter, entry)); assertTrue(doubleLit.isGreaterThan(0).evaluate(adapter, entry)); assertFalse(doubleLit.isGreaterThan(1).evaluate(adapter, entry)); assertTrue(intField.isGreaterThan(1.0).evaluate(adapter, entry)); assertTrue(intField.isGreaterThan(doubleLit).evaluate(adapter, entry)); 
assertTrue(intField.isGreaterThan(integerLit).evaluate(adapter, entry)); assertFalse(intField.isGreaterThan(6).evaluate(adapter, entry)); assertTrue(intField.isGreaterThan(doubleField).evaluate(adapter, entry)); assertFalse(doubleField.isGreaterThan(intField).evaluate(adapter, entry)); assertTrue(integerLit.isGreaterThanOrEqualTo(0).evaluate(adapter, entry)); assertTrue(integerLit.isGreaterThanOrEqualTo(integerLit).evaluate(adapter, entry)); assertFalse(integerLit.isGreaterThanOrEqualTo(2).evaluate(adapter, entry)); assertTrue(doubleLit.isLessThanOrEqualTo(1).evaluate(adapter, entry)); assertTrue(doubleLit.isLessThanOrEqualTo(0.5).evaluate(adapter, entry)); assertFalse(doubleLit.isLessThanOrEqualTo(0).evaluate(adapter, entry)); assertTrue(doubleLit.isEqualTo(0.5).evaluate(adapter, entry)); assertFalse(doubleLit.isEqualTo(0.4).evaluate(adapter, entry)); assertTrue(doubleLit.isNotEqualTo(0.4).evaluate(adapter, entry)); assertFalse(doubleLit.isNotEqualTo(0.5).evaluate(adapter, entry)); assertFalse(doubleLit.isNull().evaluate(adapter, entry)); assertFalse(integerLit.isNull().evaluate(adapter, entry)); assertFalse(doubleField.isNull().evaluate(adapter, entry)); assertFalse(intField.isNull().evaluate(adapter, entry)); assertTrue(doubleField.isNull().evaluate(adapter, entryNulls)); assertTrue(intField.isNull().evaluate(adapter, entryNulls)); assertTrue(doubleLit.isNotNull().evaluate(adapter, entry)); assertTrue(integerLit.isNotNull().evaluate(adapter, entry)); assertTrue(doubleField.isNotNull().evaluate(adapter, entry)); assertTrue(intField.isNotNull().evaluate(adapter, entry)); assertFalse(doubleField.isNotNull().evaluate(adapter, entryNulls)); assertFalse(intField.isNotNull().evaluate(adapter, entryNulls)); assertFalse(doubleField.isLessThan(null).evaluate(adapter, entry)); assertFalse(doubleField.isGreaterThan(null).evaluate(adapter, entry)); assertFalse(doubleField.isLessThanOrEqualTo(null).evaluate(adapter, entry)); 
assertFalse(doubleField.isGreaterThanOrEqualTo(null).evaluate(adapter, entry)); assertFalse(doubleField.isEqualTo(null).evaluate(adapter, entry)); assertTrue(doubleField.isNotEqualTo(null).evaluate(adapter, entry)); assertTrue(doubleField.isEqualTo(intField).evaluate(adapter, entryNulls)); assertFalse(doubleField.isEqualTo(doubleLit).evaluate(adapter, entryNulls)); assertFalse(doubleField.isNotEqualTo(null).evaluate(adapter, entryNulls)); assertTrue(doubleField.isNotEqualTo(doubleLit).evaluate(adapter, entryNulls)); assertTrue(doubleLit.isBetween(0, 1).evaluate(adapter, entry)); assertFalse(doubleLit.isBetween(integerLit, intField).evaluate(adapter, entry)); assertTrue(doubleField.isBetween(doubleLit, intField).evaluate(adapter, entry)); assertFalse(doubleField.isBetween(doubleLit, intField).evaluate(adapter, entryNulls)); assertFalse(doubleLit.isBetween(integerLit, intField).evaluate(adapter, entryNulls)); assertFalse(doubleLit.isBetween(intField, integerLit).evaluate(adapter, entryNulls)); assertFalse(intField.isBetween(doubleLit, integerLit).evaluate(adapter, entry)); assertTrue(integerLit.add(1).isLiteral()); assertFalse(intField.add(1).isLiteral()); assertTrue(integerLit.add(doubleLit).isLiteral()); assertFalse(integerLit.add(doubleField).isLiteral()); assertTrue(doubleLit.abs().isLiteral()); assertFalse(doubleField.abs().isLiteral()); // Test math assertNull(doubleField.abs().evaluateValue(adapter, entryNulls)); assertEquals(5.3, (double) NumericLiteral.of(-5.3).abs().evaluateValue(null, null), EPSILON); assertEquals(5.3, (double) NumericLiteral.of(5.3).abs().evaluateValue(null, null), EPSILON); assertEquals( 2.7, (double) doubleField.abs().evaluateValue(adapter, new TestType("test", -2.7, 5, true)), EPSILON); assertEquals( 5, (double) intField.abs().evaluateValue(adapter, new TestType("test", -2.7, 5, true)), EPSILON); assertEquals( 28, (double) NumericLiteral.of(5).add(15).divideBy(4).multiplyBy(8).subtract(12).evaluateValue( null, null), EPSILON); 
assertNull(doubleField.add(1).evaluateValue(adapter, entryNulls)); assertNull(doubleLit.add(intField).evaluateValue(adapter, entryNulls)); assertNull(doubleField.add(intField).evaluateValue(adapter, entryNulls)); // Test complex // ((1.3 + 0.8) * (5 - 1)) / 3.2 assertEquals( 2.625, (double) doubleField.add(0.8).multiplyBy(intField.subtract(integerLit)).divideBy( 3.2).evaluateValue(adapter, entry), EPSILON); try { integerLit.add("test"); fail(); } catch (RuntimeException e) { // Expected } // Test serialization byte[] bytes = PersistenceUtils.toBinary(doubleField.add(5)); final Add add = (Add) PersistenceUtils.fromBinary(bytes); assertTrue(add.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) add.getExpression1()).getFieldName()); assertTrue(add.getExpression2() instanceof NumericLiteral); assertEquals(5L, ((Number) ((NumericLiteral) add.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.subtract(5)); final Subtract subtract = (Subtract) PersistenceUtils.fromBinary(bytes); assertTrue(subtract.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) subtract.getExpression1()).getFieldName()); assertTrue(subtract.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) subtract.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.multiplyBy(5)); final Multiply multiply = (Multiply) PersistenceUtils.fromBinary(bytes); assertTrue(multiply.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) multiply.getExpression1()).getFieldName()); assertTrue(multiply.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) multiply.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.divideBy(null)); final Divide divide = (Divide) PersistenceUtils.fromBinary(bytes); 
assertTrue(divide.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) divide.getExpression1()).getFieldName()); assertTrue(divide.getExpression2() instanceof NumericLiteral); assertNull(((NumericLiteral) divide.getExpression2()).getValue()); bytes = PersistenceUtils.toBinary(doubleField.abs()); final Abs abs = (Abs) PersistenceUtils.fromBinary(bytes); assertTrue(abs.getExpression() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) abs.getExpression()).getFieldName()); bytes = PersistenceUtils.toBinary(doubleField.isLessThan(5)); NumericComparisonOperator compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes); assertEquals(CompareOp.LESS_THAN, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isLessThanOrEqualTo(5)); compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes); assertEquals(CompareOp.LESS_THAN_OR_EQUAL, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isGreaterThan(5)); compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes); assertEquals(CompareOp.GREATER_THAN, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) 
compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isGreaterThanOrEqualTo(5)); compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes); assertEquals(CompareOp.GREATER_THAN_OR_EQUAL, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isEqualTo(5)); compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes); assertEquals(CompareOp.EQUAL_TO, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isNotEqualTo(5)); compareOp = (NumericComparisonOperator) PersistenceUtils.fromBinary(bytes); assertEquals(CompareOp.NOT_EQUAL_TO, compareOp.getCompareOp()); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals( 5L, ((Number) ((NumericLiteral) compareOp.getExpression2()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isBetween(5, 10)); final Between between = (Between) PersistenceUtils.fromBinary(bytes); assertTrue(between.getValue() instanceof 
NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) between.getValue()).getFieldName()); assertTrue(between.getLowerBound() instanceof NumericLiteral); assertEquals(5L, ((Number) ((NumericLiteral) between.getLowerBound()).getValue()).longValue()); assertTrue(between.getUpperBound() instanceof NumericLiteral); assertEquals(10L, ((Number) ((NumericLiteral) between.getUpperBound()).getValue()).longValue()); bytes = PersistenceUtils.toBinary(doubleField.isNull()); final IsNull isNull = (IsNull) PersistenceUtils.fromBinary(bytes); assertTrue(isNull.getExpression() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) isNull.getExpression()).getFieldName()); bytes = PersistenceUtils.toBinary(doubleField.isNotNull()); final IsNotNull isNotNull = (IsNotNull) PersistenceUtils.fromBinary(bytes); assertTrue(isNotNull.getExpression() instanceof NumericFieldValue); assertEquals("doubleField", ((NumericFieldValue) isNotNull.getExpression()).getFieldName()); try { NumericFieldValue.of("name").evaluateValue(adapter, entry); fail(); } catch (RuntimeException e) { // expected } } @Test public void testTextExpressions() { final DataTypeAdapter adapter = new TestTypeBasicDataAdapter(); final TestType entry = new TestType("test", 1.3, 5, true); final TestType entryNulls = new TestType(null, null, null, null); final TextLiteral textLit = TextLiteral.of("text"); final TextLiteral valueLit = TextLiteral.of("value"); final TextFieldValue textField = TextFieldValue.of("name"); assertEquals("test", textField.evaluateValue(adapter, entry)); assertNull(textField.evaluateValue(adapter, entryNulls)); // Test comparisons assertTrue(textLit.isLessThan(valueLit).evaluate(adapter, entry)); assertFalse(valueLit.isLessThan(textLit).evaluate(adapter, entry)); assertTrue(textLit.isLessThan("tfxt").evaluate(adapter, entry)); assertFalse(textLit.isLessThan("text").evaluate(adapter, entry)); assertTrue(textField.isLessThan(textLit).evaluate(adapter, entry)); 
assertFalse(valueLit.isLessThan(TextFieldValue.of("name")).evaluate(adapter, entry)); assertFalse(textField.isLessThan(textLit).evaluate(adapter, entryNulls)); assertFalse(textLit.isGreaterThan(valueLit).evaluate(adapter, entry)); assertTrue(valueLit.isGreaterThan(textLit).evaluate(adapter, entry)); assertFalse(textLit.isGreaterThan("text").evaluate(adapter, entry)); assertTrue(textLit.isGreaterThan("tdxt").evaluate(adapter, entry)); assertFalse(textField.isGreaterThan(textLit).evaluate(adapter, entry)); assertTrue(valueLit.isGreaterThan(TextFieldValue.of("name")).evaluate(adapter, entry)); assertFalse(textField.isGreaterThan(textLit).evaluate(adapter, entryNulls)); assertTrue(textLit.isLessThanOrEqualTo(valueLit).evaluate(adapter, entry)); assertFalse(valueLit.isLessThanOrEqualTo(textLit).evaluate(adapter, entry)); assertTrue(textLit.isLessThanOrEqualTo("tfxt").evaluate(adapter, entry)); assertFalse(textLit.isLessThanOrEqualTo("test").evaluate(adapter, entry)); assertTrue(textField.isLessThanOrEqualTo(textLit).evaluate(adapter, entry)); assertFalse(valueLit.isLessThanOrEqualTo(textField).evaluate(adapter, entry)); assertTrue(valueLit.isLessThanOrEqualTo("value").evaluate(adapter, entry)); assertFalse(textLit.isGreaterThanOrEqualTo(valueLit).evaluate(adapter, entry)); assertTrue(valueLit.isGreaterThanOrEqualTo(textLit).evaluate(adapter, entry)); assertTrue(textLit.isGreaterThanOrEqualTo("text").evaluate(adapter, entry)); assertTrue(textLit.isGreaterThanOrEqualTo("tdxt").evaluate(adapter, entry)); assertFalse(textField.isGreaterThanOrEqualTo(textLit).evaluate(adapter, entry)); assertTrue(valueLit.isGreaterThanOrEqualTo(textField).evaluate(adapter, entry)); assertTrue(textField.isGreaterThanOrEqualTo("test").evaluate(adapter, entry)); assertTrue(textField.isEqualTo("test").evaluate(adapter, entry)); assertFalse(textField.isEqualTo("TEST").evaluate(adapter, entry)); assertTrue(textField.isEqualTo("TEST", true).evaluate(adapter, entry)); 
assertFalse(textField.isEqualTo(textLit).evaluate(adapter, entry)); assertFalse(textField.isNotEqualTo("test").evaluate(adapter, entry)); assertTrue(textField.isNotEqualTo("TEST").evaluate(adapter, entry)); assertFalse(textField.isNotEqualTo("TEST", true).evaluate(adapter, entry)); assertTrue(textField.isNotEqualTo("TFST", true).evaluate(adapter, entry)); assertTrue(textField.isNotEqualTo(textLit).evaluate(adapter, entry)); assertFalse(textLit.isNull().evaluate(adapter, entry)); assertFalse(valueLit.isNull().evaluate(adapter, entry)); assertFalse(textField.isNull().evaluate(adapter, entry)); assertTrue(textField.isNull().evaluate(adapter, entryNulls)); assertTrue(textLit.isNotNull().evaluate(adapter, entry)); assertTrue(valueLit.isNotNull().evaluate(adapter, entry)); assertTrue(textField.isNotNull().evaluate(adapter, entry)); assertFalse(textField.isNotNull().evaluate(adapter, entryNulls)); assertFalse(textField.isLessThan(null).evaluate(adapter, entry)); assertFalse(textField.isGreaterThan(null).evaluate(adapter, entry)); assertFalse(textField.isLessThanOrEqualTo(null).evaluate(adapter, entry)); assertFalse(textField.isGreaterThanOrEqualTo(null).evaluate(adapter, entry)); assertFalse(textField.isEqualTo(null).evaluate(adapter, entry)); assertTrue(textField.isNotEqualTo(null).evaluate(adapter, entry)); assertTrue(textField.isEqualTo(textField).evaluate(adapter, entryNulls)); assertFalse(textField.isEqualTo(textLit).evaluate(adapter, entryNulls)); assertFalse(textField.isNotEqualTo(null).evaluate(adapter, entryNulls)); assertTrue(textField.isNotEqualTo(valueLit).evaluate(adapter, entryNulls)); assertTrue(textField.isBetween("a", "z").evaluate(adapter, entry)); assertFalse(textLit.isBetween("u", "z").evaluate(adapter, entry)); assertTrue(textLit.isBetween(textField, valueLit).evaluate(adapter, entry)); assertFalse(textField.isBetween(textLit, valueLit).evaluate(adapter, entryNulls)); assertFalse(textLit.isBetween(valueLit, textField).evaluate(adapter, entryNulls)); 
assertFalse(textLit.isBetween(textField, valueLit).evaluate(adapter, entryNulls)); assertFalse(valueLit.isBetween(textLit, textField).evaluate(adapter, entry)); assertTrue(textLit.isLiteral()); assertFalse(textField.isLiteral()); assertTrue(textLit.concat(valueLit).isLiteral()); assertFalse(textLit.concat(textField).isLiteral()); assertFalse(textField.concat(textLit).isLiteral()); // Test functions assertEquals("textvalue", textLit.concat(valueLit).evaluateValue(adapter, entry)); assertEquals("text", textLit.concat(textField).evaluateValue(adapter, entryNulls)); assertEquals("text", textField.concat(textLit).evaluateValue(adapter, entryNulls)); assertEquals("text", textLit.concat(null).evaluateValue(adapter, entry)); assertEquals("text1.5", textLit.concat(1.5).evaluateValue(adapter, entry)); assertTrue(textLit.contains("ex").evaluate(adapter, entry)); assertFalse(textLit.contains("EX").evaluate(adapter, entry)); assertTrue(textLit.contains("EX", true).evaluate(adapter, entry)); assertFalse(textField.contains(null).evaluate(adapter, entry)); assertFalse(textField.contains("es").evaluate(adapter, entryNulls)); assertTrue(textField.contains("test").evaluate(adapter, entry)); assertTrue(textLit.startsWith("tex").evaluate(adapter, entry)); assertFalse(textLit.startsWith("TEX").evaluate(adapter, entry)); assertTrue(textLit.startsWith("TEX", true).evaluate(adapter, entry)); assertFalse(textField.startsWith(null).evaluate(adapter, entry)); assertFalse(textField.startsWith("tes").evaluate(adapter, entryNulls)); assertTrue(textField.startsWith("test").evaluate(adapter, entry)); assertTrue(textLit.endsWith("xt").evaluate(adapter, entry)); assertFalse(textLit.endsWith("XT").evaluate(adapter, entry)); assertTrue(textLit.endsWith("XT", true).evaluate(adapter, entry)); assertFalse(textField.endsWith(null).evaluate(adapter, entry)); assertFalse(textField.endsWith("st").evaluate(adapter, entryNulls)); assertTrue(textField.endsWith("test").evaluate(adapter, entry)); // Test 
serialization byte[] bytes = PersistenceUtils.toBinary(textField.concat("test")); final Concat concat = (Concat) PersistenceUtils.fromBinary(bytes); assertTrue(concat.getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) concat.getExpression1()).getFieldName()); assertTrue(concat.getExpression2() instanceof TextLiteral); assertEquals("test", (String) ((TextLiteral) concat.getExpression2()).getValue()); bytes = PersistenceUtils.toBinary(textField.contains("test", true)); final Contains contains = (Contains) PersistenceUtils.fromBinary(bytes); assertTrue(contains.isIgnoreCase()); assertTrue(contains.getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) contains.getExpression1()).getFieldName()); assertTrue(contains.getExpression2() instanceof TextLiteral); assertEquals("test", (String) ((TextLiteral) contains.getExpression2()).getValue()); bytes = PersistenceUtils.toBinary(textField.endsWith("test")); final EndsWith endsWith = (EndsWith) PersistenceUtils.fromBinary(bytes); assertFalse(endsWith.isIgnoreCase()); assertTrue(endsWith.getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) endsWith.getExpression1()).getFieldName()); assertTrue(endsWith.getExpression2() instanceof TextLiteral); assertEquals("test", (String) ((TextLiteral) endsWith.getExpression2()).getValue()); bytes = PersistenceUtils.toBinary(textField.startsWith(null)); final StartsWith startsWith = (StartsWith) PersistenceUtils.fromBinary(bytes); assertFalse(startsWith.isIgnoreCase()); assertTrue(startsWith.getExpression1() instanceof TextFieldValue); assertEquals("name", ((TextFieldValue) startsWith.getExpression1()).getFieldName()); assertTrue(startsWith.getExpression2() instanceof TextLiteral); assertNull(((TextLiteral) startsWith.getExpression2()).getValue()); } @Test public void testBooleanExpressions() { final DataTypeAdapter adapter = new TestTypeBasicDataAdapter(); final TestType entry = new TestType("test", 
1.3, 5, true); final TestType entryFalse = new TestType("test", 1.3, 0, false); final TestType entryNulls = new TestType(null, null, null, null); final BooleanLiteral trueLit = BooleanLiteral.of(true); final BooleanLiteral falseLit = BooleanLiteral.of(false); final BooleanLiteral stringLit = BooleanLiteral.of("test"); final BooleanLiteral nullLit = BooleanLiteral.of(null); final BooleanLiteral numberTrueLit = BooleanLiteral.of(1); final BooleanLiteral numberFalseLit = BooleanLiteral.of(0); final BooleanFieldValue booleanField = BooleanFieldValue.of("boolField"); final BooleanFieldValue booleanIntField = BooleanFieldValue.of("intField"); final BooleanFieldValue booleanStrField = BooleanFieldValue.of("name"); assertTrue(trueLit.evaluate(adapter, entry)); assertFalse(falseLit.evaluate(adapter, entry)); assertTrue(stringLit.evaluate(adapter, entry)); assertFalse(nullLit.evaluate(adapter, entry)); assertTrue(numberTrueLit.evaluate(adapter, entry)); assertFalse(numberFalseLit.evaluate(adapter, entry)); assertTrue(booleanField.evaluate(adapter, entry)); assertFalse(booleanField.evaluate(adapter, entryNulls)); assertTrue(trueLit.and(stringLit).evaluate(adapter, entry)); assertFalse(falseLit.and(trueLit).evaluate(adapter, entry)); assertTrue(falseLit.or(trueLit).evaluate(adapter, entry)); assertTrue(trueLit.isEqualTo(true).evaluate(adapter, entry)); assertFalse(trueLit.isEqualTo(false).evaluate(adapter, entry)); assertTrue(falseLit.isNotEqualTo(true).evaluate(adapter, entry)); assertFalse(falseLit.isNotEqualTo(false).evaluate(adapter, entry)); assertTrue(booleanStrField.evaluate(adapter, entry)); assertFalse(booleanStrField.evaluate(adapter, entryNulls)); assertFalse(booleanField.evaluate(adapter, entryFalse)); assertTrue(booleanIntField.evaluate(adapter, entry)); assertFalse(booleanIntField.evaluate(adapter, entryFalse)); assertFalse(booleanIntField.evaluate(adapter, entryNulls)); } @Test public void testFilters() { final DataTypeAdapter adapter = new 
TestTypeBasicDataAdapter(); final TestType entry = new TestType("test", 1.3, 5, true); final NumericFieldValue doubleField = NumericFieldValue.of("doubleField"); final NumericFieldValue intField = NumericFieldValue.of("intField"); final TextFieldValue textField = TextFieldValue.of("name"); // Test And assertTrue( doubleField.isLessThan(2).and(textField.concat("oreo").contains("store")).evaluate( adapter, entry)); assertFalse( intField.isGreaterThan(doubleField).and(intField.isGreaterThan(10)).evaluate( adapter, entry)); assertFalse(doubleField.isEqualTo(intField).and(intField.isNotNull()).evaluate(adapter, entry)); assertFalse(textField.contains("val").and(intField.isLessThan(0)).evaluate(adapter, entry)); // Test Or assertTrue( doubleField.isLessThan(2).or(textField.concat("oreo").contains("store")).evaluate( adapter, entry)); assertTrue( intField.isGreaterThan(doubleField).or(intField.isGreaterThan(10)).evaluate( adapter, entry)); assertTrue(doubleField.isEqualTo(intField).or(intField.isNotNull()).evaluate(adapter, entry)); assertFalse(textField.contains("val").or(intField.isLessThan(0)).evaluate(adapter, entry)); // Test Not assertFalse(Filter.not(doubleField.isLessThan(2)).evaluate(adapter, entry)); assertFalse( Filter.not( doubleField.isLessThan(2).and(textField.concat("oreo").contains("store"))).evaluate( adapter, entry)); assertTrue( Filter.not(intField.isGreaterThan(doubleField).and(intField.isGreaterThan(10))).evaluate( adapter, entry)); assertTrue( Filter.not(doubleField.isEqualTo(intField).and(intField.isNotNull())).evaluate( adapter, entry)); assertTrue( Filter.not(textField.contains("val").and(intField.isLessThan(0))).evaluate(adapter, entry)); // Test include/exclude assertTrue(Filter.include().evaluate(null, null)); assertFalse(Filter.exclude().evaluate(null, null)); // Test serialization byte[] bytes = PersistenceUtils.toBinary(textField.contains("test").and(intField.isLessThan(1L))); final And and = (And) PersistenceUtils.fromBinary(bytes); 
assertEquals(2, and.getChildren().length); assertTrue(and.getChildren()[0] instanceof Contains); assertTrue(((Contains) and.getChildren()[0]).getExpression1() instanceof TextFieldValue); assertEquals( "name", ((TextFieldValue) ((Contains) and.getChildren()[0]).getExpression1()).getFieldName()); assertTrue(((Contains) and.getChildren()[0]).getExpression2() instanceof TextLiteral); assertEquals( "test", (String) ((TextLiteral) ((Contains) and.getChildren()[0]).getExpression2()).getValue()); assertTrue(and.getChildren()[1] instanceof NumericComparisonOperator); assertEquals( CompareOp.LESS_THAN, ((NumericComparisonOperator) and.getChildren()[1]).getCompareOp()); assertTrue( ((NumericComparisonOperator) and.getChildren()[1]).getExpression1() instanceof NumericFieldValue); assertEquals( "intField", ((NumericFieldValue) ((NumericComparisonOperator) and.getChildren()[1]).getExpression1()).getFieldName()); assertTrue( ((NumericComparisonOperator) and.getChildren()[1]).getExpression2() instanceof NumericLiteral); assertEquals( 1.0, (double) ((NumericLiteral) ((NumericComparisonOperator) and.getChildren()[1]).getExpression2()).getValue(), EPSILON); bytes = PersistenceUtils.toBinary(textField.contains("test").or(intField.isLessThan(1L))); final Or or = (Or) PersistenceUtils.fromBinary(bytes); assertEquals(2, or.getChildren().length); assertTrue(or.getChildren()[0] instanceof Contains); assertTrue(((Contains) or.getChildren()[0]).getExpression1() instanceof TextFieldValue); assertEquals( "name", ((TextFieldValue) ((Contains) or.getChildren()[0]).getExpression1()).getFieldName()); assertTrue(((Contains) or.getChildren()[0]).getExpression2() instanceof TextLiteral); assertEquals( "test", (String) ((TextLiteral) ((Contains) or.getChildren()[0]).getExpression2()).getValue()); assertTrue(or.getChildren()[1] instanceof NumericComparisonOperator); assertEquals( CompareOp.LESS_THAN, ((NumericComparisonOperator) or.getChildren()[1]).getCompareOp()); assertTrue( 
((NumericComparisonOperator) or.getChildren()[1]).getExpression1() instanceof NumericFieldValue); assertEquals( "intField", ((NumericFieldValue) ((NumericComparisonOperator) or.getChildren()[1]).getExpression1()).getFieldName()); assertTrue( ((NumericComparisonOperator) or.getChildren()[1]).getExpression2() instanceof NumericLiteral); assertEquals( 1.0, (double) ((NumericLiteral) ((NumericComparisonOperator) and.getChildren()[1]).getExpression2()).getValue(), EPSILON); bytes = PersistenceUtils.toBinary(Filter.include()); assertTrue(PersistenceUtils.fromBinary(bytes) instanceof Include); bytes = PersistenceUtils.toBinary(Filter.exclude()); assertTrue(PersistenceUtils.fromBinary(bytes) instanceof Exclude); bytes = PersistenceUtils.toBinary(Filter.not(textField.contains("test"))); final Not not = (Not) PersistenceUtils.fromBinary(bytes); assertTrue(not.getFilter() instanceof Contains); assertTrue(((Contains) not.getFilter()).getExpression1() instanceof TextFieldValue); assertEquals( "name", ((TextFieldValue) ((Contains) not.getFilter()).getExpression1()).getFieldName()); assertTrue(((Contains) not.getFilter()).getExpression2() instanceof TextLiteral); assertEquals( "test", (String) ((TextLiteral) ((Contains) not.getFilter()).getExpression2()).getValue()); } @Test public void testInvalidComparisons() throws URISyntaxException { final TextLiteral textLit = TextLiteral.of("text"); final NumericLiteral doubleLit = NumericLiteral.of(0.5); final NumericLiteral integerLit = NumericLiteral.of(1); final GenericLiteral dateLit = GenericLiteral.of(new Date(100)); final GenericLiteral dateLit2 = GenericLiteral.of(new Date(500)); final GenericLiteral uriLit = GenericLiteral.of(new URI("test")); final GenericLiteral nonComparable = GenericLiteral.of(Lists.newArrayList()); try { doubleLit.isGreaterThan(textLit).evaluate(null, null); fail(); } catch (RuntimeException e) { // Expected } try { textLit.isGreaterThan(doubleLit).evaluate(null, null); } catch (RuntimeException e) { // 
Expected } try { textLit.isLessThan(dateLit).evaluate(null, null); } catch (RuntimeException e) { // Expected } try { doubleLit.isBetween("test", 1).evaluate(null, null); fail(); } catch (RuntimeException e) { // Expected } try { doubleLit.isBetween(0, "test").evaluate(null, null); fail(); } catch (RuntimeException e) { // Expected } try { integerLit.isBetween("test", "test2").evaluate(null, null); fail(); } catch (RuntimeException e) { // Expected } try { doubleLit.isBetween(dateLit2, uriLit).evaluate(null, null); fail(); } catch (RuntimeException e) { // Expected } } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/filter/expression/FilterRangeTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.filter.expression; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.List; import org.junit.Test; import com.google.common.collect.Lists; public class FilterRangeTest { @Test public void testMergeIntRanges() { List> intRanges = Lists.newArrayList( FilterRange.of(3, 40, true, true, true), FilterRange.of(1, 45, true, true, true), FilterRange.of(2, 50, true, true, true), FilterRange.of(70, 75, true, true, true), FilterRange.of(100, 200, true, true, true), FilterRange.of(75, 90, true, true, true), FilterRange.of(80, 85, true, true, true)); List> merged = FilterRange.mergeRanges(intRanges); 
assertEquals(3, merged.size()); assertFalse(merged.get(0).isFullRange()); assertEquals(1, (int) merged.get(0).getStart()); assertEquals(50, (int) merged.get(0).getEnd()); assertFalse(merged.get(1).isFullRange()); assertEquals(70, (int) merged.get(1).getStart()); assertEquals(90, (int) merged.get(1).getEnd()); assertFalse(merged.get(2).isFullRange()); assertEquals(100, (int) merged.get(2).getStart()); assertEquals(200, (int) merged.get(2).getEnd()); intRanges = Lists.newArrayList( FilterRange.of(3, 40, true, true, true), FilterRange.of(1, 45, true, true, true), FilterRange.of(2, 50, true, true, true), FilterRange.of(null, 75, true, true, true), FilterRange.of(100, 200, true, true, true), FilterRange.of(75, 90, true, true, true), FilterRange.of(80, 85, true, true, true)); merged = FilterRange.mergeRanges(intRanges); assertEquals(2, merged.size()); assertFalse(merged.get(0).isFullRange()); assertNull(merged.get(0).getStart()); assertEquals(90, (int) merged.get(0).getEnd()); assertFalse(merged.get(1).isFullRange()); assertEquals(100, (int) merged.get(1).getStart()); assertEquals(200, (int) merged.get(1).getEnd()); intRanges = Lists.newArrayList( FilterRange.of(3, 40, true, true, true), FilterRange.of(1, 45, true, true, true), FilterRange.of(2, 50, true, true, true), FilterRange.of(70, null, true, true, true), FilterRange.of(100, 200, true, true, true), FilterRange.of(75, 90, true, true, true), FilterRange.of(80, 85, true, true, true)); merged = FilterRange.mergeRanges(intRanges); assertEquals(2, merged.size()); assertFalse(merged.get(0).isFullRange()); assertEquals(1, (int) merged.get(0).getStart()); assertEquals(50, (int) merged.get(0).getEnd()); assertFalse(merged.get(1).isFullRange()); assertEquals(70, (int) merged.get(1).getStart()); assertNull(merged.get(1).getEnd()); intRanges = Lists.newArrayList( FilterRange.of(3, 40, true, true, true), FilterRange.of(1, 45, true, true, true), FilterRange.of(2, 50, true, true, true), FilterRange.of(70, null, true, true, true), 
FilterRange.of(null, 200, true, true, true), FilterRange.of(75, 90, true, true, true), FilterRange.of(80, 85, true, true, true)); merged = FilterRange.mergeRanges(intRanges); assertEquals(1, merged.size()); assertTrue(merged.get(0).isFullRange()); assertNull(merged.get(0).getStart()); assertNull(merged.get(0).getEnd()); } @Test public void testIntersectIntRanges() { List> intRanges1 = Lists.newArrayList( FilterRange.of(0, 2, true, true, true), FilterRange.of(5, 10, true, true, true), FilterRange.of(13, 23, true, true, true), FilterRange.of(24, 25, true, true, true)); List> intRanges2 = Lists.newArrayList( FilterRange.of(1, 5, true, true, true), FilterRange.of(8, 12, true, true, true), FilterRange.of(15, 18, true, true, true), FilterRange.of(20, 24, true, true, true)); List> intersected = FilterRange.intersectRanges(intRanges1, intRanges2); assertEquals(6, intersected.size()); assertEquals(1, (int) intersected.get(0).getStart()); assertEquals(2, (int) intersected.get(0).getEnd()); assertEquals(5, (int) intersected.get(1).getStart()); assertEquals(5, (int) intersected.get(1).getEnd()); assertEquals(8, (int) intersected.get(2).getStart()); assertEquals(10, (int) intersected.get(2).getEnd()); assertEquals(15, (int) intersected.get(3).getStart()); assertEquals(18, (int) intersected.get(3).getEnd()); assertEquals(20, (int) intersected.get(4).getStart()); assertEquals(23, (int) intersected.get(4).getEnd()); assertEquals(24, (int) intersected.get(5).getStart()); assertEquals(24, (int) intersected.get(5).getEnd()); intRanges1 = Lists.newArrayList(FilterRange.of(null, null, true, true, true)); intersected = FilterRange.intersectRanges(intRanges1, intRanges2); assertEquals(4, intersected.size()); assertEquals(1, (int) intersected.get(0).getStart()); assertEquals(5, (int) intersected.get(0).getEnd()); assertEquals(8, (int) intersected.get(1).getStart()); assertEquals(12, (int) intersected.get(1).getEnd()); assertEquals(15, (int) intersected.get(2).getStart()); assertEquals(18, 
(int) intersected.get(2).getEnd()); assertEquals(20, (int) intersected.get(3).getStart()); assertEquals(24, (int) intersected.get(3).getEnd()); intRanges1 = Lists.newArrayList( FilterRange.of(0, 2, true, true, true), FilterRange.of(5, 10, true, true, true), FilterRange.of(13, 23, true, true, true), FilterRange.of(24, 25, true, true, true)); intRanges2 = Lists.newArrayList(FilterRange.of(null, null, true, true, true)); intersected = FilterRange.intersectRanges(intRanges1, intRanges2); assertEquals(4, intersected.size()); assertEquals(0, (int) intersected.get(0).getStart()); assertEquals(2, (int) intersected.get(0).getEnd()); assertEquals(5, (int) intersected.get(1).getStart()); assertEquals(10, (int) intersected.get(1).getEnd()); assertEquals(13, (int) intersected.get(2).getStart()); assertEquals(23, (int) intersected.get(2).getEnd()); assertEquals(24, (int) intersected.get(3).getStart()); assertEquals(25, (int) intersected.get(3).getEnd()); intRanges1 = Lists.newArrayList(FilterRange.of(null, null, true, true, true)); intRanges2 = Lists.newArrayList(FilterRange.of(null, null, true, true, true)); intersected = FilterRange.intersectRanges(intRanges1, intRanges2); assertEquals(1, intersected.size()); assertNull(intersected.get(0).getStart()); assertNull(intersected.get(0).getEnd()); intRanges1 = Lists.newArrayList( FilterRange.of(1, 5, true, true, true), FilterRange.of(8, 10, true, true, true)); intRanges2 = Lists.newArrayList( FilterRange.of(15, 18, true, true, true), FilterRange.of(20, 24, true, true, true)); intersected = FilterRange.intersectRanges(intRanges1, intRanges2); assertEquals(0, intersected.size()); } @Test public void testMergeStringRanges() {} } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/AbstractGWQLTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; public abstract class AbstractGWQLTest { protected DataStore createDataStore() { return createDataStore(createDefaultAdapter(), "pop"); } protected DataStore createDataStore(final DataTypeAdapter adapter, final String indexField) { final StoreFactoryFamilySpi storeFamily = new MemoryStoreFactoryFamily(); final MemoryRequiredOptions opts = new MemoryRequiredOptions(); opts.setGeoWaveNamespace("test_" + getClass().getName()); final DataStore dataStore = storeFamily.getDataStoreFactory().createStore(opts); final FieldDescriptor descriptor = adapter.getFieldDescriptor(indexField); final Index index = 
AttributeDimensionalityTypeProvider.createIndexForDescriptor(adapter, descriptor, null); dataStore.addType(adapter, index); return dataStore; } protected DataTypeAdapter createDefaultAdapter() { return BasicDataTypeAdapter.newAdapter("type", DefaultGWQLTestType.class, "pid"); } protected void assertInvalidStatement( final DataStore dataStore, final String statement, final String expectedMessage) { try { GWQLParser.parseStatement(dataStore, statement); fail(); } catch (GWQLParseException e) { // expected assertTrue( e.getMessage() + " does not contain " + expectedMessage, e.getMessage().contains(expectedMessage)); } } @GeoWaveDataType protected static class DefaultGWQLTestType { @GeoWaveField private String pid; @GeoWaveField private Long pop; @GeoWaveField private String comment; public DefaultGWQLTestType() {} public DefaultGWQLTestType(final String pid, final Long pop, final String comment) { this.pid = pid; this.pop = pop; this.comment = comment; } } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/DeleteStatementTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.text.ParseException; import org.junit.Test; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.Or; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; import org.locationtech.geowave.core.store.query.gwql.statement.DeleteStatement; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; public class DeleteStatementTest extends AbstractGWQLTest { @Test public void 
testInvalidStatements() { final DataStore dataStore = createDataStore(); // Missing from assertInvalidStatement(dataStore, "DELETE", "expecting FROM"); // Missing type name assertInvalidStatement(dataStore, "DELETE FROM", "missing IDENTIFIER"); // Missing from assertInvalidStatement(dataStore, "DELETE type", "missing FROM"); // Nonexistent type assertInvalidStatement(dataStore, "DELETE FROM nonexistent", "No type named nonexistent"); // Missing filter assertInvalidStatement(dataStore, "DELETE FROM type WHERE", "mismatched input ''"); } @Test public void testValidStatements() { final DataStore dataStore = createDataStore(); GWQLParser.parseStatement(dataStore, "DELETE FROM type"); GWQLParser.parseStatement(dataStore, "DELETE FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "DELETE FROM type WHERE pid BETWEEN 'a' AND 'b'"); GWQLParser.parseStatement(dataStore, "DELETE FROM type WHERE strStartsWith(pop::text, '50')"); GWQLParser.parseStatement(dataStore, "DELETE FROM type WHERE ((((pop < 1))))"); } @Test public void testDelete() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "DELETE FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof DeleteStatement); final DeleteStatement deleteStatement = (DeleteStatement) gwStatement; assertNotNull(deleteStatement.getAdapter()); assertEquals("type", deleteStatement.getAdapter().getTypeName()); assertNull(deleteStatement.getFilter()); } @Test public void testComplexStatement() { final DataStore dataStore = createDataStore(); final Statement statement = GWQLParser.parseStatement( dataStore, "DELETE FROM type " + "WHERE (pop < 1) " + "AND ((pop > 48 OR pid > 'a') AND (pop BETWEEN 0 AND 10 OR pid <= 'b'))"); assertTrue(statement instanceof DeleteStatement); final DeleteStatement deleteStatement = (DeleteStatement) statement; assertNotNull(deleteStatement.getAdapter()); 
assertEquals("type", deleteStatement.getAdapter().getTypeName()); assertNotNull(deleteStatement.getFilter()); final Filter filter = deleteStatement.getFilter(); assertTrue(filter instanceof And); And andFilter = (And) filter; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof NumericComparisonOperator); NumericComparisonOperator compareOp = (NumericComparisonOperator) andFilter.getChildren()[0]; assertTrue(compareOp.getCompareOp().equals(CompareOp.LESS_THAN)); assertTrue(compareOp.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compareOp.getExpression1()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals(1, ((NumericLiteral) compareOp.getExpression2()).getValue(), 0.00001); assertTrue(andFilter.getChildren()[1] instanceof And); andFilter = (And) andFilter.getChildren()[1]; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof Or); Or orFilter = (Or) andFilter.getChildren()[0]; assertTrue(orFilter.getChildren().length == 2); assertTrue(orFilter.getChildren()[0] instanceof NumericComparisonOperator); final NumericComparisonOperator numericCompare = (NumericComparisonOperator) orFilter.getChildren()[0]; assertTrue(numericCompare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) numericCompare.getExpression1()).getFieldName()); assertTrue(numericCompare.getExpression2() instanceof NumericLiteral); assertEquals( 48, ((NumericLiteral) numericCompare.getExpression2()).evaluateValue(null), 0.00001); assertTrue(orFilter.getChildren()[1] instanceof TextComparisonOperator); TextComparisonOperator textCompareOp = (TextComparisonOperator) orFilter.getChildren()[1]; assertTrue(textCompareOp.getCompareOp().equals(CompareOp.GREATER_THAN)); assertTrue(textCompareOp.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) 
textCompareOp.getExpression1()).getFieldName()); assertTrue(textCompareOp.getExpression2() instanceof TextLiteral); assertEquals("a", ((TextLiteral) textCompareOp.getExpression2()).getValue()); assertTrue(andFilter.getChildren()[1] instanceof Or); orFilter = (Or) andFilter.getChildren()[1]; assertTrue(orFilter.getChildren().length == 2); assertTrue(orFilter.getChildren()[0] instanceof NumericBetween); NumericBetween between = (NumericBetween) orFilter.getChildren()[0]; assertTrue(between.getValue() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) between.getValue()).getFieldName()); assertTrue(between.getLowerBound() instanceof NumericLiteral); assertEquals(0, ((NumericLiteral) between.getLowerBound()).getValue(), 0.00001); assertTrue(between.getUpperBound() instanceof NumericLiteral); assertEquals(10, ((NumericLiteral) between.getUpperBound()).getValue(), 0.00001); assertTrue(orFilter.getChildren()[1] instanceof TextComparisonOperator); textCompareOp = (TextComparisonOperator) orFilter.getChildren()[1]; assertTrue(textCompareOp.getCompareOp().equals(CompareOp.LESS_THAN_OR_EQUAL)); assertTrue(textCompareOp.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) textCompareOp.getExpression1()).getFieldName()); assertTrue(textCompareOp.getExpression2() instanceof TextLiteral); assertEquals("b", ((TextLiteral) textCompareOp.getExpression2()).getValue()); } @Test public void testDeleteWithFilter() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "DELETE FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc'"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof DeleteStatement); final DeleteStatement deleteStatement = (DeleteStatement) gwStatement; assertNotNull(deleteStatement.getAdapter()); assertEquals("type", deleteStatement.getAdapter().getTypeName()); 
assertNotNull(deleteStatement.getFilter()); final Filter filter = deleteStatement.getFilter(); assertTrue(filter instanceof And); final And andFilter = (And) filter; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof NumericBetween); assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator); } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/GWQLParserTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.TokenStream; import org.junit.Test; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.ComparisonOperator.CompareOp; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.IsNotNull; import org.locationtech.geowave.core.store.query.filter.expression.IsNull; import org.locationtech.geowave.core.store.query.filter.expression.Not; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Abs; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Add; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Divide; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Multiply; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericLiteral; import org.locationtech.geowave.core.store.query.filter.expression.numeric.Subtract; import 
org.locationtech.geowave.core.store.query.filter.expression.text.Concat; import org.locationtech.geowave.core.store.query.filter.expression.text.Contains; import org.locationtech.geowave.core.store.query.filter.expression.text.EndsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.StartsWith; import org.locationtech.geowave.core.store.query.filter.expression.text.TextBinaryPredicate; import org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator; import org.locationtech.geowave.core.store.query.filter.expression.text.TextFieldValue; import org.locationtech.geowave.core.store.query.filter.expression.text.TextLiteral; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLLexer; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; import org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; public class GWQLParserTest extends AbstractGWQLTest { @Test public void testFilters() { final DataStore dataStore = createDataStore(); String statement = "SELECT * FROM type WHERE pop IS NULL"; Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); SelectStatement selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof IsNull); assertTrue(((IsNull) filter).getExpression() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) ((IsNull) filter).getExpression()).getFieldName()); statement = "SELECT * FROM type WHERE pop IS NOT NULL"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof 
IsNotNull); assertTrue(((IsNotNull) filter).getExpression() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) ((IsNotNull) filter).getExpression()).getFieldName()); statement = "SELECT * FROM type WHERE NOT pop IS NOT NULL"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof Not); final Not not = (Not) filter; assertTrue(not.getFilter() instanceof IsNotNull); filter = not.getFilter(); assertTrue(((IsNotNull) filter).getExpression() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) ((IsNotNull) filter).getExpression()).getFieldName()); } @Test public void testInvalidFilters() { final DataStore dataStore = createDataStore(); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pop > pid", "Comparison operators can only be used on comparable expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pop < pid", "Comparison operators can only be used on comparable expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pop >= pid", "Comparison operators can only be used on comparable expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pop <= pid", "Comparison operators can only be used on comparable expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pop BETWEEN pid AND comment", "The BETWEEN operation is only supported for comparable expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE nonexistent > 5", "Field nonexistent did not exist in the specified type"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pid + pid > 5", "Math operations require numeric expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pid - pid > 5", "Math operations require numeric 
expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pid * pid > 5", "Math operations require numeric expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pid / pid > 5", "Math operations require numeric expressions"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE nonexistent(pid) > 5", "No expression function was found with the name: nonexistent"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE nonexistent(pid)", "No predicate function was found with the name: nonexistent"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pid nonexistent pid", "No 'nonexistent' operator was found"); assertInvalidStatement( dataStore, "SELECT * FROM type WHERE pid::nonexistent > 5", "Type 'nonexistent' is undefined"); } @Test public void testExpressionFunctions() { final DataStore dataStore = createDataStore(); final String statement = "SELECT * FROM type WHERE abs(pop) > 10 AND strStartsWith(concat(pid, 'value'), 'abc')"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getFilter()); final Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof And); final And and = (And) filter; assertEquals(2, and.getChildren().length); assertTrue(and.getChildren()[0] instanceof NumericComparisonOperator); final NumericComparisonOperator compareOp = (NumericComparisonOperator) and.getChildren()[0]; assertTrue(compareOp.getCompareOp().equals(CompareOp.GREATER_THAN)); assertTrue(compareOp.getExpression1() instanceof Abs); assertTrue(((Abs) compareOp.getExpression1()).getExpression() instanceof NumericFieldValue); assertEquals( "pop", ((NumericFieldValue) 
((Abs) compareOp.getExpression1()).getExpression()).getFieldName()); assertTrue(compareOp.getExpression2() instanceof NumericLiteral); assertEquals(10.0, ((NumericLiteral) compareOp.getExpression2()).getValue(), 0.00001); assertTrue(and.getChildren()[1] instanceof StartsWith); final StartsWith startsWith = (StartsWith) and.getChildren()[1]; assertTrue(startsWith.getExpression1() instanceof Concat); assertTrue(((Concat) startsWith.getExpression1()).getExpression1() instanceof TextFieldValue); assertEquals( "pid", ((TextFieldValue) ((Concat) startsWith.getExpression1()).getExpression1()).getFieldName()); assertTrue(((Concat) startsWith.getExpression1()).getExpression2() instanceof TextLiteral); assertEquals( "value", ((TextLiteral) ((Concat) startsWith.getExpression1()).getExpression2()).getValue()); assertTrue(startsWith.getExpression2() instanceof TextLiteral); assertEquals("abc", ((TextLiteral) startsWith.getExpression2()).getValue()); } @Test public void testTextLiterals() { assertEquals("POINT(1 1)", parseTextLiteral("'POINT(1 1)'").getValue()); assertEquals("can't brea'k", parseTextLiteral("'can''t brea''k'").getValue()); assertEquals("can't break", parseTextLiteral("'can\\'t break'").getValue()); assertEquals("can''t break", parseTextLiteral("'can\\'''t break'").getValue()); assertEquals("can't\tbreak\n", parseTextLiteral("'can''t\tbreak\n'").getValue()); assertEquals("can't\\break", parseTextLiteral("'can''t\\\\break'").getValue()); } private TextLiteral parseTextLiteral(final String text) { final GWQLLexer lexer = new GWQLLexer(CharStreams.fromString(text)); final TokenStream tokenStream = new CommonTokenStream(lexer); final GWQLParser parser = new GWQLParser(tokenStream); return parser.textLiteral().value; } @Test public void testTextPredicateFunctions() { final DataStore dataStore = createDataStore(); String statement = "SELECT * FROM type WHERE strStartsWith(pid, 'val')"; Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); 
assertTrue(gwStatement instanceof SelectStatement); SelectStatement selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof StartsWith); TextBinaryPredicate predicate = (TextBinaryPredicate) filter; assertFalse(predicate.isIgnoreCase()); assertTrue(predicate.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) predicate.getExpression1()).getFieldName()); assertTrue(predicate.getExpression2() instanceof TextLiteral); assertEquals("val", ((TextLiteral) predicate.getExpression2()).getValue()); statement = "SELECT * FROM type WHERE strStartsWith(pid, 'val', true)"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof StartsWith); predicate = (TextBinaryPredicate) filter; assertTrue(predicate.isIgnoreCase()); assertTrue(predicate.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) predicate.getExpression1()).getFieldName()); assertTrue(predicate.getExpression2() instanceof TextLiteral); assertEquals("val", ((TextLiteral) predicate.getExpression2()).getValue()); statement = "SELECT * FROM type WHERE strEndsWith(pid, 'val')"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof EndsWith); predicate = (TextBinaryPredicate) filter; assertFalse(predicate.isIgnoreCase()); assertTrue(predicate.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) predicate.getExpression1()).getFieldName()); assertTrue(predicate.getExpression2() instanceof 
TextLiteral); assertEquals("val", ((TextLiteral) predicate.getExpression2()).getValue()); statement = "SELECT * FROM type WHERE strEndsWith(pid, 'val', true)"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof EndsWith); predicate = (TextBinaryPredicate) filter; assertTrue(predicate.isIgnoreCase()); assertTrue(predicate.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) predicate.getExpression1()).getFieldName()); assertTrue(predicate.getExpression2() instanceof TextLiteral); assertEquals("val", ((TextLiteral) predicate.getExpression2()).getValue()); statement = "SELECT * FROM type WHERE strContains(pid, 'val')"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof Contains); predicate = (TextBinaryPredicate) filter; assertFalse(predicate.isIgnoreCase()); assertTrue(predicate.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) predicate.getExpression1()).getFieldName()); assertTrue(predicate.getExpression2() instanceof TextLiteral); assertEquals("val", ((TextLiteral) predicate.getExpression2()).getValue()); statement = "SELECT * FROM type WHERE strContains(pid, 'val', true)"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof Contains); predicate = (TextBinaryPredicate) filter; assertTrue(predicate.isIgnoreCase()); 
assertTrue(predicate.getExpression1() instanceof TextFieldValue); assertEquals("pid", ((TextFieldValue) predicate.getExpression1()).getFieldName()); assertTrue(predicate.getExpression2() instanceof TextLiteral); assertEquals("val", ((TextLiteral) predicate.getExpression2()).getValue()); } @Test public void testMathExpression() { final DataStore dataStore = createDataStore(); String statement = "SELECT * FROM type WHERE pop + 5 > 25"; Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); SelectStatement selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); NumericComparisonOperator compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof Add); Add add = (Add) compare.getExpression1(); assertTrue(add.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) add.getExpression1()).getFieldName()); assertTrue(add.getExpression2() instanceof NumericLiteral); assertEquals(5, ((NumericLiteral) add.getExpression2()).getValue(), 0.000001); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop - 5 > 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof Subtract); Subtract subtract = (Subtract) 
compare.getExpression1(); assertTrue(subtract.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) subtract.getExpression1()).getFieldName()); assertTrue(subtract.getExpression2() instanceof NumericLiteral); assertEquals(5, ((NumericLiteral) subtract.getExpression2()).getValue(), 0.000001); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop * 5 > 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof Multiply); Multiply multiply = (Multiply) compare.getExpression1(); assertTrue(multiply.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) multiply.getExpression1()).getFieldName()); assertTrue(multiply.getExpression2() instanceof NumericLiteral); assertEquals(5, ((NumericLiteral) multiply.getExpression2()).getValue(), 0.000001); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop / 5 > 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp()); 
assertTrue(compare.getExpression1() instanceof Divide); Divide divide = (Divide) compare.getExpression1(); assertTrue(divide.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) divide.getExpression1()).getFieldName()); assertTrue(divide.getExpression2() instanceof NumericLiteral); assertEquals(5, ((NumericLiteral) divide.getExpression2()).getValue(), 0.000001); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); // Test order of operations // (pop + ((5 * (pop - 8)) / 6)) statement = "SELECT * FROM type WHERE pop + 5 * (pop - 8) / 6 > 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof Add); add = (Add) compare.getExpression1(); assertTrue(add.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) add.getExpression1()).getFieldName()); assertTrue(add.getExpression2() instanceof Divide); divide = (Divide) add.getExpression2(); assertTrue(divide.getExpression1() instanceof Multiply); multiply = (Multiply) divide.getExpression1(); assertTrue(multiply.getExpression1() instanceof NumericLiteral); assertEquals(5, ((NumericLiteral) multiply.getExpression1()).getValue(), 0.000001); assertTrue(multiply.getExpression2() instanceof Subtract); subtract = (Subtract) multiply.getExpression2(); assertTrue(subtract.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) subtract.getExpression1()).getFieldName()); assertTrue(subtract.getExpression2() instanceof NumericLiteral); 
assertEquals(8, ((NumericLiteral) subtract.getExpression2()).getValue(), 0.000001); assertTrue(divide.getExpression2() instanceof NumericLiteral); assertEquals(6, ((NumericLiteral) divide.getExpression2()).getValue(), 0.000001); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); } @Test public void testComparisonOperators() { final DataStore dataStore = createDataStore(); String statement = "SELECT * FROM type WHERE pop > 25"; Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); SelectStatement selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); NumericComparisonOperator compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compare.getExpression1()).getFieldName()); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop >= 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.GREATER_THAN_OR_EQUAL, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compare.getExpression1()).getFieldName()); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, 
((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop < 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.LESS_THAN, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compare.getExpression1()).getFieldName()); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop <= 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.LESS_THAN_OR_EQUAL, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compare.getExpression1()).getFieldName()); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop = 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; 
assertEquals(CompareOp.EQUAL_TO, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compare.getExpression1()).getFieldName()); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); statement = "SELECT * FROM type WHERE pop <> 25"; gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); filter = selectStatement.getFilter(); assertTrue(filter instanceof NumericComparisonOperator); compare = (NumericComparisonOperator) filter; assertEquals(CompareOp.NOT_EQUAL_TO, compare.getCompareOp()); assertTrue(compare.getExpression1() instanceof NumericFieldValue); assertEquals("pop", ((NumericFieldValue) compare.getExpression1()).getFieldName()); assertTrue(compare.getExpression2() instanceof NumericLiteral); assertEquals(25, ((NumericLiteral) compare.getExpression2()).getValue(), 0.000001); } @Test public void testCasting() { final DataStore dataStore = createDataStore(); String statement = "SELECT * FROM type WHERE pop::text = '15'"; Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); SelectStatement selectStatement = (SelectStatement) gwStatement; assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof TextComparisonOperator); final TextComparisonOperator textCompare = (TextComparisonOperator) filter; assertEquals(CompareOp.EQUAL_TO, textCompare.getCompareOp()); assertTrue(textCompare.getExpression1() instanceof TextFieldValue); assertEquals("pop", ((TextFieldValue) textCompare.getExpression1()).getFieldName()); assertTrue(textCompare.getExpression2() instanceof TextLiteral); } } 
================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/query/gwql/SelectStatementTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.query.gwql; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.text.ParseException; import org.junit.Test; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.query.filter.expression.And; import org.locationtech.geowave.core.store.query.filter.expression.Filter; import org.locationtech.geowave.core.store.query.filter.expression.numeric.NumericBetween; import org.locationtech.geowave.core.store.query.filter.expression.text.TextComparisonOperator; import org.locationtech.geowave.core.store.query.gwql.parse.GWQLParser; import org.locationtech.geowave.core.store.query.gwql.statement.SelectStatement; import org.locationtech.geowave.core.store.query.gwql.statement.Statement; public class SelectStatementTest extends AbstractGWQLTest { @Test public void testInvalidStatements() { final DataStore dataStore = createDataStore(); // Missing from assertInvalidStatement(dataStore, "SELECT *", "expecting FROM"); // Missing store and type name assertInvalidStatement(dataStore, "SELECT * FROM", "missing IDENTIFIER"); // Missing everything assertInvalidStatement(dataStore, "SELECT", "expecting {'*', IDENTIFIER}"); // 
All columns and single selector assertInvalidStatement(dataStore, "SELECT *, pop FROM type", "expecting FROM"); // All columns and aggregation selector assertInvalidStatement(dataStore, "SELECT *, agg(column) FROM type", "expecting FROM"); // Nonexistent type assertInvalidStatement(dataStore, "SELECT * FROM nonexistent", "No type named nonexistent"); // No selectors assertInvalidStatement(dataStore, "SELECT FROM type", "expecting {'*', IDENTIFIER}"); // Aggregation and non aggregation selectors assertInvalidStatement(dataStore, "SELECT agg(*), pop FROM type", "expecting '('"); // No where filter assertInvalidStatement(dataStore, "SELECT * FROM type WHERE", "mismatched input ''"); // No limit count assertInvalidStatement(dataStore, "SELECT * FROM type LIMIT", "missing INTEGER"); // Non-integer limit count assertInvalidStatement(dataStore, "SELECT * FROM type LIMIT 1.5", "expecting INTEGER"); // Missing column alias assertInvalidStatement(dataStore, "SELECT pop AS FROM type", "expecting IDENTIFIER"); } @Test public void testValidStatements() { final DataStore dataStore = createDataStore(); GWQLParser.parseStatement(dataStore, "SELECT * FROM type"); GWQLParser.parseStatement(dataStore, "SELECT * FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT * FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT * FROM type WHERE pop > 1 LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT a, b FROM type WHERE pop > 1 LIMIT 2"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT a AS a_alt, b FROM type WHERE 
pop > 1 LIMIT 2"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) FROM type WHERE pop > 1 LIMIT 3"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a) AS sum FROM type WHERE pop > 1 LIMIT 3"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT COUNT(*) FROM type WHERE pop > 1 LIMIT 4"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type LIMIT 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type WHERE pop < 1"); GWQLParser.parseStatement(dataStore, "SELECT SUM(a), COUNT(*) FROM type WHERE pop > 1 LIMIT 4"); } @Test public void testAllColumns() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "SELECT * FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNull(selectStatement.getFilter()); } @Test public void testAllColumnsWithFilter() throws ParseException, IOException { final DataStore dataStore = 
createDataStore(); final String statement = "SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc'"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof And); And andFilter = (And) filter; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof NumericBetween); assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator); assertNull(selectStatement.getLimit()); } @Test public void testAllColumnsWithFilterAndLimit() throws ParseException, IOException { final DataStore dataStore = createDataStore(); final String statement = "SELECT * FROM type WHERE pop BETWEEN 1000 AND 2000 and pid > 'abc' LIMIT 1"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getFilter()); Filter filter = selectStatement.getFilter(); assertTrue(filter instanceof And); And andFilter = (And) filter; assertTrue(andFilter.getChildren().length == 2); assertTrue(andFilter.getChildren()[0] instanceof NumericBetween); assertTrue(andFilter.getChildren()[1] instanceof TextComparisonOperator); assertNotNull(selectStatement.getLimit()); assertEquals(1, selectStatement.getLimit().intValue()); } @Test public void testAggregation() { final DataStore dataStore = createDataStore(); final String 
statement = "SELECT sum(pop) FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertTrue(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 1); assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector); AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0); assertNull(selector.alias()); assertEquals("sum", selector.functionName()); assertEquals(1, selector.functionArgs().length); assertEquals("pop", selector.functionArgs()[0]); assertNull(selectStatement.getFilter()); } @Test public void testAggregationAlias() { final DataStore dataStore = createDataStore(); final String statement = "SELECT sum(pop) AS total FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertTrue(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 1); assertTrue(selectStatement.getSelectors().get(0) instanceof AggregationSelector); AggregationSelector selector = (AggregationSelector) selectStatement.getSelectors().get(0); assertEquals("total", selector.alias()); assertEquals("sum", selector.functionName()); assertEquals(1, selector.functionArgs().length); assertEquals("pop", selector.functionArgs()[0]); assertNull(selectStatement.getFilter()); } @Test public void testColumnSubset() { final DataStore dataStore = createDataStore(); 
final String statement = "SELECT pop, start, end FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 3); assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector); ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0); assertNull(selector.alias()); assertEquals("pop", selector.columnName()); assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(1); assertNull(selector.alias()); assertEquals("start", selector.columnName()); assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(2); assertNull(selector.alias()); assertEquals("end", selector.columnName()); assertNull(selectStatement.getFilter()); } @Test public void testColumnSubsetWithAliases() { final DataStore dataStore = createDataStore(); final String statement = "SELECT pop AS pop_alt, start, end AS end_alt FROM type"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("type", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 3); assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector); ColumnSelector selector = (ColumnSelector) 
selectStatement.getSelectors().get(0); assertEquals("pop_alt", selector.alias()); assertEquals("pop", selector.columnName()); assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(1); assertNull(selector.alias()); assertEquals("start", selector.columnName()); assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(2); assertEquals("end_alt", selector.alias()); assertEquals("end", selector.columnName()); assertNull(selectStatement.getFilter()); } @Test public void testUnconventionalNaming() { final DataStore dataStore = createDataStore( BasicDataTypeAdapter.newAdapter("ty-p3", UnconventionalNameType.class, "pid"), "a-1"); final String statement = "SELECT [a-1], `b-2`, \"c-3\" FROM [ty-p3]"; final Statement gwStatement = GWQLParser.parseStatement(dataStore, statement); assertTrue(gwStatement instanceof SelectStatement); final SelectStatement selectStatement = (SelectStatement) gwStatement; assertFalse(selectStatement.isAggregation()); assertNotNull(selectStatement.getAdapter()); assertEquals("ty-p3", selectStatement.getAdapter().getTypeName()); assertNotNull(selectStatement.getSelectors()); assertTrue(selectStatement.getSelectors().size() == 3); assertTrue(selectStatement.getSelectors().get(0) instanceof ColumnSelector); ColumnSelector selector = (ColumnSelector) selectStatement.getSelectors().get(0); assertNull(selector.alias()); assertEquals("a-1", selector.columnName()); assertTrue(selectStatement.getSelectors().get(1) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(1); assertNull(selector.alias()); assertEquals("b-2", selector.columnName()); assertTrue(selectStatement.getSelectors().get(2) instanceof ColumnSelector); selector = (ColumnSelector) selectStatement.getSelectors().get(2); assertNull(selector.alias()); assertEquals("c-3", 
selector.columnName()); assertNull(selectStatement.getFilter()); } @GeoWaveDataType protected static class UnconventionalNameType { @GeoWaveField(name = "pid") private String pid; @GeoWaveField(name = "a-1") private Long a1; @GeoWaveField(name = "b-2") private Long b2; @GeoWaveField(name = "c-3") private Long c3; public UnconventionalNameType() {} public UnconventionalNameType(final String pid, final Long a1, final Long b2, final Long c3) { this.pid = pid; this.a1 = a1; this.b2 = b2; this.c3 = c3; } } } ================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/statistics/index/PartitionsStatisticTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.store.statistics.index;

import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.entities.GeoWaveKey;
import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl;
import org.locationtech.geowave.core.store.entities.GeoWaveValue;
import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue;

/** Verifies that {@code PartitionsStatistic} records the distinct partition keys it sees. */
public class PartitionsStatisticTest {
  static final long base = 7l;
  static int counter = 0;

  /** Builds a key whose single-byte partition cycles through 0..31 across calls. */
  private GeoWaveKey genKey(final long id) {
    final byte partitionByte = (byte) (counter++ % 32);
    final InsertionIds insertionIds =
        new InsertionIds(
            new byte[] {partitionByte},
            Arrays.asList(
                StringUtils.stringToBinary(String.format("\12%5h", base + id) + "20030f89")));
    return GeoWaveKeyImpl.createKeys(insertionIds, new byte[] {}, (short) 0)[0];
  }

  @Test
  public void testIngest() {
    final PartitionsStatistic statistic = new PartitionsStatistic();
    final PartitionsValue value = statistic.createEmpty();
    for (long entry = 0; entry < 10000; entry++) {
      final GeoWaveRow row = new GeoWaveRowImpl(genKey(entry), new GeoWaveValue[] {});
      value.entryIngested(null, 1, row);
    }
    // 10000 ingested rows cycle through all 32 partition bytes, so each must be present.
    assertEquals(32, value.getValue().size());
    for (byte partitionByte = 0; partitionByte < 32; partitionByte++) {
      Assert.assertTrue(value.getValue().contains(new ByteArray(new byte[] {partitionByte})));
    }
  }
}
================================================ FILE: core/store/src/test/java/org/locationtech/geowave/core/store/statistics/index/RowRangeHistogramStatisticTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.statistics.index; import static org.junit.Assert.assertEquals; import java.util.Arrays; import org.junit.Test; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.entities.GeoWaveKey; import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue; public class RowRangeHistogramStatisticTest { static final long base = 7l; private GeoWaveKey genKey(final long id) { final InsertionIds insertionIds = new InsertionIds( Arrays.asList( StringUtils.stringToBinary(String.format("\12%5h", base + id) + "20030f89"))); return GeoWaveKeyImpl.createKeys(insertionIds, new byte[] {}, (short) 0)[0]; } @Test public void testIngest() { final RowRangeHistogramStatistic stats = new RowRangeHistogramStatistic("indexName"); final RowRangeHistogramValue value = stats.createEmpty(); for (long i = 0; i < 10000; i++) { final GeoWaveRow row = new GeoWaveRowImpl(genKey(i), new GeoWaveValue[] {}); value.entryIngested(null, 1, row); } System.out.println(stats.toString()); assertEquals(1.0, value.cdf(genKey(10000).getSortKey()), 0.00001); assertEquals(0.0, value.cdf(genKey(0).getSortKey()), 0.00001); assertEquals(0.5, value.cdf(genKey(5000).getSortKey()), 0.04); final RowRangeHistogramValue value2 = 
stats.createEmpty(); for (long j = 10000; j < 20000; j++) { final GeoWaveRow row = new GeoWaveRowImpl(genKey(j), new GeoWaveValue[] {}); value2.entryIngested(null, 1, row); } assertEquals(0.0, value2.cdf(genKey(10000).getSortKey()), 0.00001); value.merge(value2); assertEquals(0.5, value.cdf(genKey(10000).getSortKey()), 0.15); value2.fromBinary(value.toBinary()); assertEquals(0.5, value2.cdf(genKey(10000).getSortKey()), 0.15); } } ================================================ FILE: core/store/src/test/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.core.store.TestStorePersistableRegistry ================================================ FILE: core/store/src/test/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI ================================================ org.locationtech.geowave.core.store.adapter.MockRegisteredIndexFieldMappers ================================================ FILE: deploy/Jenkinsfile ================================================ #!groovy node ('master') { def maven = tool 'maven' stage('Parameterize') { if(!params.overwrite_parameters || "${params.overwrite_parameters}" == "Yes") { properties( [ [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '5']], [$class: 'ParametersDefinitionProperty', parameterDefinitions: [ [$class: 'StringParameterDefinition', name: 'rpm_bucket', defaultValue: 'geowave-rpms', description: 'Bucket for rpms'], [$class: 'StringParameterDefinition', name: 'cache_bucket', defaultValue: 'geowave', description: 'Root GeoWave Bucket'], [$class: 'StringParameterDefinition', name: 'notebook_bucket', defaultValue: 'geowave-notebooks', description: 'Bucket for notebooks'], [$class: 'StringParameterDefinition', name: 'third_party_deps_path', defaultValue: 
'https://s3.amazonaws.com/geowave/third-party-downloads', description: 'URL to third party downloads directory (No trailing slash)'], [$class: 'StringParameterDefinition', name: 'LOCAL_REPO_DIR', defaultValue: '/jenkins/gw-repo/snapshots', description: 'Path on the local filesystem to the repo'], [$class: 'StringParameterDefinition', name: 'build_type', defaultValue: 'clean install', description: 'Maven build type. To publish to central change to deploy. Credentials must be added.'], [$class: 'StringParameterDefinition', name: 'install4j_home', defaultValue: '/opt/install4j7/', description: 'Home directory of install4j, required if building standalone installers'], [$class: 'ChoiceParameterDefinition', name: 'overwrite_parameters', choices: 'No\nYes', description: 'Set to yes to reset parameters to defaults'], ] ], disableConcurrentBuilds(), pipelineTriggers([cron('@daily')]) ] ) currentBuild.result = 'ABORTED' error('Parameters Reset') } } // Setting java home for the withMaven block jdk = tool name: 'JDK18' env.JAVA_HOME = "${jdk}" // The following grabs the EC2 role from the instance for things like S3 Access stage('Retrieve Role From IAM'){ sh """ ROLE=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/` AWS_ACCESS_KEY_ID=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/\${ROLE} | awk '/AccessKeyId/ {print \$3}' | sed 's/[^0-9A-Z]*//g'` AWS_SECRET_ACCESS_KEY=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/\${ROLE} | awk '/SecretAccessKey/ {print \$3}' | sed 's/[^0-9A-Za-z/+=]*//g'` AWS_SESSION_TOKEN=`curl -s http://169.254.169.254/latest/meta-data/iam/security-credentials/\${ROLE} | awk '/Token/ {print \$3}' | sed 's/[^0-9A-Za-z/+=]*//g'` AWS_DEFAULT_REGION='us-east-1' M2_HOME='/var/jenkins_home/.m2' export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN AWS_DEFAULT_REGION M2_HOME """ } stage('Wipe workspace'){ deleteDir() } stage('Checkout'){ // Get GeoWave source from SCM 
checkout scm } // Mark the create docker image 'stage' stage('Create Docker Image'){ // Build the docker container sh 'docker build -t locationtech/geowave-centos7-java8-build -f deploy/packaging/docker/geowave-centos7-java8-build.dockerfile deploy/packaging/docker' sh 'docker build -t locationtech/geowave-centos7-rpm-build -f deploy/packaging/docker/geowave-centos7-rpm-build.dockerfile deploy/packaging/docker' sh "docker build -t locationtech/geowave-centos7-publish -f deploy/packaging/docker/geowave-centos7-publish.dockerfile --build-arg third_party_deps_path=${ params.third_party_deps_path } deploy/packaging/docker" } stage('Maven Build/Deploy'){ dir("${env.WORKSPACE}/docker-root") { sh """ if [[ ! -z \$(aws s3api head-object --bucket ${params.cache_bucket} --key mvn-cache/mvn-repo-cache-latest.tar.gz) ]]; then aws s3 cp s3://${params.cache_bucket}/mvn-cache/mvn-repo-cache-latest.tar.gz . --quiet tar xfz mvn-repo-cache-latest.tar.gz rm mvn-repo-cache-latest.tar.gz fi """ } withMaven( maven: 'maven', mavenLocalRepo: "${env.WORKSPACE}/docker-root/.m2/repository/", mavenOpts: "-Xmx2g -Xms1g", options: [junitPublisher(disabled: true), findbugsPublisher(disabled: true)]) { sh "cd dev-resources;mvn ${params.build_type} -DskipTests -Dspotbugs.skip -Dformatter.skip;cd ../" sh "mvn ${params.build_type} -DskipTests -Dspotbugs.skip -Dformatter.skip" } } stage('Clean local space'){ // Local Cleanup - cleanup all but noarch directories sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev-jars/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release-jars/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/SRPMS/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/SRPMS/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/TARBALL/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/TARBALL/*" } stage('Clean S3'){ withMaven( maven: 'maven', mavenLocalRepo: "${env.WORKSPACE}/docker-root/.m2/repository/", mavenOpts: "-Xmx2g -Xms1g", options: 
[junitPublisher(disabled: true), findbugsPublisher(disabled: true)]) { sh "deploy/packaging/docker/init.sh" // S3 Cleanup sh "python deploy/scripts/clean-up.py ${env.WORKSPACE}" } } // Mark the build artifacts 'stage'.... stage('Build and Publish RPMs'){ withEnv(["INSTALL4J_HOME=${ params.install4j_home }","LOCAL_REPO_DIR=${ params.LOCAL_REPO_DIR }","GEOWAVE_RPM_BUCKET=${ params.rpm_bucket }","GEOWAVE_BUCKET=${ params.cache_bucket }"]) { sh "echo 'INSTALL4J_HOME=${INSTALL4J_HOME}'" sh "echo 'params.install4j_home=${params.install4j_home}'" sh "deploy/packaging/docker/docker-build-rpms.sh" } } // Deploy to geowave-rpms stage('Deploying to S3'){ def build_type = readFile('deploy/target/build-type.txt').trim() if ( build_type == "dev" ) { echo 'Build type determined as Dev.' sh "aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/dev/ s3://${params.rpm_bucket}/dev/" sh "aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/dev-jars/ s3://${params.rpm_bucket}/dev-jars/" } else if (build_type == "release" ) { echo 'Build type determined as Release.' sh "aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/release/ s3://${params.rpm_bucket}/release/" sh "aws s3 sync --quiet --acl public-read ${params.LOCAL_REPO_DIR}/geowave/release-jars/ s3://${params.rpm_bucket}/release-jars/" } else { error("ERROR: Could not determine build type. 
Unable to upload rpm's.") } } stage('Bundle Maven Cache'){ def build_type = readFile('deploy/target/build-type.txt').trim() if ( build_type == "dev" ) { dir("${env.WORKSPACE}/docker-root") { sh "tar czf ${env.WORKSPACE}/deploy/target/mvn-repo-cache-latest.tar.gz .m2" sh "aws s3 cp ${env.WORKSPACE}/deploy/target/mvn-repo-cache-latest.tar.gz s3://${params.cache_bucket}/mvn-cache/mvn-repo-cache-latest.tar.gz --quiet" } } else if (build_type == "release" ){ def version = readFile('deploy/target/version.txt').trim() dir("${env.WORKSPACE}/docker-root") { sh "tar czf ${env.WORKSPACE}/deploy/target/mvn-repo-cache-${version}.tar.gz .m2" sh "aws s3 cp ${env.WORKSPACE}/deploy/target/mvn-repo-cache-${version}.tar.gz s3://${params.cache_bucket}/mvn-cache/mvn-repo-cache-${version}.tar.gz --quiet" } } else { error("ERROR: Could not determine build type. Unable to upload maven cache") } } stage("Local cleanup") { sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev-jars/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release-jars/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/SRPMS/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/SRPMS/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/dev/TARBALL/*" sh "sudo rm -rf ${params.LOCAL_REPO_DIR}/geowave/release/TARBALL/*" } } ================================================ FILE: deploy/packaging/docker/.gitignore ================================================ build-args-matrix.sh ================================================ FILE: deploy/packaging/docker/README.md ================================================ ## Step #1: Configure a Docker build host A host to run the GeoWave build containers needs just Docker, Git and the Unzip commands available. Tested Docker configurations are shown below but any OS capable of running Docker containers should work. 
### Redhat7/CentOS7 Docker Build Host ``` sudo yum -y install docker git unzip sudo systemctl start docker sudo systemctl enable docker ``` ### Ubuntu 14.04 Build Host ``` sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9 sudo sh -c "echo deb https://get.docker.com/ubuntu docker main > /etc/apt/sources.list.d/docker.list" sudo apt-get update sudo apt-get -y install lxc-docker git unzip ``` ### Docker Test Before continuing test that Docker is available with the `sudo docker info` command ## Step #2: GeoWave Source Code From the docker build host we're going to clone the GeoWave repo and then by using volume mounts we'll allow the various containers to build and/or package the code without the need to then copy the finished artifacts back out of the container. ``` git clone --depth 1 https://github.com/locationtech/geowave.git ``` ## Step #3: Create Docker Images for Building We'll eventually publish these images, until then you'll have to build them locally ``` pushd geowave/deploy/packaging/docker sudo docker build -t locationtech/geowave-centos7-java8-build -f geowave-centos7-java8-build.dockerfile . sudo docker build -t locationtech/geowave-centos7-rpm-build -f geowave-centos7-rpm-build.dockerfile . popd ``` ## Step #4: Build GeoWave Artifacts and RPMs The docker-build-rpms script will coordinate a series of container builds resulting in finished jar and rpm artifacts built for each of the desired build configurations (ex: cdh5 or apache). ``` export WORKSPACE="$(pwd)/geowave" export SKIP_TESTS="-Dfindbugs.skip=true -Dformatter.skip=true -DskipITs=true -DskipTests=true" # (Optional) sudo chown -R $(whoami) geowave/deploy/packaging geowave/deploy/packaging/docker/docker-build-rpms.sh ``` After the docker-build-rpms.sh command has finished the rpms can be found in the `geowave/deploy/packaging/rpm/centos/7/RPMS/noarch/` directory adjusting the version of the OS as needed. 
================================================ FILE: deploy/packaging/docker/build-args-matrix.sh.example ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # Custom build args matrix config file # Remove the .example from the name of the file and add/remove/update the build args as desired BUILD_ARGS_MATRIX=( "" "--P cloudera" ) ================================================ FILE: deploy/packaging/docker/build-rpm/build-rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # This script will build a single set of rpms for a given configuration # # This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host trap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm' EXIT trap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm && exit' ERR # Set a default version VENDOR_VERSION=apache if [ ! -z "$BUILD_ARGS" ]; then VENDOR_VERSION=$(echo "$BUILD_ARGS" | grep -oi "vendor.version=\w*" | sed "s/vendor.version=//g") fi # Get the version GEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt) GEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt) echo "---------------------------------------------------------------" echo " Building RPM with the following settings" echo "---------------------------------------------------------------" echo "GEOWAVE_VERSION=${GEOWAVE_VERSION}" echo "GEOWAVE_RPM_VERSION=${GEOWAVE_RPM_VERSION}" echo "BUILD_SUFFIX=${BUILD_SUFFIX}" echo "TIME_TAG=${TIME_TAG}" echo "BUILD_ARGS=${BUILD_ARGS}" echo "VENDOR_VERSION=${VENDOR_VERSION}" echo "---------------------------------------------------------------" # Ensure mounted volume permissions are OK for access chown -R root:root $WORKSPACE/deploy/packaging/rpm # Now make sure the host can easily modify/delete generated artifacts chmod -R 777 $WORKSPACE/deploy/packaging/rpm # Staging Artifacts for Build cd $WORKSPACE/deploy/packaging/rpm/centos/7/SOURCES if [ $BUILD_SUFFIX = "common" ] then rm -f *.gz *.jar cp /usr/src/geowave/target/site-${GEOWAVE_VERSION}.tar.gz . cp /usr/src/geowave/docs/target/manpages-${GEOWAVE_VERSION}.tar.gz . 
cp /usr/src/geowave/deploy/target/*${GEOWAVE_VERSION}.tar.gz . else rm -f *.gz *.jar if [[ ! -f deploy-geowave-accumulo-to-hdfs.sh ]]; then # Copy the template for accumulo to sources cp ${WORKSPACE}/deploy/packaging/docker/build-rpm/deploy-geowave-to-hdfs.sh.template deploy-geowave-accumulo-to-hdfs.sh # Replace the tokens appropriately for accumulo sed -i -e s/'$DATASTORE_TOKEN'/accumulo/g deploy-geowave-accumulo-to-hdfs.sh sed -i -e s/'$DATASTORE_USER_TOKEN'/accumulo/g deploy-geowave-accumulo-to-hdfs.sh fi if [[ ! -f deploy-geowave-hbase-to-hdfs.sh ]]; then # Copy the template for hbase to sources cp ${WORKSPACE}/deploy/packaging/docker/build-rpm/deploy-geowave-to-hdfs.sh.template deploy-geowave-hbase-to-hdfs.sh # Replace the tokens appropriately for hbase sed -i -e s/'$DATASTORE_TOKEN'/hbase/g deploy-geowave-hbase-to-hdfs.sh sed -i -e s/'$DATASTORE_USER_TOKEN'/hbase/g deploy-geowave-hbase-to-hdfs.sh fi cp /usr/src/geowave/deploy/target/*${GEOWAVE_VERSION}-${VENDOR_VERSION}.jar . # Copy Accumulo Jars find /usr/src/geowave/deploy/target/ -type f -name "*${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo*.jar" -exec cp {} . \; fi cd .. # Build $WORKSPACE/deploy/packaging/rpm/centos/7/rpm.sh --command build-${BUILD_SUFFIX} --vendor-version $VENDOR_VERSION --geowave-version $GEOWAVE_VERSION --geowave-rpm-version $GEOWAVE_RPM_VERSION --time-tag $TIME_TAG ================================================ FILE: deploy/packaging/docker/build-rpm/build-services-rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # This script will create the geowave services rpms # # This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host trap 'chmod -R 777 $WORKSPACE' EXIT trap 'chmod -R 777 $WORKSPACE && exit' ERR set -e # Set a default version VENDOR_VERSION=apache if [ ! -z "$BUILD_ARGS" ]; then VENDOR_VERSION=$(echo "$BUILD_ARGS" | grep -oi "vendor.version=\w*" | sed "s/vendor.version=//g") fi declare -A ARGS while [ $# -gt 0 ]; do # Trim the first two chars off of the arg name ex: --foo case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done GEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt) GEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt) FPM_SCRIPTS="${WORKSPACE}/deploy/packaging/docker/build-rpm/fpm_scripts" GEOWAVE_DIR="/usr/local/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}" GEOSERVER_VERSION=$(cat $WORKSPACE/deploy/target/geoserver_version.txt) echo "---------------------------------------------------------------" echo " Building Services RPMS with the following settings" echo "---------------------------------------------------------------" echo "GEOWAVE_VERSION=${GEOWAVE_VERSION}" echo "GEOWAVE_RPM_VERSION=${GEOWAVE_RPM_VERSION}" echo "GEOSERVER_VERSION=${GEOSERVER_VERSION}" echo "TIME_TAG=${TIME_TAG}" echo "BUILD_ARGS=${BUILD_ARGS}" echo "VENDOR_VERSION=${VENDOR_VERSION}" echo "---------------------------------------------------------------" set -x #Make a tmp directory and work out of there if [ ! 
-d 'services_tmp' ]; then mkdir services_tmp fi cd services_tmp #grab the geoserver war file and tomcat tarball #Check if the files already exists before grabbing them if [ ! -f geoserver-$GEOSERVER_VERSION-war.zip ]; then echo "Downloading geoserver-$GEOSERVER_VERSION-war" if [[ $(curl -I --write-out %{http_code} --silent --output /dev/null https://s3.amazonaws.com/geowave/third-party-downloads/geoserver/geoserver-$GEOSERVER_VERSION-war.zip) == 200 ]]; then echo "Downloading from Geoserver Bucket" wget -q https://s3.amazonaws.com/geowave/third-party-downloads/geoserver/geoserver-$GEOSERVER_VERSION-war.zip else echo "Downloading from Geoserver.org" wget -q https://build.geoserver.org/geoserver/release/$GEOSERVER_VERSION/geoserver-$GEOSERVER_VERSION-war.zip aws s3 cp geoserver-$GEOSERVER_VERSION-war.zip s3://geowave/third-party-downloads/geoserver/geoserver-$GEOSERVER_VERSION-war.zip fi fi if [ ! -f apache-tomcat-8.5.20.tar.gz ]; then echo "Downloading tomcat-8.5.20" wget -q https://s3.amazonaws.com/geowave/third-party-downloads/tomcat/apache-tomcat-8.5.20.tar.gz tar xzf apache-tomcat-8.5.20.tar.gz && mv apache-tomcat-8.5.20 tomcat8 #Prep the tomcat8 directory for packaging rm -rf tomcat8/webapps/* #put in root page redirect mkdir tomcat8/webapps/ROOT echo "<% response.sendRedirect(\"/geoserver\"); %>" > tomcat8/webapps/ROOT/index.jsp fi #Check if the RPM directory exists. If not create it DIRECTORY="$WORKSPACE/${ARGS[buildroot]}/RPM/${ARGS[arch]}" if [ ! 
-d $DIRECTORY ]; then mkdir -p $WORKSPACE/${ARGS[buildroot]}/RPM/${ARGS[arch]} fi # Ensure mounted volume permissions are OK for access chmod -R 777 $WORKSPACE/deploy echo "Creating tomcat rpm" #Create the gwtomcat_tools.sh script cp ${FPM_SCRIPTS}/gwtomcat_tools.sh.template ${FPM_SCRIPTS}/gwtomcat_tools.sh sed -i -e s/GEOWAVE_VERSION=\"temp\"/GEOWAVE_VERSION=\"${GEOWAVE_VERSION}\"/g ${FPM_SCRIPTS}/gwtomcat_tools.sh sed -i -e s/VENDOR_VERSION=\"temp\"/VENDOR_VERSION=\"${VENDOR_VERSION}\"/g ${FPM_SCRIPTS}/gwtomcat_tools.sh fpm -s dir -t rpm -n "geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat" -v ${GEOWAVE_RPM_VERSION} -a ${ARGS[arch]} \ -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat.$TIME_TAG.noarch.rpm --rpm-os linux --license "Apache Version 2.0" \ -d java-1.8.0-openjdk \ -d geowave-${GEOWAVE_VERSION}-core \ -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-tools \ --iteration $TIME_TAG \ --vendor "geowave" \ --description "Apache Tomcat is an open source software implementation of the Java Servlet and JavaServer Pages technologies." 
\ --url "http://tomcat.apache.org/" \ --directories ${GEOWAVE_DIR}/tomcat8 \ --post-install ${FPM_SCRIPTS}/gwtomcat_post_install.sh \ --pre-uninstall ${FPM_SCRIPTS}/gwtomcat_pre_uninstall.sh \ --post-uninstall ${FPM_SCRIPTS}/gwtomcat_post_uninstall.sh \ ${FPM_SCRIPTS}/gwtomcat_tools.sh=${GEOWAVE_DIR}/tomcat8/bin/gwtomcat_tools.sh \ ${FPM_SCRIPTS}/gwtomcat=/etc/init.d/gwtomcat \ ${FPM_SCRIPTS}/gwtomcat_logrotate=/etc/logrotate.d/gwtomcat \ tomcat8/=${GEOWAVE_DIR}/tomcat8/ #clean up the tmp scripts and move the rpm to the right place to be indexed echo "created tomcat rpm" rm -f ${FPM_SCRIPTS}/gwtomcat_tools.sh cp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat.$TIME_TAG.noarch.rpm $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat.${TIME_TAG}.noarch.rpm #grab the rest services war file echo "Copy REST Services file" cp $WORKSPACE/services/rest/target/*${GEOWAVE_VERSION}-${VENDOR_VERSION}.war restservices.war # Copy accumulo 1.7 restservices war file if [[ -f $WORKSPACE/services/rest/target/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7.war ]]; then cp $WORKSPACE/services/rest/target/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7.war $WORKSPACE/${ARGS[buildroot]}/SOURCES/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7.war fi #get geoserver the war files ready #unpack it in tmp dir unzip -o geoserver-$GEOSERVER_VERSION-war.zip geoserver.war mkdir tmp && cd tmp jar -xf ../geoserver.war rm -rf data/layergroups/* rm -rf data/workspaces/* mkdir data/workspaces/geowave cp $WORKSPACE/${ARGS[buildroot]}/SOURCES/geowave-geoserver-${GEOWAVE_VERSION}-${VENDOR_VERSION}.jar WEB-INF/lib/ cp $WORKSPACE/${ARGS[buildroot]}/SOURCES/default.xml data/workspaces/ cp $WORKSPACE/${ARGS[buildroot]}/SOURCES/namespace.xml data/workspaces/geowave/ cp $WORKSPACE/${ARGS[buildroot]}/SOURCES/workspace.xml data/workspaces/geowave/ #package the war file jar -cf geoserver.war 
* mv geoserver.war ../ cd .. rm -rf tmp echo "Creating Geoserver and services rpm" fpm -s dir -t rpm -n "geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver" -v ${GEOWAVE_RPM_VERSION} -a ${ARGS[arch]} \ -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver.$TIME_TAG.noarch.rpm --rpm-os linux --license "GNU General Public License Version 2.0" \ -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat \ --iteration $TIME_TAG \ --vendor geowave --description "GeoServer is an open source server for sharing geospatial data." \ --url "https://geoserver.org/" \ ${FPM_SCRIPTS}/gwgeoserver_logrotate=/etc/logrotate.d/gwgeoserver \ geoserver.war=${GEOWAVE_DIR}/tomcat8/webapps/geoserver.war fpm -s dir -t rpm -n "geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices" -v ${GEOWAVE_RPM_VERSION} -a ${ARGS[arch]} \ -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices.$TIME_TAG.noarch.rpm --rpm-os linux --license "Apache Version 2.0" \ -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwtomcat \ --iteration $TIME_TAG \ --vendor geowave --description "Geowave rest services rpm. This deploys the Geowave services WAR file to the Tomcat server." 
\ --url "https://locationtech.github.io/geowave" \ restservices.war=${GEOWAVE_DIR}/tomcat8/webapps/restservices.war fpm -s dir -t rpm -n "geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc" -a ${ARGS[arch]} \ -p geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc.$TIME_TAG.noarch.rpm \ -v ${GEOWAVE_RPM_VERSION} \ -d java-1.8.0-openjdk \ -d geowave-${GEOWAVE_VERSION}-core \ -d geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-tools \ --post-install ${FPM_SCRIPTS}/gwgrpc_post_install.sh \ --post-uninstall ${FPM_SCRIPTS}/gwgrpc_post_uninstall.sh \ --iteration $TIME_TAG \ --vendor geowave --description "Geowave gRPC service" \ --url "https://locationtech.github.io/geowave" \ ${FPM_SCRIPTS}/gwgrpc.environment=/etc/geowave/gwgrpc \ ${FPM_SCRIPTS}/gwgrpc_logrotate=/etc/logrotate.d/gwgrpc \ ${FPM_SCRIPTS}/gwgrpc.rsyslog=/etc/rsyslog.d/gwgrpc.conf \ ${FPM_SCRIPTS}/gwgrpc.service=/etc/systemd/system/gwgrpc.service #Move the rpms to the repo to indexed later cp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver.$TIME_TAG.noarch.rpm $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-gwgeoserver.$TIME_TAG.noarch.rpm cp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices.$TIME_TAG.noarch.rpm $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-restservices.$TIME_TAG.noarch.rpm cp geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc.$TIME_TAG.noarch.rpm $WORKSPACE/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}-grpc.$TIME_TAG.noarch.rpm # Move the restservices war to the repo cp restservices.war $WORKSPACE/${ARGS[buildroot]}/SOURCES/geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}.war #Clean up tmp files rm -rf geoserver.war rm -rf restservices.war #Go back to where we started from cd $WORKSPACE ================================================ FILE: deploy/packaging/docker/build-rpm/deploy-geowave-to-hdfs.sh.template 
================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # Upload datastore jar into HDFS # Attempt to use a variety of common HDFS root usernames an optional user arg will override # # deploy-geowave-to-hdfs.sh [--user HDFS_ROOT_USERNAME] # # Test for installed apps required to run this script dependency_tests() { REQUIRED_APPS=('hadoop') for app in "${REQUIRED_APPS[@]}" do type $app >/dev/null 2>&1 || { echo >&2 "$0 needs the $app command to be installed . Aborting."; exit 1; } done } read_dom () { local IFS=\> read -d \< ENTITY CONTENT } # Sanity check of environment dependency_tests # Start detecting the other required settings SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" DATASTORE_USER=$DATASTORE_USER_TOKEN # Parse any arguments passed to the script declare -A ARGS while [ $# -gt 0 ]; do case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done determine_hdfs_user() { # Various usernames distros configure to be the one with "root" HDFS permissions HADOOP_USERS=('hdfs' 'hadoop' 'cloudera-scm') if [ ! -z ${ARGS[user]} ]; then # Use custom user if provided HADOOP_USERS=( ${ARGS[user]} ) fi HADOOP_USER= for user in "${HADOOP_USERS[@]}" do getent passwd $user > /dev/null if [ $? -eq 0 ] ; then HADOOP_USER=$user break fi done if [ ! 
-z $HADOOP_USER ]; then echo $HADOOP_USER else echo >&2 "Cannot determine user account to use for HDFS, tried '${HADOOP_USERS[@]}'. Aborting." exit 1 fi } HDFS_USER=$(determine_hdfs_user) parseVersion() { echo $(cat "$SCRIPT_DIR/geowave-$DATASTORE_TOKEN-build.properties" | grep "project.version=" | sed -e 's/"//g' -e 's/-SNAPSHOT//g' -e 's/project.version=//g') } # Test to see if datastore has been initialized by looking at hdfs contents determine_$DATASTORE_TOKEN_hdfs_root() { DATASTORE_ROOT_DIRS=('/$DATASTORE_TOKEN' '/user/$DATASTORE_TOKEN' '/apps/$DATASTORE_TOKEN') ROOT_DIR= for dir in "${DATASTORE_ROOT_DIRS[@]}" do su $HDFS_USER -c "hadoop fs -ls $dir" > /dev/null if [ $? -eq 0 ] ; then ROOT_DIR=$dir break fi done if [ ! -z $ROOT_DIR ]; then echo $ROOT_DIR else echo >&2 "$DATASTORE_TOKEN application directory not found in HDFS, tried '${DATASTORE_ROOT_DIRS[@]}'. Aborting." exit 1 fi } # To support concurrent version and vendor installs we're naming the directory that contains the iterator with # both the vendor and application version so we can support things like 0.8.7-cdh5, 0.8.7-cdh6, 0.8.8-hdp2 etc. determine_vendor_version() { while [ $# -gt 0 ]; do ARG="${1:2}" KEY="${ARG%%=*}" VALUE="${ARG#*=}" case "$KEY" in "vendor.version") echo "$VALUE" ;; *) # Do nothing esac shift done } BUILD_ARGS_KEY="project.build.args=" BUILD_ARGS_VAL=$(cat $SCRIPT_DIR/geowave-$DATASTORE_TOKEN-build.properties | grep "$BUILD_ARGS_KEY" | sed -e "s/$BUILD_ARGS_KEY//") VENDOR_VERSION=$(determine_vendor_version $BUILD_ARGS_VAL) if [ ! -z $VENDOR_VERSION ]; then VENDOR_VERSION="$(parseVersion)-$VENDOR_VERSION" else VENDOR_VERSION="$(parseVersion)" fi DATASTORE_LIB_DIR="$(determine_$DATASTORE_TOKEN_hdfs_root)/lib" GEOWAVE_DATASTORE_HOME=/usr/local/geowave-$VENDOR_VERSION/$DATASTORE_TOKEN # Check to see if lib directory is already present su $DATASTORE_USER -c "hadoop fs -ls $DATASTORE_LIB_DIR" if [ $? 
-ne 0 ]; then # Try creating su $HDFS_USER -c "hadoop fs -mkdir -p $DATASTORE_LIB_DIR" if [ $? -ne 0 ]; then echo >&2 "Unable to create $DATASTORE_LIB_DIR directory in hdfs. Aborting."; exit 1; fi fi # Check to see if the library is already present and remove if so (put will not replace) su $HDFS_USER -c "hadoop fs -ls $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar" if [ $? -eq 0 ]; then su $HDFS_USER -c "hadoop fs -rm $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar" fi # Upload library to hdfs su $HDFS_USER -c "hadoop fs -put $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar" if [ $? -ne 0 ]; then echo >&2 "Unable to upload geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar into hdfs. Aborting."; exit 1; fi # Also upload the build metadata file for ease of inspection su $HDFS_USER -c "hadoop fs -ls $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-build.properties" if [ $? -eq 0 ]; then su $HDFS_USER -c "hadoop fs -rm $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-build.properties" fi su $HDFS_USER -c "hadoop fs -put $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-build.properties $DATASTORE_LIB_DIR/geowave-$DATASTORE_TOKEN-build.properties" if [ $? -ne 0 ]; then echo >&2 "Unable to upload geowave-$DATASTORE_TOKEN-build.properties into hdfs. Aborting."; exit 1; fi # Set ownership to datastore user su $HDFS_USER -c "hadoop fs -chown -R $DATASTORE_USER:$DATASTORE_USER $DATASTORE_LIB_DIR" if [ $? -ne 0 ]; then echo >&2 "Unable to change ownership of the $DATASTORE_LIB_DIR directory in hdfs. Aborting."; exit 1; fi #Find hbase conf path if [[ -x "$(command -v hbase)" ]]; then PATHS=$(hbase classpath) IFS=':' read -ra CLASSPATHS <<< "$PATHS" for i in "${CLASSPATHS[@]}" do if [[ $i = *"hbase/conf" ]]; then CONFPATH=$i break fi done fi GOTELEM=0 # If using Hbase on AWS, scan hbase configs, find configured bucket and copy library over. 
if [[ -x "$(command -v aws)" ]] && [ ! -z "$CONFPATH" ] && [[ -e "$CONFPATH/hbase-site.xml" ]]; then while read_dom; do if [[ -z "$(echo -e "${CONTENT}" | tr -d '[:space:]')" ]]; then continue; fi if [[ $ENTITY = "name" ]] && [ $CONTENT = "hbase.rootdir" ]; then GOTELEM=1; continue fi if [[ $GOTELEM -eq 1 ]] && [[ $CONTENT = "s3://"* ]]; then CONTENT=${CONTENT%/} echo -e "s3 cp $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar $CONTENT/lib/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar" # Upload library to s3 aws s3 cp $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar $CONTENT/lib/geowave-$DATASTORE_TOKEN-$VENDOR_VERSION.jar # Also upload the build metadata file for ease of inspection aws s3 cp $GEOWAVE_DATASTORE_HOME/geowave-$DATASTORE_TOKEN-build.properties $CONTENT/lib/geowave-$DATASTORE_TOKEN-build.properties fi if [ $GOTELEM -eq 1 ]; then break fi done < $CONFPATH/hbase-site.xml fi ================================================ FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgeoserver_logrotate ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2017 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
# Rotate geoserver logs; copytruncate lets the running tomcat keep its open
# file handle instead of requiring a restart after rotation.
/usr/local/geowave/tomcat8/webapps/geoserver/data/logs/*.log {
    compress
    copytruncate
    dateext
    # NOTE(review): logrotate documents this option as 'size 1k' / 'size=1k';
    # confirm the '+' prefix is accepted by the targeted logrotate version.
    size=+1k
    notifempty
    missingok
    create 644
}

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc.environment
================================================
# Port the GeoWave gRPC service listens on (consumed by gwgrpc.service via
# EnvironmentFile=/etc/geowave/gwgrpc).
GRPC_PORT=8980

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc.rsyslog
================================================
# Route gwgrpc syslog output to its own log file, then stop further
# processing so the messages do not also land in the default syslog files.
if $programname == 'gwgrpc' then /var/log/gwgrpc.log
if $programname == 'gwgrpc' then stop

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc.service
================================================
# Systemd unit file for Geowave gRPC
[Unit]
Description=Geowave gRPC Service
After=syslog.target network.target

[Service]
Type=simple
# Supplies GRPC_PORT used in ExecStart below
EnvironmentFile=/etc/geowave/gwgrpc
ExecStart=/bin/bash /usr/local/bin/geowave grpc start --port ${GRPC_PORT}
ExecStop=/bin/bash /usr/local/bin/geowave grpc stop
# stdout/stderr go to syslog tagged 'gwgrpc' (matched by the rsyslog rule)
StandardOutput=syslog
StandardError=syslog
SyslogIdentifier=gwgrpc
User=geowave
Group=geowave
RestartSec=10
Restart=always

[Install]
WantedBy=multi-user.target

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc_logrotate
================================================
# Rotate the gwgrpc service log; copytruncate avoids restarting the service.
/var/log/gwgrpc.log {
    compress
    copytruncate
    dateext
    size=+1k
    notifempty
    missingok
    create 644
}

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc_post_install.sh
================================================
#!/bin/bash
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
# RPM post-install script for the GeoWave gRPC service: pre-creates the log
# file and fixes ownership/permissions so the 'geowave' service account can
# run the unit.
# Fix: the shebang was previously placed AFTER the copyright header, where it
# is an ordinary comment; it must be the first line of the file to take effect.

# Touch the logfile so rsyslog/logrotate have a target before first start
touch /var/log/gwgrpc.log

# Set SystemD File Modes (env file, unit file, and logrotate rule readable)
chmod 644 /etc/geowave/gwgrpc
chmod 644 /etc/systemd/system/gwgrpc.service
chmod 644 /etc/logrotate.d/gwgrpc

# Service Permissions: everything the service touches is owned by geowave
chown geowave:geowave /var/log/gwgrpc.log
chown -R geowave:geowave /usr/local/geowave*
chown -R geowave:geowave /etc/geowave

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwgrpc_post_uninstall.sh
================================================
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved.
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # Remove SystemD Files rm -rf /etc/geowave/gwgrpc rm -rf /etc/systemd/system/gwgrpc.service rm -rf /etc/rsyslog.d/gwgrpc.conf rm -rf /etc/logrotate.d/gwgrpc ================================================ FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat ================================================ #!/bin/bash # # gwtomcat This shell script takes care of starting and stopping Tomcat # # chkconfig: - 80 20 # ### BEGIN INIT INFO # Provides: gwtomcat # Required-Start: $network $syslog # Required-Stop: $network $syslog # Default-Start: # Default-Stop: # Description: Release implementation for Servlet 3.0 and JSP 2.2 # Short-Description: start and stop tomcat ### END INIT INFO # # - originally written by Henri Gomez, Keith Irwin, and Nicolas Mailhot # - heavily rewritten by Deepak Bhole and Jason Corley # ## Source function library. . 
/etc/rc.d/init.d/functions

NAME="gwtomcat"
unset ISBOOT
# If invoked through an rc S##/K## boot symlink, strip the 3-char prefix to
# recover the plain service name and remember that this is a boot-time call.
if [ "${NAME:0:1}" = "S" -o "${NAME:0:1}" = "K" ]; then
    NAME="${NAME:3}"
    ISBOOT="1"
fi

# su wrapper that forces a usable shell for the (possibly nologin) account
SU="/bin/su -s /bin/sh"

# Where tomcat installation lives
CATALINA_BASE="/usr/local/geowave/tomcat8"
CATALINA_HOME="/usr/local/geowave/tomcat8"
JASPER_HOME="/usr/local/geowave/tomcat8"
CATALINA_TMPDIR="/usr/local/geowave/tomcat8/temp"
# What user should run tomcat
TOMCAT_USER="geowave"
# Maximum time to wait in seconds, before killing process
SHUTDOWN_WAIT="30"
# Maximum time to wait in seconds, after killing the tomcat process
KILL_SLEEP_WAIT="5"
# Whether to annoy the user with "attempting to shut down" messages or not
SHUTDOWN_VERBOSE="false"
# Set the TOMCAT_PID location
CATALINA_PID="/var/run/gwtomcat.pid"
# Define which connector port to use
CONNECTOR_PORT="${CONNECTOR_PORT:-8080}"
# Path to the tomcat launch script
TOMCAT_SCRIPT="/usr/sbin/tomcat8"
# Tomcat program name
TOMCAT_PROG="${NAME}"
# Define the tomcat username
# NOTE(review): TOMCAT_USER was unconditionally set to "geowave" above, so
# this default expansion can never yield "gwtomcat" — likely leftover from
# the stock tomcat init script this was adapted from.
TOMCAT_USER="${TOMCAT_USER:-gwtomcat}"
# Define the tomcat log file
# NOTE(review): the comment above is dangling — no log-file variable is
# actually defined here.
# set kill timeout
KILL_SLEEP_WAIT="${KILL_SLEEP_WAIT:-5}"
RETVAL="0"

# Look for open ports, as the function name might imply
# Picks three unused TCP ports in the 8000+ range and exports them as
# randomPort1/2/3; used by start() to rewrite tomcat's default ports in
# server.xml (8005/8009/8443).
function findFreePorts() {
    local isSet1="false"
    local isSet2="false"
    local isSet3="false"
    local lower="8000"
    randomPort1="0"
    randomPort2="0"
    randomPort3="0"
    # List of ports currently in LISTEN state (netstat local-address column
    # with the host part stripped).
    # NOTE(review): assigning a quoted string to `local -a` stores one string
    # element, not a parsed array; the later `${listeners[*]}` expr match still
    # operates on the raw text, so the lookup works — verify this is intended.
    local -a listeners="( $( netstat -ntl | \
        awk '/^tcp/ {gsub("(.)*:", "", $4); print $4}' ) )"
    # Keep drawing candidate ports (8000 + up-to-4-digit random offset) until
    # three distinct ports not present in the listener list have been found.
    while [ "$isSet1" = "false" ] || \
          [ "$isSet2" = "false" ] || \
          [ "$isSet3" = "false" ]; do
        let port="${lower}+${RANDOM:0:4}"
        # expr returns empty when $port does not appear in the listener list
        if [ -z `expr " ${listeners[*]} " : ".*\( $port \).*"` ]; then
            if [ "$isSet1" = "false" ]; then
                export randomPort1="$port"
                isSet1="true"
            elif [ "$isSet2" = "false" ]; then
                export randomPort2="$port"
                isSet2="true"
            elif [ "$isSet3" = "false" ]; then
                export randomPort3="$port"
                isSet3="true"
            fi
        fi
    done
}
# See how we were called.
function start() { echo -n "Starting ${TOMCAT_PROG}: " if [ "$RETVAL" != "0" ]; then failure echo return fi if [ -f "/var/lock/subsys/${NAME}" ]; then if [ -s "/var/run/${NAME}.pid" ]; then read kpid < /var/run/${NAME}.pid # if checkpid $kpid 2>&1; then if [ -d "/proc/${kpid}" ]; then success echo return 0 fi fi fi # fix permissions on the log and pid files export CATALINA_PID="/var/run/${NAME}.pid" touch $CATALINA_PID 2>&1 || RETVAL="4" if [ "$RETVAL" -eq "0" -a "$?" -eq "0" ]; then chown ${TOMCAT_USER}:${TOMCAT_USER} $CATALINA_PID fi findFreePorts sed -i -e "s/8005/${randomPort1}/g" -e "s/8080/${CONNECTOR_PORT}/g" \ -e "s/8009/${randomPort2}/g" -e "s/8443/${randomPort3}/g" \ ${CATALINA_HOME}/conf/server.xml $SU - $TOMCAT_USER -c "${CATALINA_HOME}/bin/gwtomcat_tools.sh" || RETVAL="4" $SU - $TOMCAT_USER -c "$CATALINA_HOME/bin/startup.sh -Dprocessname=${NAME}" || RETVAL="4" PID=`ps -eaf|grep processname=${NAME}|grep -v grep|awk '{print $2}'` RETVAL=$? if [ "$RETVAL" -eq "0" ]; then success echo echo "${PID}" > ${CATALINA_PID} touch /var/lock/subsys/${NAME} else echo -n "Error code ${RETVAL}" echo failure fi } function stop() { #check to see if pid file is good. 
We only want to stop tomcat8 if #we started it from this init script running_pid=`pidofproc -p ${CATALINA_PID} ${NAME}` if [ -f /var/run/${NAME}.pid ]; then read kpid junk< /var/run/${NAME}.pid if [ -z "$kpid" ]; then echo -n "PID file empty" rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid failure echo exit 4 fi if [ -z "$running_pid" ]; then echo -n "no ${NAME} running, but pid file exists - cleaning up" rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid success echo exit 0 fi if [ -z "$(echo ${kpid} | fgrep -x "${running_pid}")" ]; then echo -n "PID file does not match pid of any running ${NAME}" failure echo rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid exit 4 fi #stop tomcat echo -n "Stopping ${TOMCAT_PROG}: " $SU - $TOMCAT_USER -c "$CATALINA_HOME/bin/shutdown.sh" || RETVAL="4" if [ "$RETVAL" -eq "4" ]; then sleep 1 if [ "$SHUTDOWN_VERBOSE" = "true" ]; then echo "Failed to stop ${NAME} normally, sending a graceful kill." fi kill $kpid > /dev/null 2>&1 sleep 1 fi #wait for tomcat to really shutdown count=0 until [ "$(ps --pid $kpid | grep -c $kpid)" -eq "0" ] || \ [ "$count" -gt "$SHUTDOWN_WAIT" ]; do if [ "$SHUTDOWN_VERBOSE" = "true" ]; then echo "waiting for processes ${NAME} ($kpid) to exit" fi sleep 1 let count="${count}+1" done if [ "$count" -gt "$SHUTDOWN_WAIT" ]; then if [ "$SHUTDOWN_VERBOSE" = "true" ]; then echo -n "Failed to stop ${NAME} ($kpid) gracefully after $SHUTDOWN_WAIT seconds, sending SIGKILL." fi warning echo kill -9 $kpid if [ "$SHUTDOWN_VERBOSE" = "true" ]; then echo "Waiting for ${NAME} ($kpid) to exit." fi count=0 until [ "$(ps --pid $kpid | grep -c $kpid)" -eq "0" ] || \ [ "$count" -gt "$KILL_SLEEP_WAIT" ]; do if [ "$SHUTDOWN_VERBOSE" = "true" ]; then echo "waiting for ${NAME} ($kpid) to exit. 
It could be in the UNINTERRUPTIBLE state" fi sleep 1 let count="${count}+1" done fi #check to make sure tomcat is gone if [ "$(ps --pid $kpid | grep -c $kpid)" -eq "0" ]; then rm -f /var/lock/subsys/${NAME} /var/run/${NAME}.pid RETVAL="0" success echo else echo -n "Unable to stop ${NAME} ($kpid)" RETVAL="4" failure echo fi else if [ -n "$running_pid" ]; then echo -n "${NAME} running, but no pid file" failure echo RETVAL="4" else success echo fi fi return $RETVAL } function usage() { echo "Usage: $0 {start|stop|restart|status|version}" RETVAL="2" } function rh_status() { status -p /var/run/${NAME}.pid ${NAME} } function rh_status_q() { rh_status >/dev/null 2>&1 } # See how we were called. RETVAL="0" case "$1" in start) rh_status_q && exit 0 start ;; stop) stop ;; restart) stop start ;; status) if [ -s "/var/run/${NAME}.pid" ]; then read kpid junk < /var/run/${NAME}.pid if [ -d "/proc/${kpid}" ]; then echo -n "${NAME} (pid ${kpid}) is running..." success echo RETVAL="0" else # The pid file exists but the process is not running echo -n "PID file exists, but process is not running" warning echo RETVAL="1" fi else pid="$(/usr/bin/pgrep -d , -u ${TOMCAT_USER} -G ${TOMCAT_USER} java)" if [ -z "$pid" ]; then echo "${NAME} is stopped" success echo RETVAL="3" else echo "${NAME} (pid $pid) is running, but PID file is missing" success echo RETVAL="0" fi fi ;; version) $SU - $TOMCAT_USER -c "$CATALINA_HOME/bin/version.sh" ;; *) usage ;; esac exit $RETVAL ================================================ FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat.service ================================================ # Systemd unit file for tomcat [Unit] Description=Apache Tomcat Web Application Container After=syslog.target network.target [Service] Type=forking Environment=JAVA_HOME=/usr/lib/jvm/jre Environment=CATALINA_PID=/usr/local/geowave/tomcat8/temp/gwtomcat.pid Environment=CATALINA_HOME=/usr/local/geowave/tomcat8 Environment=CATALINA_BASE=/usr/local/geowave/tomcat8 
Environment='CATALINA_OPTS=-Xms512M -Xmx1024M -server -XX:+UseParallelGC'
Environment='JAVA_OPTS=-Djava.awt.headless=true -Djava.security.egd=file:/dev/./urandom'
ExecStart=/usr/local/geowave/tomcat8/bin/startup.sh
ExecStop=/usr/local/geowave/tomcat8/bin/shutdown.sh
# NOTE(review): the init script and post-install script in this package chown
# the tomcat8 tree to 'geowave:geowave', but this unit runs as 'tomcat' —
# confirm a 'tomcat' account exists and can read/write the CATALINA_* dirs.
User=tomcat
Group=tomcat
UMask=0007
RestartSec=10
Restart=always

[Install]
WantedBy=multi-user.target

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_logrotate
================================================
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2017 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
# Rotate tomcat .log files; copytruncate lets the running tomcat keep its
# open file handles instead of requiring a restart after rotation.
/usr/local/geowave/tomcat8/logs/*.log {
    compress
    copytruncate
    dateext
    # NOTE(review): logrotate documents this option as 'size 1k' / 'size=1k';
    # confirm the '+' prefix is accepted by the targeted logrotate version.
    size=+1k
    notifempty
    missingok
    create 644
}
# Same policy for catalina.out-style .out files.
/usr/local/geowave/tomcat8/logs/*.out {
    compress
    copytruncate
    dateext
    size=+1k
    notifempty
    missingok
    create 644
}

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_post_install.sh
================================================
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved.
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- GEOWAVE_DIR="/usr/local/geowave" #make sure correct permissions are in place chown -R geowave:geowave ${GEOWAVE_DIR}/tomcat8 #change settings on service script chmod 755 /etc/init.d/gwtomcat chown root:root /etc/init.d/gwtomcat #Removing class path spam when starting and shutting down sed -e /"Using CLASSPATH:"/d -i ${GEOWAVE_DIR}/tomcat8/bin/catalina.sh ================================================ FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_post_uninstall.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
# RPM post-uninstall: remove the bundled tomcat tree and the init script.

#Ensure all files in dir are removed
DIRECTORY="/usr/local/geowave/tomcat8/"
if [ -d "$DIRECTORY" ]; then
  rm -rf "$DIRECTORY"
fi
rm -rf /etc/init.d/gwtomcat

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_pre_uninstall.sh
================================================
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
# RPM pre-uninstall: stop the gwtomcat service if it is running so no orphaned
# tomcat process survives the package removal.

#Check if the service is running before removing it
PROCESS_NAME=gwtomcat
pidfile=${PIDFILE-/var/run/${PROCESS_NAME}.pid}
# Fix: the pid file legitimately may not exist (service never started);
# without 2>/dev/null, 'cat' printed a noisy error during every uninstall.
PID=$(cat "${pidfile}" 2>/dev/null)
# Fix: guard the arithmetic comparison — a non-numeric/corrupt pid file would
# otherwise make the -gt test itself error out.
if [[ -n "${PID}" && "${PID}" =~ ^[0-9]+$ && "${PID}" -gt 0 ]]; then
  service ${PROCESS_NAME} stop
  sleep 1
fi

================================================
FILE: deploy/packaging/docker/build-rpm/fpm_scripts/gwtomcat_tools.sh.template
================================================
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved.
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash GEOWAVE_VERSION="temp" VENDOR_VERSION="temp" GEOWAVE_TOOLS_HOME="/usr/local/geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}/tools" if [ -z "$JAVA_HOME" ]; then JAVA_HOME="java" else JAVA_HOME="$JAVA_HOME/bin/java" fi # Setting up Hadoop env if [ -z "$HADOOP_HOME" ]; then if [[ $VENDOR_VERSION == apache ]]; then export HADOOP_HOME=/usr/lib/hadoop elif [[ $VENDOR_VERSION == hdp* ]]; then export HADOOP_HOME=/usr/hdp/current/hadoop-client export HDP_VERSION=$(hdp-select| grep hadoop-hdfs-namenode| sed "s/hadoop-hdfs-namenode - //g") export CATALINA_OPTS="$CATALINA_OPTS -Dhdp.version=${HDP_VERSION}" elif [[ $VENDOR_VERSION == cdh* ]]; then export HADOOP_HOME=/usr/lib/hadoop else echo "Unknown Hadoop Distribution. Set env variable HADOOP_HOME." fi fi # set up HADOOP specific env only if HADOOP is installed if [ -n "${HADOOP_HOME}" ] && [ -d "${HADOOP_HOME}" ]; then . $HADOOP_HOME/libexec/hadoop-config.sh HADOOP_CLASSPATH="" for i in $(echo $CLASSPATH | sed "s/:/ /g") do if [[ "$i" != *log4j-slf4j-impl*.jar && "$i" != *servlet*.jar ]]; then HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$i fi done fi CLASSPATH=${HADOOP_CLASSPATH} # Setting up Spark env if [ -z "$SPARK_HOME" ]; then if [[ $VENDOR_VERSION == apache ]]; then export SPARK_HOME=/usr/lib/spark elif [[ $VENDOR_VERSION == hdp* ]]; then export SPARK_HOME=/usr/hdp/current/spark2-client elif [[ $VENDOR_VERSION == cdh* ]]; then export SPARK_HOME=/usr/lib/spark else echo "Unknown Spark Distribution. Set env variable SPARK_HOME." 
fi fi # Ensure both our tools jar and anything in the plugins directory is on the classpath # Add Spark jars to class path only if SPARK_HOME directory exists if [ -n "${SPARK_HOME}" ] && [ -d "${SPARK_HOME}" ]; then . "${SPARK_HOME}"/bin/load-spark-env.sh SPARK_CLASSPATH="" for i in $(ls ${SPARK_HOME}/jars/* ) do if [[ "$i" != *log4j-slf4j-impl*.jar && "$i" != *servlet*.jar ]]; then SPARK_CLASSPATH=${SPARK_CLASSPATH}:$i fi done CLASSPATH="${SPARK_HOME}/conf:${SPARK_CLASSPATH}:${CLASSPATH}" fi ================================================ FILE: deploy/packaging/docker/build-src/build-geowave-common.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # GeoWave Common Build Script # # This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host trap 'chmod -R 777 $WORKSPACE' EXIT trap 'chmod -R 777 $WORKSPACE && exit' ERR # Get the version GEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt) BUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt) GEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt) echo "---------------------------------------------------------------" echo " Building GeoWave Common" echo "---------------------------------------------------------------" echo "GEOWAVE_VERSION=${GEOWAVE_VERSION}" echo "INSTALL4J_HOME=${INSTALL4J_HOME}" echo 
"BUILD_ARGS=${BUILD_ARGS} ${@}" echo "---------------------------------------------------------------" # Build and archive HTML/PDF docs if [[ ! -f $WORKSPACE/target/site-${GEOWAVE_VERSION}.tar.gz ]]; then mvn -q javadoc:aggregate $BUILD_ARGS "$@" mvn -q -P pdf,epub,html -pl docs install $BUILD_ARGS "$@" tar -czf $WORKSPACE/target/site-${GEOWAVE_VERSION}.tar.gz -C $WORKSPACE/target site fi # Build and archive the man pages if [[ ! -f $WORKSPACE/docs/target/manpages-${GEOWAVE_VERSION}.tar.gz ]]; then mkdir -p $WORKSPACE/docs/target/{asciidoc,manpages} cp -fR $WORKSPACE/docs/content/commands/manpages/* $WORKSPACE/docs/target/asciidoc find $WORKSPACE/docs/target/asciidoc/ -name "*.txt" -exec sed -i "s|//:||" {} \; find $WORKSPACE/docs/target/asciidoc/ -name "*.txt" -exec sed -i "s|^====|==|" {} \; find $WORKSPACE/docs/target/asciidoc/ -name "*.txt" -exec asciidoctor -d manpage -b manpage {} -D $WORKSPACE/docs/target/manpages \; tar -czf $WORKSPACE/docs/target/manpages-${GEOWAVE_VERSION}.tar.gz -C $WORKSPACE/docs/target/manpages/ . fi ## Copy over the puppet scripts if [[ ! -f $WORKSPACE/deploy/target/puppet-scripts-${GEOWAVE_VERSION}.tar.gz ]]; then tar -czf $WORKSPACE/deploy/target/puppet-scripts-${GEOWAVE_VERSION}.tar.gz -C $WORKSPACE/deploy/packaging/puppet geowave fi ## Build the pyspark module if [[ ! -f $WORKSPACE/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz ]]; then mvn package -am -pl analytics/pyspark -P python -Dpython.executable=python3.6 $BUILD_ARGS "$@" if [[ ! 
-f $WORKSPACE/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz ]]; then mv $WORKSPACE/analytics/pyspark/target/geowave_pyspark-*.tar.gz $WORKSPACE/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz fi fi if [ -d /opt/install4j7 ]; then # Build standalone installer echo '###### Building standalone installer' mvn -pl '!test' package -P build-installer-plugin $BUILD_ARGS "$@" mvn package -pl deploy -P build-installer-main -Dinstall4j.home=/opt/install4j7 $BUILD_ARGS "$@" fi ================================================ FILE: deploy/packaging/docker/build-src/build-geowave-vendor.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # GeoWave Vendor-specific Build Script # # This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host trap 'chmod -R 777 $WORKSPACE' EXIT trap 'chmod -R 777 $WORKSPACE && exit' ERR # Set a default version VENDOR_VERSION=apache ACCUMULO_API="$(mvn -q -Dexec.executable="echo" -Dexec.args='${accumulo.api}' --non-recursive -f $WORKSPACE/pom.xml exec:exec $BUILD_ARGS "$@")" if [[ ! 
-z "$BUILD_ARGS" ]]; then VENDOR_VERSION=$(echo "$BUILD_ARGS" | grep -oi "vendor.version=\w*" | sed "s/vendor.version=//g") fi # Get the version GEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt) BUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt) GEOWAVE_RPM_VERSION=$(cat $WORKSPACE/deploy/target/rpm_version.txt) echo "---------------------------------------------------------------" echo " Building GeoWave Vendor-specific with the following settings" echo "---------------------------------------------------------------" echo "GEOWAVE_VERSION=${GEOWAVE_VERSION}" echo "VENDOR_VERSION=${VENDOR_VERSION}" echo "BUILD_ARGS=${BUILD_ARGS} ${@}" echo "ACCUMULO_API=${ACCUMULO_API}" echo "---------------------------------------------------------------" GEOSERVER_VERSION="$(mvn -q -Dexec.executable="echo" -Dexec.args='${geoserver.version}' --non-recursive -f $WORKSPACE/pom.xml exec:exec $BUILD_ARGS)" echo $GEOSERVER_VERSION > $WORKSPACE/deploy/target/geoserver_version.txt # Build each of the "fat jar" artifacts and rename to remove any version strings in the file name mvn -q package -am -pl deploy -P geotools-container-singlejar -Dgeotools.finalName=geowave-geoserver-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS "$@" mvn -q package -am -pl services/rest -P rest-services-war -Drestservices.finalName=geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS "$@" mvn -q package -am -pl deploy -P accumulo-container-singlejar -Daccumulo.finalName=geowave-accumulo-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS "$@" mvn -q package -am -pl deploy -P hbase-container-singlejar -Dhbase.finalName=geowave-hbase-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS "$@" mvn -q package -am -pl deploy -P geowave-tools-singlejar -Dtools.finalName=geowave-tools-${GEOWAVE_VERSION}-${VENDOR_VERSION} $BUILD_ARGS "$@" # Build Accumulo API Jars if [[ "$ACCUMULO_API" != "1.7" ]]; then mvn -q package -am -pl deploy -P geowave-tools-singlejar 
-Dtools.finalName=geowave-tools-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS "$@" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7 mvn -q package -am -pl deploy -P accumulo-container-singlejar -Daccumulo.finalName=geowave-accumulo-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS "$@" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7 mvn -q package -am -pl services/rest -P rest-services-war -Drestservices.finalName=geowave-restservices-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS "$@" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7 mvn -q package -am -pl deploy -P geotools-container-singlejar -Dgeotools.finalName=geowave-geoserver-${GEOWAVE_VERSION}-${VENDOR_VERSION}-accumulo1.7 $BUILD_ARGS "$@" -Daccumulo.version=1.7.2 -Daccumulo.api=1.7 else echo "Skipping Accumulo API Build" fi ================================================ FILE: deploy/packaging/docker/docker-build-rpms.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # This script will build and package all of the configurations listed in the BUILD_ARGS_MATRIX array. # # Source all our reusable functionality, argument is the location of this script. 
trap 'chmod -R 777 $WORKSPACE && exit' ERR echo "INSTALL4J_HOME=${INSTALL4J_HOME}" echo "GEOWAVE_BUCKET=${GEOWAVE_BUCKET}" echo "GEOWAVE_RPM_BUCKET=${GEOWAVE_RPM_BUCKET}" echo '###### Build Variables' declare -A ARGS while [ $# -gt 0 ]; do # Trim the first two chars off of the arg name ex: --foo case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done BUILD_ARGS_MATRIX=${ARGS[buildargsmatrix]} DOCKER_ARGS=${ARGS[dockerargs]} SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" TIME_TAG=$(date +"%Y%m%d%H%M") SKIP_EXTRA="-Dspotbugs.skip -Dformatter.skip -DskipTests" cd "$SCRIPT_DIR/../../.." WORKSPACE="$(pwd)" DOCKER_ROOT=$WORKSPACE/docker-root LOCAL_REPO_DIR="${LOCAL_REPO_DIR:-/jenkins/gw-repo/snapshots}" LOCK_DIR=/var/lock/subsys # If you'd like to build a different set of artifacts rename build-args-matrix.sh.example if [ -z $BUILD_ARGS_MATRIX ]; then if [ -f $SCRIPT_DIR/build-args-matrix.sh ]; then source $SCRIPT_DIR/build-args-matrix.sh else # Default build arguments BUILD_ARGS_MATRIX=( "-Dvendor.version=apache" "-P cloudera -Dvendor.version=cdh5" ) fi fi # make the docker_root directory if it has not been created already if [[ ! -d $DOCKER_ROOT ]]; then echo "WARNING: The docker-root directory did not exist. Creating now." 
mkdir $DOCKER_ROOT fi if [ -z ${INSTALL4J_HOME} ]; then echo "Setting INSTALL4J_HOME=/opt/install4j7" INSTALL4J_HOME=/opt/install4j7 fi if [ -z ${GEOWAVE_RPM_BUCKET} ]; then echo "Setting GEOWAVE_RPM_BUCKET=geowave-rpms" GEOWAVE_RPM_BUCKET=geowave-rpms fi if [ -z ${GEOWAVE_BUCKET} ]; then echo "Setting GEOWAVE_BUCKET=geowave" GEOWAVE_BUCKET=geowave fi if [ -f ~/.install4j ]; then cp ~/.install4j $DOCKER_ROOT/ fi if [ -d ~/.install4j7 ]; then cp -R ~/.install4j7 $DOCKER_ROOT/ fi $WORKSPACE/deploy/packaging/rpm/centos/7/rpm.sh --command clean docker run $DOCKER_ARGS --rm \ -e WORKSPACE=/usr/src/geowave \ -e MAVEN_OPTS="-Xmx1500m" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -v $DOCKER_ROOT:/root \ -v $WORKSPACE:/usr/src/geowave \ -v $INSTALL4J_HOME:/opt/install4j7 \ locationtech/geowave-centos7-java8-build \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/init.sh && deploy/packaging/docker/build-src/build-geowave-common.sh $SKIP_EXTRA" docker run $DOCKER_ARGS --rm \ -e WORKSPACE=/usr/src/geowave \ -e BUILD_SUFFIX="common" \ -e TIME_TAG="$TIME_TAG" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -v $DOCKER_ROOT:/root \ -v $WORKSPACE:/usr/src/geowave \ locationtech/geowave-centos7-rpm-build \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/build-rpm/build-rpm.sh" docker run $DOCKER_ARGS --rm \ -e WORKSPACE=/usr/src/geowave \ -e LOCAL_REPO_DIR=/usr/src/repo \ -e LOCK_DIR=/usr/src/lock \ -e TIME_TAG="$TIME_TAG" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -e GEOWAVE_RPM_BUCKET="$GEOWAVE_RPM_BUCKET" \ -v $DOCKER_ROOT:/root \ -v $WORKSPACE:/usr/src/geowave \ -v $LOCAL_REPO_DIR:/usr/src/repo \ -v $LOCK_DIR:/usr/src/lock \ locationtech/geowave-centos7-publish \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/publish/publish-common-rpm.sh --buildroot deploy/packaging/rpm/centos/7 --arch noarch --repo geowave" for build_args in "${BUILD_ARGS_MATRIX[@]}" do export BUILD_ARGS="$build_args" $WORKSPACE/deploy/packaging/rpm/centos/7/rpm.sh --command clean docker run 
--rm $DOCKER_ARGS \ -e WORKSPACE=/usr/src/geowave \ -e BUILD_ARGS="$build_args" \ -e MAVEN_OPTS="-Xmx1500m" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -v $DOCKER_ROOT:/root \ -v $WORKSPACE:/usr/src/geowave \ locationtech/geowave-centos7-java8-build \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/init.sh && deploy/packaging/docker/build-src/build-geowave-vendor.sh $SKIP_EXTRA" docker run --rm $DOCKER_ARGS \ -e WORKSPACE=/usr/src/geowave \ -e BUILD_ARGS="$build_args" \ -e BUILD_SUFFIX="vendor" \ -e TIME_TAG="$TIME_TAG" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -v $DOCKER_ROOT:/root \ -v $WORKSPACE:/usr/src/geowave \ -v $LOCAL_REPO_DIR:/usr/src/repo \ locationtech/geowave-centos7-rpm-build \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/build-rpm/build-rpm.sh" docker run $DOCKER_ARGS --rm \ -e WORKSPACE=/usr/src/geowave \ -e BUILD_ARGS="$build_args" \ -e TIME_TAG="$TIME_TAG" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -v $WORKSPACE:/usr/src/geowave \ locationtech/geowave-centos7-rpm-build \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/build-rpm/build-services-rpm.sh --buildroot deploy/packaging/rpm/centos/7 --arch noarch" docker run --rm $DOCKER_ARGS \ -e WORKSPACE=/usr/src/geowave \ -e BUILD_ARGS="$build_args" \ -e LOCAL_REPO_DIR=/usr/src/repo \ -e LOCK_DIR=/usr/src/lock \ -e TIME_TAG="$TIME_TAG" \ -e GEOWAVE_BUCKET="$GEOWAVE_BUCKET" \ -v $DOCKER_ROOT:/root \ -v $WORKSPACE:/usr/src/geowave \ -v $LOCAL_REPO_DIR:/usr/src/repo \ -v $LOCK_DIR:/usr/src/lock \ locationtech/geowave-centos7-publish \ /bin/bash -c \ "cd \$WORKSPACE && deploy/packaging/docker/publish/publish-vendor-rpm.sh --buildroot deploy/packaging/rpm/centos/7 --arch noarch --repo geowave" done ================================================ FILE: deploy/packaging/docker/geowave-centos7-java7-build.dockerfile ================================================ FROM centos:centos7 RUN yum -y install asciidoc boost boost-devel gcc-c++ git glibc.i686 unzip which wget && \ yum clean 
all # Install repo containing python rpms RUN yum -y install https://centos7.iuscommunity.org/ius-release.rpm # Install python, pip, and python development tools (Will install alongside system python as python3.6) RUN yum -y install python36u python36u-pip python36u-devel # Install asciidoctor RUN yum -y install asciidoctor RUN cd /tmp && wget --no-check-certificate --no-cookies \ --header "Cookie: oraclelicense=accept-securebackup-cookie" \ http://download.oracle.com/otn-pub/java/jdk/7u79-b15/jdk-7u79-linux-x64.rpm -q && \ rpm -Uvh /tmp/*.rpm && rm -fr /tmp/*.rpm && \ wget http://archive.apache.org/dist/maven/maven-3/3.6.0/binaries/apache-maven-3.6.0-bin.zip && \ unzip apache-maven-3.6.0-bin.zip && \ mv apache-maven-3.6.0/ /opt/maven && \ ln -s /opt/maven/bin/mvn /usr/bin/mvn && \ rm -rf apache-maven-3.6.0-bin.zip && \ echo "export JAVA_HOME=/usr/java/latest" > /etc/profile.d/java_home.sh && cd ~ ================================================ FILE: deploy/packaging/docker/geowave-centos7-java8-build.dockerfile ================================================ FROM centos:centos7 RUN yum -y install asciidoc boost boost-devel gcc-c++ git glibc.i686 unzip which wget && \ yum clean all # Install repo containing python rpms RUN yum -y install https://centos7.iuscommunity.org/ius-release.rpm # Install python, pip, and python development tools (Will install alongside system python as python3.6) RUN yum -y install python36u python36u-pip python36u-devel # Install asciidoctor RUN yum -y install asciidoctor RUN cd /tmp && wget --no-check-certificate --no-cookies \ --header "Cookie: oraclelicense=accept-securebackup-cookie" \ http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.rpm && \ rpm -Uvh /tmp/*.rpm && rm -fr /tmp/*.rpm && \ wget http://archive.apache.org/dist/maven/maven-3/3.6.0/binaries/apache-maven-3.6.0-bin.zip && \ unzip apache-maven-3.6.0-bin.zip && \ mv apache-maven-3.6.0/ /opt/maven && \ ln -s 
/opt/maven/bin/mvn /usr/bin/mvn && \ rm -rf apache-maven-3.6.0-bin.zip && \ echo "export JAVA_HOME=/usr/java/latest" > /etc/profile.d/java_home.sh && cd ~ ================================================ FILE: deploy/packaging/docker/geowave-centos7-publish.dockerfile ================================================ FROM centos:centos7 ARG third_party_deps_path RUN yum -y install epel-release && \ yum -y install createrepo unzip zip wget && \ yum clean all && \ cd /tmp && curl "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip" && \ unzip awscli-bundle.zip && \ ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws && \ cd ~ RUN cd /tmp && \ wget ${third_party_deps_path}/hatools/hatools-2.14-1.1.el6.x86_64.rpm && \ yum -y install hatools-2.14-1.1.el6.x86_64.rpm && \ rm -rf hatools-2.14-1.1.el6.x86_64.rpm && \ cd ~ ================================================ FILE: deploy/packaging/docker/geowave-centos7-rpm-build.dockerfile ================================================ FROM centos:centos7 RUN yum -y install asciidoc asciidoctor rpm-build unzip xmlto zip wget \ ruby-devel autoconf gcc make rpm-build rubygems automake \ java-1.8.0-openjdk java-1.8.0-openjdk-devel libtool && \ yum clean all && \ cd /tmp && curl "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip" && \ unzip awscli-bundle.zip && \ ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws && \ cd ~ RUN gem install --no-ri --no-rdoc fpm ================================================ FILE: deploy/packaging/docker/init.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # GeoWave Initialization Script # # Clean any classes generated by a previous vendor build to avoid binary incompatibilities mvn clean mkdir -p $WORKSPACE/deploy/target export GEOWAVE_VERSION_STR="$(mvn -q -Dexec.executable="echo" -Dexec.args='${project.version}' --non-recursive -f $WORKSPACE/pom.xml exec:exec)" export GEOWAVE_VERSION="$(echo ${GEOWAVE_VERSION_STR} | sed -e 's/"//g' -e 's/-SNAPSHOT//g')" export GEOWAVE_RPM_VERSION="$(echo ${GEOWAVE_VERSION} | sed -e 's/"//g' -e 's/-/~/g')" echo $GEOWAVE_VERSION > $WORKSPACE/deploy/target/version.txt echo $GEOWAVE_RPM_VERSION > $WORKSPACE/deploy/target/rpm_version.txt if [[ "$GEOWAVE_VERSION_STR" =~ "-SNAPSHOT" ]] then #its a dev/latest build echo "dev" > $WORKSPACE/deploy/target/build-type.txt echo "latest" > $WORKSPACE/deploy/target/version-url.txt else #its a release echo "release" > $WORKSPACE/deploy/target/build-type.txt echo $GEOWAVE_VERSION_STR > $WORKSPACE/deploy/target/version-url.txt fi ================================================ FILE: deploy/packaging/docker/publish/publish-common-rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # For use by rpm building jenkins jobs. Handles job race conditions and # reindexing the existing rpm repo # # This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host trap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm' EXIT trap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm && exit' ERR # Get the version GEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt) BUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt) GEOWAVE_VERSION_URL=$(cat $WORKSPACE/deploy/target/version-url.txt) echo "---------------------------------------------------------------" echo " Publishing GeoWave Common RPMs" echo "GEOWAVE_VERSION=${GEOWAVE_VERSION}" echo "GEOWAVE_VERSION_URL=${GEOWAVE_VERSION_URL}" echo "BUILD_TYPE=${BUILD_TYPE}" echo "TIME_TAG=${TIME_TAG}" echo "GEOWAVE_BUCKET=${GEOWAVE_BUCKET}" echo "GEOWAVE_RPM_BUCKET=${GEOWAVE_RPM_BUCKET}" echo "---------------------------------------------------------------" echo "###### Build Variables" set -x declare -A ARGS while [ $# -gt 0 ]; do # Trim the first two chars off of the arg name ex: --foo case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done if [[ ${BUILD_TYPE} = "dev" ]] then TIME_TAG_STR="-${TIME_TAG}" fi echo '###### Build tarball distribution archive' # Copy the SRPM into an extract directory mkdir -p ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave cp ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave cd ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave # Extract all the files rpm2cpio *.rpm | cpio -idmv # Push our compiled docs and scripts to S3 if aws 
command has been installed and version url is defined if command -v aws >/dev/null 2>&1 ; then if [[ ! -z "$GEOWAVE_VERSION_URL" ]]; then echo '###### Cleaning and copying documentation to S3' aws s3 rm --recursive s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/docs/ --quiet aws s3 cp --acl public-read --recursive ${WORKSPACE}/target/site/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/docs/ --quiet echo '###### Cleaning and copying scripts to S3' ${WORKSPACE}/deploy/packaging/emr/generate-emr-scripts.sh --buildtype ${BUILD_TYPE} --version ${GEOWAVE_VERSION} --workspace ${WORKSPACE} --bucket ${GEOWAVE_BUCKET} --rpmbucket ${GEOWAVE_RPM_BUCKET} ${WORKSPACE}/deploy/packaging/sandbox/generate-sandbox-scripts.sh --version ${GEOWAVE_VERSION} --workspace ${WORKSPACE} --bucket ${GEOWAVE_BUCKET} aws s3 rm --recursive s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/scripts/ --quiet aws s3 cp --acl public-read --recursive ${WORKSPACE}/deploy/packaging/emr/generated/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/scripts/emr/ --quiet aws s3 cp --acl public-read --recursive ${WORKSPACE}/deploy/packaging/sandbox/generated/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/scripts/sandbox/ --quiet if [[ -d ${WORKSPACE}/deploy/target/install4j-output ]]; then echo '###### Copying standalone installers to S3' aws s3 cp --acl public-read --recursive ${WORKSPACE}/deploy/target/install4j-output/ s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/standalone-installers/ --quiet fi aws s3 cp --acl public-read --recursive ${WORKSPACE}/examples/data/notebooks/ s3://${GEOWAVE_BUCKET}-notebooks/${GEOWAVE_VERSION_URL}/notebooks/ --quiet # Copy built pyspark package to lib directory aws s3 cp --acl public-read ${WORKSPACE}/analytics/pyspark/target/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz s3://${GEOWAVE_BUCKET}/${GEOWAVE_VERSION_URL}/lib/geowave_pyspark-${GEOWAVE_VERSION}.tar.gz echo '###### Cleaning and copying documentation to S3' aws s3 sync s3://${GEOWAVE_RPM_BUCKET}/${BUILD_TYPE}/${ARGS[arch]}/ 
${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/ --delete else echo '###### Skipping publish to S3: GEOWAVE_VERSION_URL not defined' fi else echo '###### Skipping publish to S3: AWS command not found' fi # Archive things, copy some artifacts up to AWS if available and get rid of our temp area cd .. tar cvzf geowave-${GEOWAVE_VERSION}${TIME_TAG_STR}.tar.gz geowave rm -rf geowave echo '###### Copy rpm to repo and reindex' mkdir -p ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/{SRPMS,TARBALL,${ARGS[arch]}}/ cp -R ${WORKSPACE}/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/ cp -fR ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/SRPMS/ cp -fR ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/*.tar.gz ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/TARBALL/ # When several processes run createrepo concurrently they will often fail with problems trying to # access index files that are in the process of being overwritten by the other processes. The command # below uses two utilities that will cause calls to createrepo (from this script) to wait to gain an # exclusive file lock before proceeding with a maximum wait time set at 10 minutes before they give # up and fail. the ha* commands are from the hatools rpm available via EPEL. hatimerun -t 10:00 \ halockrun -c ${LOCK_DIR}/rpmrepo \ createrepo --update --workers 2 ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/ ================================================ FILE: deploy/packaging/docker/publish/publish-vendor-rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # For use by rpm building jenkins jobs. Handles job race conditions and # reindexing the existing rpm repo # # This script runs with a volume mount to $WORKSPACE, this ensures that any signal failure will leave all of the files $WORKSPACE editable by the host trap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm' EXIT trap 'chmod -R 777 $WORKSPACE/deploy/packaging/rpm && exit' ERR # Get the version GEOWAVE_VERSION=$(cat $WORKSPACE/deploy/target/version.txt) BUILD_TYPE=$(cat $WORKSPACE/deploy/target/build-type.txt) VENDOR_VERSION=apache if [ ! -z "$BUILD_ARGS" ]; then VENDOR_VERSION=$(echo "$BUILD_ARGS" | grep -oi "vendor.version=\w*" | sed "s/vendor.version=//g") fi echo "---------------------------------------------------------------" echo " Publishing GeoWave Vendor-specific RPMs" echo "GEOWAVE_VERSION=${GEOWAVE_VERSION}" echo "TIME_TAG=${TIME_TAG}" echo "VENDOR_VERSION=${VENDOR_VERSION}" echo "BUILD_TYPE=${BUILD_TYPE}" echo "BUILD_ARGS=${BUILD_ARGS}" echo "---------------------------------------------------------------" set -x echo '###### Build Variables' declare -A ARGS while [ $# -gt 0 ]; do # Trim the first two chars off of the arg name ex: --foo case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done if [ ${BUILD_TYPE} = "dev" ] then TIME_TAG_STR="-${TIME_TAG}" fi echo '###### Build tarball distribution archive' # Copy the SRPM into an extract directory mkdir -p ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave cp ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave cd ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/geowave # Extract all the files rpm2cpio *.rpm | cpio -idmv # Remove what we don't want 
to distribute within the tarball rm -f *.rpm *.xml *.spec # Extract the build metadata from one of the artifacts unzip -p geowave-accumulo-${GEOWAVE_VERSION}-${VENDOR_VERSION}.jar build.properties > build.properties # Archive things, copy some artifacts up to AWS if available and get rid of our temp area cd .. tar cvzf geowave-${GEOWAVE_VERSION}-${VENDOR_VERSION}${TIME_TAG_STR}.tar.gz geowave rm -rf geowave echo '###### Copy rpm to repo and reindex' mkdir -p ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/{SRPMS,TARBALL,${ARGS[arch]}}/ mkdir -p ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}-jars/JAR/ cp -R ${WORKSPACE}/${ARGS[buildroot]}/RPMS/${ARGS[arch]}/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/ cp -fR ${WORKSPACE}/${ARGS[buildroot]}/SRPMS/*.rpm ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/SRPMS/ cp -fR ${WORKSPACE}/${ARGS[buildroot]}/TARBALL/*.tar.gz ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/TARBALL/ pushd ${WORKSPACE}/${ARGS[buildroot]}/SOURCES/ for i in *.jar; do cp "${i}" ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}-jars/JAR/"${i%.jar}${TIME_TAG_STR}.jar" ; done for i in *.war; do cp "${i}" ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}-jars/JAR/"${i%.war}${TIME_TAG_STR}.war" ; done popd # When several processes run createrepo concurrently they will often fail with problems trying to # access index files that are in the process of being overwritten by the other processes. The command # below uses two utilities that will cause calls to createrepo (from this script) to wait to gain an # exclusive file lock before proceeding with a maximum wait time set at 10 minutes before they give # up and fail. the ha* commands are from the hatools rpm available via EPEL. 
hatimerun -t 10:00 \ halockrun -c ${LOCK_DIR}/rpmrepo \ createrepo --update --workers 2 ${LOCAL_REPO_DIR}/${ARGS[repo]}/${BUILD_TYPE}/${ARGS[arch]}/ ================================================ FILE: deploy/packaging/docker/pull-s3-caches.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # If we've not specifically disabled and there is no current Maven repo # pull a cache from S3 so the first run won't take forever if [ -z $NO_MAVEN_INIT ] && [ ! -d $1/.m2 ]; then echo "Downloading Maven Cache ..." MVN_CACHE_BASE=https://s3.amazonaws.com/geowave-deploy/cache-bundle CACHE_FILE=mvn-repo-cache-20170810.tar.gz pushd $1 curl -O $MVN_CACHE_BASE/$CACHE_FILE tar xf $1/$CACHE_FILE rm -f $1/$CACHE_FILE popd #if run in docker, do the following: #type getenforce >/dev/null 2>&1 && getenforce >/dev/null 2>&1 && chcon -Rt svirt_sandbox_file_t $1/.m2; echo "Finished Downloading Maven Cache ..." fi ================================================ FILE: deploy/packaging/emr/README.md ================================================ #### GeoWave on EMR The configuration files in this directory can be used to deploy GeoWave to the Amazon Elastic MapReduce (EMR) service which allows you to be able to quickly stand up a cluster with Accumulo and GeoWave pre-installed. There are tokens within the template. 
Running generate-emr-scripts.sh will take the template and generate a set of scripts, replacing tokens appropriately (required parameters to that script are --buildtype (either dev or release), --version, --workspace (path to Jenkins job workspace), and --bucket (custom bucket with default being geowave)). The resultant scripts will be in a 'generated' directory. The GeoWave documentation has instructions for how to deploy and use these files in the Running from EMR section. ================================================ FILE: deploy/packaging/emr/generate-emr-scripts.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # This will take the template and generate a set of scripts, replacing tokens appropriately # required parameters are --buildtype (dev or release), --version, --workspace, --rpmbucket, and --bucket DATASTORES=( "accumulo" "hbase" "cassandra" ) declare -A ARGS while [ $# -gt 0 ]; do case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done if [[ "${ARGS[buildtype]}" = "dev" ]] then #its a dev/latest build GEOWAVE_REPO_RPM_TOKEN=geowave-repo-dev-1.0-3.noarch.rpm GEOWAVE_VERSION_URL_TOKEN=latest GEOWAVE_REPO_NAME_TOKEN=geowave-dev GEOWAVE_REPO_BASE_URL_TOKEN=http://s3.amazonaws.com/${ARGS[rpmbucket]}/dev/noarch/ else #its a release GEOWAVE_REPO_RPM_TOKEN=geowave-repo-1.0-3.noarch.rpm GEOWAVE_VERSION_URL_TOKEN="${ARGS[version]}" 
GEOWAVE_REPO_NAME_TOKEN=geowave GEOWAVE_REPO_BASE_URL_TOKEN=http://s3.amazonaws.com/${ARGS[rpmbucket]}/release/noarch/ fi GEOWAVE_BUCKET_TOKEN=${ARGS[bucket]} TARGET_ROOT=${ARGS[workspace]}/deploy/packaging/emr/generated TEMPLATE_ROOT=${ARGS[workspace]}/deploy/packaging/emr/template SLD_DIR=${ARGS[workspace]}/examples/data/slds mkdir -p $TARGET_ROOT/quickstart # temporarily cp templates to replace common tokens and then cp it to data store locations and rm it here cp $TEMPLATE_ROOT/bootstrap-geowave.sh.template $TEMPLATE_ROOT/bootstrap-geowave.sh cp $TEMPLATE_ROOT/geowave-install-lib.sh.template $TEMPLATE_ROOT/geowave-install-lib.sh cp $TEMPLATE_ROOT/quickstart/geowave-env.sh.template $TARGET_ROOT/quickstart/geowave-env.sh cp $TEMPLATE_ROOT/jupyter/bootstrap-jupyter.sh.template $TEMPLATE_ROOT/bootstrap-jupyter.sh cp $TEMPLATE_ROOT/jupyter/create-configure-kernel.sh.template $TEMPLATE_ROOT/create-configure-kernel.sh cp $TEMPLATE_ROOT/jupyter/bootstrap-jupyterhub.sh.template $TEMPLATE_ROOT/bootstrap-jupyterhub.sh cp $TEMPLATE_ROOT/bootstrap-zeppelin.sh.template $TEMPLATE_ROOT/bootstrap-zeppelin.sh cp $TEMPLATE_ROOT/configure-zeppelin.sh.template $TEMPLATE_ROOT/configure-zeppelin.sh # copy permanent resources that don't need a template cp $TEMPLATE_ROOT/quickstart/setup-geoserver-geowave-workspace.sh $TARGET_ROOT/quickstart/setup-geoserver-geowave-workspace.sh cp $SLD_DIR/*.sld $TARGET_ROOT/quickstart # replace version token first sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-geowave.sh sed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-geowave.sh sed -i -e s/'$GEOWAVE_REPO_RPM_TOKEN'/${GEOWAVE_REPO_RPM_TOKEN}/g $TEMPLATE_ROOT/bootstrap-geowave.sh sed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-geowave.sh sed -i -e s~'$GEOWAVE_REPO_BASE_URL_TOKEN'~${GEOWAVE_REPO_BASE_URL_TOKEN}~g $TEMPLATE_ROOT/geowave-install-lib.sh sed -i -e 
s/'$GEOWAVE_REPO_NAME_TOKEN'/${GEOWAVE_REPO_NAME_TOKEN}/g $TEMPLATE_ROOT/geowave-install-lib.sh sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TARGET_ROOT/quickstart/geowave-env.sh # replacing tokens for jupyter bootstrap scripts sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-jupyter.sh sed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyter.sh sed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyter.sh sed -i -e s/'$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}-notebooks/g $TEMPLATE_ROOT/bootstrap-jupyter.sh sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/create-configure-kernel.sh sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh sed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh sed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh sed -i -e s/'$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}-notebooks/g $TEMPLATE_ROOT/bootstrap-jupyterhub.sh sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/bootstrap-zeppelin.sh sed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/${GEOWAVE_VERSION_URL_TOKEN}/g $TEMPLATE_ROOT/bootstrap-zeppelin.sh sed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}/g $TEMPLATE_ROOT/bootstrap-zeppelin.sh sed -i -e s/'$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN'/${GEOWAVE_BUCKET_TOKEN}-notebooks/g $TEMPLATE_ROOT/configure-zeppelin.sh sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TEMPLATE_ROOT/configure-zeppelin.sh sed -i -e s/'$GEOWAVE_REPO_RPM_TOKEN'/${ARGS[rpmbucket]}/g $TEMPLATE_ROOT/configure-zeppelin.sh for datastore in "${DATASTORES[@]}" do mkdir -p $TARGET_ROOT/quickstart/$datastore mkdir -p $TARGET_ROOT/$datastore cp $TEMPLATE_ROOT/bootstrap-geowave.sh $TARGET_ROOT/$datastore/bootstrap-geowave.sh sed -e 
'/$DATASTORE_BOOTSTRAP_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_BOOTSTRAP_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/bootstrap-geowave.sh sed -e '/$DATASTORE_CONFIGURE_GEOWAVE_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_CONFIGURE_GEOWAVE_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/bootstrap-geowave.sh sed -i -e s/'$DATASTORE_TOKEN'/$datastore/g $TARGET_ROOT/$datastore/bootstrap-geowave.sh cp $TARGET_ROOT/$datastore/bootstrap-geowave.sh $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh sed -i -e s/'$QUICKSTART_BOOTSTRAP_TOKEN'//g $TARGET_ROOT/$datastore/bootstrap-geowave.sh sed -e '/$QUICKSTART_BOOTSTRAP_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/quickstart/QUICKSTART_BOOTSTRAP_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh sed -i -e s/'$GEOWAVE_BUCKET_TOKEN'/$GEOWAVE_BUCKET_TOKEN/g $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh sed -i -e s/'$GEOWAVE_VERSION_URL_TOKEN'/$GEOWAVE_VERSION_URL_TOKEN/g $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh sed -i -e s/'$DATASTORE_TOKEN'/$datastore/g $TARGET_ROOT/quickstart/$datastore/bootstrap-geowave.sh cp $TEMPLATE_ROOT/geowave-install-lib.sh $TARGET_ROOT/$datastore/geowave-install-lib.sh sed -i -e s/'$DATASTORE_TOKEN'/${datastore}/g $TARGET_ROOT/$datastore/geowave-install-lib.sh sed -e '/$DATASTORE_PUPPET_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_PUPPET_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/geowave-install-lib.sh sed -e '/$DATASTORE_LIB_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_LIB_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/$datastore/geowave-install-lib.sh cp $TEMPLATE_ROOT/quickstart/ingest-and-kde-gdelt.sh.template $TARGET_ROOT/quickstart/$datastore/ingest-and-kde-gdelt.sh sed -e '/$DATASTORE_PARAMS_TOKEN/ {' -e 'r '$TEMPLATE_ROOT/$datastore/DATASTORE_PARAMS_TOKEN'' -e 'd' -e '}' -i $TARGET_ROOT/quickstart/$datastore/ingest-and-kde-gdelt.sh done # Copy jupyter additions to separate generated folder # This will put 
scripts into separate jupyter folder on s3 when published. mkdir -p $TARGET_ROOT/jupyter # copy permanent resources that don't need a template cp $TEMPLATE_ROOT/jupyter/install-conda.sh $TARGET_ROOT/jupyter/install-conda.sh cp $TEMPLATE_ROOT/jupyter/jupyterhub_config.py $TARGET_ROOT/jupyter/jupyterhub_config.py cp $TEMPLATE_ROOT/jupyter/pre-spawn.sh $TARGET_ROOT/jupyter/pre-spawn.sh cp $TEMPLATE_ROOT/jupyter/gw-base.yml $TARGET_ROOT/jupyter/gw-base.yml cp $TEMPLATE_ROOT/bootstrap-jupyter.sh $TARGET_ROOT/jupyter/bootstrap-jupyter.sh cp $TEMPLATE_ROOT/create-configure-kernel.sh $TARGET_ROOT/jupyter/create-configure-kernel.sh cp $TEMPLATE_ROOT/bootstrap-jupyterhub.sh $TARGET_ROOT/jupyter/bootstrap-jupyterhub.sh # Copy zeppelin additions to separate generated folder # This will put scripts into separate zeppelin folder on s3 when published. mkdir -p $TARGET_ROOT/zeppelin cp $TEMPLATE_ROOT/bootstrap-zeppelin.sh $TARGET_ROOT/zeppelin/bootstrap-zeppelin.sh cp $TEMPLATE_ROOT/configure-zeppelin.sh $TARGET_ROOT/zeppelin/configure-zeppelin.sh # clean up temporary templates rm $TEMPLATE_ROOT/bootstrap-geowave.sh rm $TEMPLATE_ROOT/geowave-install-lib.sh rm $TEMPLATE_ROOT/bootstrap-jupyter.sh rm $TEMPLATE_ROOT/create-configure-kernel.sh rm $TEMPLATE_ROOT/bootstrap-jupyterhub.sh rm $TEMPLATE_ROOT/bootstrap-zeppelin.sh rm $TEMPLATE_ROOT/configure-zeppelin.sh ================================================ FILE: deploy/packaging/emr/template/accumulo/DATASTORE_BOOTSTRAP_TOKEN ================================================ # Get Accumulo running os_tweaks && configure_zookeeper create_accumulo_user && install_accumulo && configure_accumulo ================================================ FILE: deploy/packaging/emr/template/accumulo/DATASTORE_CONFIGURE_GEOWAVE_TOKEN ================================================ if is_master ; then configure_geowave_accumulo fi ================================================ FILE: deploy/packaging/emr/template/accumulo/DATASTORE_LIB_TOKEN 
================================================ #!/usr/bin/env bash # # Installing additional components on an EMR node depends on several config files # controlled by the EMR framework which may affect the is_master and configure_zookeeper # functions at some point in the future. I've grouped each unit of work into a function # with a descriptive name to help with understanding and maintainability # # You can change these but there is probably no need # Accumulo USER=accumulo # NOTE: This password, the Accumulo instance secret and the geoserver password are left at # The default settings. The default EMR Security group setting only allows ssh/22 open to # external access so access to internal consoles and web UIs has to be done over SSH. # At some point in the future when this is revisited remember that nodes can be added to an # EMR at any point after creation so the password set during the initial spin-up would have # to be persisted somewhere and provided to the newly created nodes at some later date. USERPW=secret # TODO: Can't change until trace.password in accumulo-site.xml is updated ACCUMULO_VERSION=2.0.1 INSTALL_DIR=/opt ACCUMULO_DOWNLOAD_BASE_URL=https://archive.apache.org/dist/accumulo ACCUMULO_INSTANCE=accumulo ACCUMULO_HOME="${INSTALL_DIR}/accumulo" HDFS_USER=hdfs ZK_IPADDR= # Using zookeeper packaged by Apache BigTop for ease of installation configure_zookeeper() { if is_master ; then sudo yum -y install zookeeper-server # EMR 4.3.0 includes Apache Bigtop.repo config # EMR uses Amazon Linux which uses Upstart # EMR 5.30 uses systemctl and earlier versions use initctl if ! 
command -v initctl &> /dev/null then sudo systemctl start zookeeper-server else sudo initctl start zookeeper-server fi # Zookeeper installed on this node, record internal ip from instance metadata ZK_IPADDR=$(curl http://169.254.169.254/latest/meta-data/local-ipv4) else # Zookeeper intalled on master node, parse config file to find EMR master node ZK_IPADDR=$(xmllint --xpath "//property[name='yarn.resourcemanager.hostname']/value/text()" /etc/hadoop/conf/yarn-site.xml) fi } create_accumulo_user() { id $USER if [ $? != 0 ]; then sudo adduser $USER sudo sh -c "echo '$USERPW' | passwd $USER --stdin" fi } install_accumulo() { wait_until_hdfs_is_available ARCHIVE_FILE="accumulo-${ACCUMULO_VERSION}-bin.tar.gz" LOCAL_ARCHIVE="${INSTALL_DIR}/${ARCHIVE_FILE}" sudo sh -c "curl '${ACCUMULO_DOWNLOAD_BASE_URL}/${ACCUMULO_VERSION}/${ARCHIVE_FILE}' > $LOCAL_ARCHIVE" sudo sh -c "tar xzf $LOCAL_ARCHIVE -C $INSTALL_DIR" sudo rm -f $LOCAL_ARCHIVE sudo ln -s "${INSTALL_DIR}/accumulo-${ACCUMULO_VERSION}" "${INSTALL_DIR}/accumulo" sudo chown -R accumulo:accumulo "${INSTALL_DIR}/accumulo-${ACCUMULO_VERSION}" sudo sh -c "echo 'export PATH=$PATH:${INSTALL_DIR}/accumulo/bin' > /etc/profile.d/accumulo.sh" } configure_accumulo() { sudo sed -i "s/localhost:2181/${ZK_IPADDR}:2181/" $INSTALL_DIR/accumulo/conf/accumulo.properties sudo sed -i "s/localhost:2181/${ZK_IPADDR}:2181/" $INSTALL_DIR/accumulo/conf/accumulo-client.properties sudo sed -i "s/instance.name=/instance.name=${ACCUMULO_INSTANCE}/" $INSTALL_DIR/accumulo/conf/accumulo-client.properties sudo sed -i "s/localhost:8020/${ZK_IPADDR}:8020/" $INSTALL_DIR/accumulo/conf/accumulo.properties sudo sed -i "s/\${LOG4J_JAR}/\${LOG4J_JAR}:\/usr\/lib\/hadoop\/lib\/*:\/usr\/lib\/hadoop\/client\/*/" $INSTALL_DIR/accumulo/bin/accumulo # Crazy escaping to get this shell to fill in values but root to write out the file export ENV_VARS="export HADOOP_USER_NAME=accumulo; export ACCUMULO_HOME=$INSTALL_DIR/accumulo; export HADOOP_HOME=/usr/lib/hadoop; 
export ACCUMULO_LOG_DIR=$INSTALL_DIR/accumulo/logs; export JAVA_HOME=/usr/lib/jvm/java; export ZOOKEEPER_HOME=/usr/lib/zookeeper; export HADOOP_PREFIX=/usr/lib/hadoop; export HADOOP_CONF_DIR=/etc/hadoop/conf" sudo sed -i "29 a ${ENV_VARS}" $INSTALL_DIR/accumulo/conf/accumulo-env.sh sudo chown -R $USER:$USER $INSTALL_DIR/accumulo if is_master ; then sudo sed -i "s/share\/hadoop\/client/client/" $INSTALL_DIR/accumulo/conf/accumulo-env.sh sudo -u $HDFS_USER hadoop fs -chmod 777 /user # This is more for Spark than Accumulo but put here for expediency sudo -u $HDFS_USER hadoop fs -mkdir /accumulo sudo -u $HDFS_USER hadoop fs -chown accumulo:accumulo /accumulo sudo sh -c "hostname > $INSTALL_DIR/accumulo/conf/monitor" sudo sh -c "hostname > $INSTALL_DIR/accumulo/conf/gc" sudo sh -c "hostname > $INSTALL_DIR/accumulo/conf/tracers" sudo sh -c "hostname > $INSTALL_DIR/accumulo/conf/managers" ## accumulo deprecated masters in place of manages in 2.0 but accumulo scripts seem inconsistent with using managers sudo sh -c "hostname > $INSTALL_DIR/accumulo/conf/masters" sudo sh -c "echo > $INSTALL_DIR/accumulo/conf/tservers" sudo -u $USER $INSTALL_DIR/accumulo/bin/accumulo init --clear-instance-name --instance-name $ACCUMULO_INSTANCE --password $USERPW else sudo sed -i "s/share\/hadoop\/client\/\*/*:\${HADOOP_HOME}\/*:\${HADOOP_HOME}\/lib\/*:\/usr\/lib\/hadoop-hdfs\/*/" $INSTALL_DIR/accumulo/conf/accumulo-env.sh sudo sh -c "echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/monitor" sudo sh -c "echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/gc" sudo sh -c "echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/tracers" sudo sh -c "echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/managers" ## accumulo deprecated masters in place of manages in 2.0 but accumulo scripts seem inconsistent with using managers sudo sh -c "echo $ZK_IPADDR > $INSTALL_DIR/accumulo/conf/masters" sudo sh -c "hostname > $INSTALL_DIR/accumulo/conf/tservers" fi # EMR starts worker instances first so there will be timing issues # Test 
to ensure it's safe to continue before attempting to start things up if is_master ; then with_backoff is_accumulo_initialized else with_backoff is_accumulo_available fi sudo -u $USER $INSTALL_DIR/accumulo/bin/accumulo-cluster start-here } configure_geowave_accumulo(){ # Configure accumulo user and namespace export PATH=${PATH}:/opt/accumulo/bin cat < true, ================================================ FILE: deploy/packaging/emr/template/bootstrap-geowave.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/usr/bin/env bash # # Bootstrap a GeoWave cluster node # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Config Settings you might want to update # GeoWave GEOWAVE_REPO_RPM=$GEOWAVE_REPO_RPM_TOKEN # TODO: Should have a prod->latest rpm GEOWAVE_VERSION=$GEOWAVE_VERSION_TOKEN HTTP_PORT='8000' GRPC_PORT='8980' AJP_PORT='8010' SHUTDOWN_PORT='8006' PUBLIC_DNS=$(curl http://169.254.169.254/latest/meta-data/public-hostname) PUBLIC_DNS_NOT_FOUND=`echo "${PUBLIC_DNS}" | grep '404 - Not Found'` if [ ! -z "$PUBLIC_DNS_NOT_FOUND" ]; then PUBLIC_DNS=$(curl http://169.254.169.254/latest/meta-data/hostname) fi GEOSERVER_MEMORY="-Xmx1g" # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # I've externalized commands into library functions for clarity, download and source if [ ! 
-f /tmp/geowave-install-lib.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/$DATASTORE_TOKEN/geowave-install-lib.sh /tmp/geowave-install-lib.sh fi source /tmp/geowave-install-lib.sh # The EMR customize hooks run _before_ everything else, so Hadoop is not yet ready THIS_SCRIPT="$(realpath "${BASH_SOURCE[0]}")" RUN_FLAG="${THIS_SCRIPT}.run" # On first boot skip past this script to allow EMR to set up the environment. Set a callback # which will poll for availability of HDFS and then install Accumulo and then GeoWave if [ ! -f "$RUN_FLAG" ]; then touch "$RUN_FLAG" TIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4 echo "bash -x $(realpath "${BASH_SOURCE[0]}") > /tmp/geowave-install.log" | at now + $TIMEOUT min exit 0 # Bail and let EMR finish initializing fi $DATASTORE_BOOTSTRAP_TOKEN # Install GeoWave components on master node if is_master ; then install_geowave true fi $DATASTORE_CONFIGURE_GEOWAVE_TOKEN $QUICKSTART_BOOTSTRAP_TOKEN ================================================ FILE: deploy/packaging/emr/template/bootstrap-zeppelin.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash GEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN} #I've externalized commands into library functions for clarity, download and source if [ ! 
-f /tmp/configure-zeppelin.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/zeppelin/configure-zeppelin.sh /tmp/configure-zeppelin.sh fi source /tmp/configure-zeppelin.sh # The EMR customize hooks run _before_ everything else, so Spark is not yet ready THIS_SCRIPT="$(realpath "${BASH_SOURCE[0]}")" RUN_FLAG="${THIS_SCRIPT}.run" # On first boot skip past this script to allow EMR to set up the environment. Set a callback # which will poll for availability of Zeppelin and then configure the zeppelin environment if [ ! -f "$RUN_FLAG" ]; then touch "$RUN_FLAG" TIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4 echo "bash -x $(realpath "${BASH_SOURCE[0]}") > /tmp/bootstrap-zeppelin.log" | at now + $TIMEOUT min exit 0 # Bail and let EMR finish initializing fi # These steps have to be done after geowave has been installed if is_master ; then config_zep fi echo "Zeppelin configured" ================================================ FILE: deploy/packaging/emr/template/cassandra/DATASTORE_BOOTSTRAP_TOKEN ================================================ # Bootstrap a Cassandra cluster node # cat << EOF > /tmp/cassandra.repo [cassandra] name=Apache Cassandra baseurl=https://www.apache.org/dist/cassandra/redhat/311x/ gpgcheck=1 repo_gpgcheck=1 gpgkey=https://www.apache.org/dist/cassandra/KEYS EOF sudo mv /tmp/cassandra.repo /etc/yum.repos.d/cassandra.repo sudo mkdir -p /mnt/cassandra/data sudo chmod 777 -R /mnt/cassandra sudo yum -y install cassandra MASTER_IP=$(xmllint --xpath "//property[name='yarn.resourcemanager.hostname']/value/text()" /etc/hadoop/conf/yarn-site.xml) sudo chmod 777 /etc/cassandra/conf/cassandra.yaml echo "auto_bootstrap: false" >> /etc/cassandra/conf/cassandra.yaml sudo sed -i 's/seeds:.*/seeds: \"'${MASTER_IP}'\"/g' /etc/cassandra/conf/cassandra.yaml sudo sed -i 's/listen_address:.*/listen_address:/g' /etc/cassandra/conf/cassandra.yaml sudo sed -i 's/endpoint_snitch:.*/endpoint_snitch: Ec2Snitch/g' 
/etc/cassandra/conf/cassandra.yaml sudo sed -i 's!/var/lib/cassandra/data!/mnt/cassandra/data!g' /etc/cassandra/conf/cassandra.yaml sudo sed -i 's/.*commitlog_total_space_in_mb:.*/commitlog_total_space_in_mb: 4096/g' /etc/cassandra/conf/cassandra.yaml sudo service cassandra start ================================================ FILE: deploy/packaging/emr/template/cassandra/DATASTORE_PARAMS_TOKEN ================================================ -t cassandra --contactPoints localhost ================================================ FILE: deploy/packaging/emr/template/configure-zeppelin.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash GEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN} INTIAL_POLLING_INTERVAL=15 # This gets doubled for each attempt up to max_attempts # Parses a configuration file put in place by EMR to determine the role of this node is_master() { if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then return 0 else return 1 fi } # Avoid race conditions and actually poll for availability of component dependencies # Credit: http://stackoverflow.com/questions/8350942/how-to-re-run-the-curl-command-automatically-when-the-error-occurs/8351489#8351489 with_backoff() { local max_attempts=${ATTEMPTS-5} local timeout=${INTIAL_POLLING_INTERVAL-1} local attempt=0 local exitCode=0 while (( $attempt < $max_attempts )) do set +e "$@" exitCode=$? 
set -e if [[ $exitCode == 0 ]] then break fi echo "Retrying $@ in $timeout.." 1>&2 sleep $timeout attempt=$(( attempt + 1 )) timeout=$(( timeout * 2 )) done if [[ $exitCode != 0 ]] then echo "Fail: $@ failed to complete after $max_attempts attempts" 1>&2 fi return $exitCode } is_geowave_available() { geowave return $? } wait_until_geowave_is_available() { with_backoff is_geowave_available if [ $? != 0 ]; then echo "GeoWave not available before timeout. Exiting ..." exit 1 fi } config_zep() { wait_until_geowave_is_available #Use jq to remove unnecessary keys GEOWAVE_INSTALL=/usr/local/geowave/tools/geowave-tools-${GEOWAVE_VER}-apache.jar ZEPPELIN_ENV=/usr/lib/zeppelin/conf/zeppelin-env.sh #Add geowave jar to submit --jars option jar_arg='--jars '${GEOWAVE_INSTALL} #Modifying default spark allocation properties to use max memory resources available with HBase YARN_SCHED_MAX=`xmllint --xpath 'string(//property[name="yarn.scheduler.maximum-allocation-mb"]/value)' /etc/hadoop/conf/yarn-site.xml` YARN_CONT_MAX=`xmllint --xpath 'string(//property[name="yarn.nodemanager.resource.memory-mb"]/value)' /etc/hadoop/conf/yarn-site.xml` echo "Yarn Scheduler Max Memory = ${YARN_SCHED_MAX}(MB)" echo "Yarn Container Max Memory = ${YARN_CONT_MAX}(MB)" MAX_MOD=0.9 CONT_MOD=0.8 #Use bc calculator to get new max and container memory and truncate floating result MOD_SCHED_MAX=$(echo "($YARN_SCHED_MAX*$MAX_MOD) / 1" | bc) MOD_CONT_MAX=$(echo "($YARN_CONT_MAX*$CONT_MOD) / 1" | bc) echo "Modified Yarn Scheduler Max Memory = ${MOD_SCHED_MAX}(MB)" echo "Modified Yarn Container Max Memory = ${MOD_CONT_MAX}(MB)" DRIVER_MEM="--driver-memory ${MOD_SCHED_MAX}M " EXECUTOR_MEM="--executor-memory ${MOD_CONT_MAX}M " submit_string=$DRIVER_MEM$EXECUTOR_MEM$jar_arg echo "New Spark Submit Options: ${submit_string}" # add spark submit options to zeppelin env replaceEscaped=$(sed 's/[&/\]/\\&/g' <<<"${submit_string}") sudo sed -i -e s/'$SPARK_SUBMIT_OPTIONS'/"$replaceEscaped"/g $ZEPPELIN_ENV # This was 
added because Upstart doesn't capture user environment variables before loading zeppelin # Cant use the printf command to insert into priviledged file instead use tee command to append # /dev/null prevents command from writing output to console printf "\nexport HOSTNAME=$HOSTNAME" | sudo tee --append $ZEPPELIN_ENV > /dev/null #TODO REPLACE WITH FINAL JAR LOCATION # Download geowave jar and install at correct location aws s3 cp s3://$GEOWAVE_RPMS_BUCKET_TOKEN/release-jars/JAR/geowave-tools-${GEOWAVE_VER}-apache-accumulo1.7.jar /mnt/tmp/geowave-tools-accumulo17.jar mkdir $HOME/backup/ sudo mv $GEOWAVE_INSTALL $HOME/backup/ sudo mv /mnt/tmp/geowave-tools-accumulo17.jar $GEOWAVE_INSTALL return 0 } ================================================ FILE: deploy/packaging/emr/template/geowave-install-lib.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/usr/bin/env bash # # Installing additional components on an EMR node depends on several config files # controlled by the EMR framework which may affect the is_master and configure_zookeeper # functions at some point in the future. 
I've grouped each unit of work into a function # with a descriptive name to help with understanding and maintainability # INTIAL_POLLING_INTERVAL=15 # This gets doubled for each attempt up to max_attempts # Parses a configuration file put in place by EMR to determine the role of this node is_master() { if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then return 0 else return 1 fi } # Avoid race conditions and actually poll for availability of component dependencies # Credit: http://stackoverflow.com/questions/8350942/how-to-re-run-the-curl-command-automatically-when-the-error-occurs/8351489#8351489 with_backoff() { local max_attempts=${ATTEMPTS-5} local timeout=${INTIAL_POLLING_INTERVAL-1} local attempt=0 local exitCode=0 while (( $attempt < $max_attempts )) do set +e "$@" exitCode=$? set -e if [[ $exitCode == 0 ]] then break fi echo "Retrying $@ in $timeout.." 1>&2 sleep $timeout attempt=$(( attempt + 1 )) timeout=$(( timeout * 2 )) done if [[ $exitCode != 0 ]] then echo "Fail: $@ failed to complete after $max_attempts attempts" 1>&2 fi return $exitCode } is_hdfs_available() { hadoop fs -ls / return $? } wait_until_hdfs_is_available() { with_backoff is_hdfs_available if [ $? != 0 ]; then echo "HDFS not available before timeout. Exiting ..." 
exit 1 fi } install_geowave() { SET_PUBLIC_DNS=${1:-false} # Install the repo config file sudo rpm -Uvh $GEOWAVE_REPO_BASE_URL_TOKEN$GEOWAVE_REPO_RPM # So as not to install incompatible puppet from the dependencies of geowave-puppet # we're doing this convoluted workaround to download and then install with no dep resolution sudo yumdownloader --enablerepo $GEOWAVE_REPO_NAME_TOKEN --destdir /tmp geowave-${GEOWAVE_VERSION}-puppet sudo rpm -Uvh --force --nodeps /tmp/geowave-${GEOWAVE_VERSION}-puppet.*.noarch.rpm # EMR 5.17.2 and lower has a tar bundle installed puppet in /home/ec2-user # more recent versions of EMR use an emr-puppet RPM installed to /opt/aws/puppet # We need to make more recent versions of EMR's puppet act similar to the older version if [ -d /opt/aws/puppet ]; then # this is a more recent EMR # first add puppet to /usr/bin sudo ln -s /opt/aws/puppet/bin/puppet /usr/bin/ # install stdlib which is required by geowave sudo puppet module install puppetlabs-stdlib # GeoWave puppet always puts its modules assuming puppet is installed to /etc/puppet # move the geowave module and clear the /etc/puppet directory which was created just for geowave sudo mv /etc/puppet/modules/geowave/ /opt/aws/puppet/modules/ sudo rm -rf /etc/puppet/ fi cat << EOF > /tmp/geowave.pp class { 'geowave::repo': repo_base_url => '$GEOWAVE_REPO_BASE_URL_TOKEN', repo_enabled => 1, } -> class { 'geowave': geowave_version => '${GEOWAVE_VERSION}', hadoop_vendor_version => 'apache', $DATASTORE_PUPPET_TOKEN install_app => true, install_restservices => true, install_gwgeoserver => true, install_gwgrpc => false, http_port => "${HTTP_PORT}", grpc_port => "${GRPC_PORT}", ajp_port => "${AJP_PORT}", shutdown_port => "${SHUTDOWN_PORT}", set_public_dns => ${SET_PUBLIC_DNS}, public_dns => "${PUBLIC_DNS}:${HTTP_PORT}" } file { '/usr/local/geowave/tomcat8/bin/setenv.sh': ensure => file, owner => 'geowave', group => 'geowave', mode => '644', content => 'export JAVA_OPTS="${GEOSERVER_MEMORY}"', require 
=> Package['geowave-${GEOWAVE_VERSION}-apache-gwtomcat'], notify => Service['gwtomcat'], } EOF sudo sh -c "puppet apply /tmp/geowave.pp" return 0 } $DATASTORE_LIB_TOKEN ================================================ FILE: deploy/packaging/emr/template/hbase/DATASTORE_PARAMS_TOKEN ================================================ -t hbase --zookeeper $HOSTNAME:2181 ================================================ FILE: deploy/packaging/emr/template/hbase/DATASTORE_PUPPET_TOKEN ================================================ install_hbase => true, ================================================ FILE: deploy/packaging/emr/template/jupyter/bootstrap-jupyter.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash GEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN} JUPYTER_PASSWORD=${2-geowave} is_master() { if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then return 0 else return 1 fi } # I've externalized commands into library functions for clarity, download and source if [ ! -f /tmp/create-configure-kernel.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/create-configure-kernel.sh /tmp/create-configure-kernel.sh fi if [ ! -f /tmp/install-conda.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/install-conda.sh /tmp/install-conda.sh sudo chmod +x /tmp/install-conda.sh fi if [ ! 
-f /tmp/gw-base.yml ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/gw-base.yml /tmp/gw-base.yml fi # The EMR customize hooks run _before_ everything else, so Spark is not yet ready THIS_SCRIPT="$(realpath "${BASH_SOURCE[0]}")" RUN_FLAG="${THIS_SCRIPT}.run" # On first boot skip past this script to allow EMR to set up the environment. Set a callback # which will poll for availability of Spark and then create the jupyter kernel if [ ! -f "$RUN_FLAG" ]; then touch "$RUN_FLAG" TIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4 echo "bash -x $(realpath "${BASH_SOURCE[0]}") > /tmp/bootstrap-jupyter.log" | at now + $TIMEOUT min exit 0 # Bail and let EMR finish initializing fi # Download example notebooks from s3 aws s3 sync s3://$GEOWAVE_NOTEBOOK_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/notebooks/jupyter/ $HOME/notebooks/ aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/lib/geowave_pyspark-${GEOWAVE_VER}.tar.gz /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz source /tmp/install-conda.sh echo bootstrap_conda.sh completed. PATH now: $PATH echo Performing pixiedust and jupyter kernel setup. 
source /etc/profile.d/conda.sh jupyter nbextension enable --py --sys-prefix ipyleaflet jupyter nbextension enable --py --sys-prefix widgetsnbextension jupyter nbextension enable --py --sys-prefix vega # generate empty config for notebook server jupyter notebook --generate-config pip install /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz # generate default password for server HASHED_PASSWORD=$(python -c "from notebook.auth import passwd; print(passwd('$JUPYTER_PASSWORD'))") printf "c.NotebookApp.password = u'$HASHED_PASSWORD'" >> $HOME/.jupyter/jupyter_notebook_config.py printf "\nc.NotebookApp.open_browser = False" >> $HOME/.jupyter/jupyter_notebook_config.py printf "\nc.NotebookApp.ip = '*'" >> $HOME/.jupyter/jupyter_notebook_config.py printf "\nc.NotebookApp.notebook_dir = '$HOME/notebooks/'" >> $HOME/.jupyter/jupyter_notebook_config.py printf "\nc.NotebookApp.port = 9000" >> $HOME/.jupyter/jupyter_notebook_config.py #Adding Jupyter to Upstart so it can be run at bootstrap sudo cat << EOF > $HOME/jupyter.conf description "Jupyter" start on runlevel [2345] stop on runlevel [016] respawn respawn limit 0 10 env HOME=$HOME script . $HOME/.bashrc . 
/etc/profile.d/conda.sh exec start-stop-daemon --start -c hadoop --exec $HOME/conda/bin/jupyter-notebook > /var/log/jupyter.log 2>&1 end script EOF sudo mv $HOME/jupyter.conf /etc/init/ sudo chown root:root /etc/init/jupyter.conf # be sure that jupyter daemon is registered in initctl sudo initctl reload-configuration # start jupyter daemon sudo initctl start jupyter if is_master; then source /tmp/create-configure-kernel.sh ${GEOWAVE_VER} fi ================================================ FILE: deploy/packaging/emr/template/jupyter/bootstrap-jupyterhub.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # Variables for kernel creation GEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN} USER_PASS=${2:-geowave} is_master() { if [ $(jq '.isMaster' /mnt/var/lib/info/instance.json) = 'true' ]; then return 0 else return 1 fi } # I've externalized commands into library functions for clarity, download and source if [ ! -f /tmp/create-configure-kernel.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/create-configure-kernel.sh /tmp/create-configure-kernel.sh sudo chmod +x /tmp/create-configure-kernel.sh fi if [ ! -f /tmp/install-conda.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/install-conda.sh /tmp/install-conda.sh sudo chmod +x /tmp/install-conda.sh fi if [ ! 
-f /tmp/gw-base.yml ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/gw-base.yml /tmp/gw-base.yml fi # The EMR customize hooks run _before_ everything else, so Spark is not yet ready THIS_SCRIPT="$(realpath "${BASH_SOURCE[0]}")" RUN_FLAG="${THIS_SCRIPT}.run" # On first boot skip past this script to allow EMR to set up the environment. Set a callback # which will poll for availability of Spark and then create the jupyter kernel if [ ! -f "$RUN_FLAG" ]; then touch "$RUN_FLAG" TIMEOUT= is_master && TIMEOUT=3 || TIMEOUT=4 echo "bash -x $(realpath "${BASH_SOURCE[0]}") > /tmp/bootstrap-jupyterhub.log" | at now + $TIMEOUT min exit 0 # Bail and let EMR finish initializing fi # Download example notebooks from s3 aws s3 sync s3://$GEOWAVE_NOTEBOOKS_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/notebooks/jupyter/ /usr/local/notebooks/ # Grab pre-spawn script for properly hooking new users into system. sudo aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/pre-spawn.sh /srv/jupyterhub/ sudo chmod +x /srv/jupyterhub/pre-spawn.sh # Download hub configuration file sudo su root -c "aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/jupyter/jupyterhub_config.py /etc/jupyterhub/" # Download latest conda to root install location sudo su root -c "source /tmp/install-conda.sh /opt/miniconda.sh /opt/conda/" # TODO find pyspark lib defined below sudo su root -c "aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/lib/geowave_pyspark-${GEOWAVE_VER}.tar.gz /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz" echo bootstrap_conda.sh completed. PATH now: $PATH echo Performing pixiedust and jupyter kernel setup. 
if is_master; then sudo su root -c "source /tmp/create-configure-kernel.sh $GEOWAVE_VER /usr/local/pixiedust /opt/conda/bin /opt/conda/share/jupyter/kernels" fi sudo su root -c "/opt/conda/bin/pip install /tmp/geowave_pyspark-${GEOWAVE_VER}.tar.gz" # Allow pixiedust to be accessed by all users sudo chmod -R 777 /usr/local/pixiedust/ # Add upstart service to run jupyterhub sudo cat << EOF > $HOME/jupyterhub.conf description "JupyterHub" start on runlevel [2345] stop on runlevel [016] respawn respawn limit 0 10 env JAVA_HOME=$JAVA_HOME script if [ -f /etc/jupyterhub/oauth_env.sh ]; then . /etc/jupyterhub/oauth_env.sh fi . /etc/profile.d/conda.sh exec start-stop-daemon --start --exec /opt/conda/bin/jupyterhub -- --config /etc/jupyterhub/jupyterhub_config.py > /var/log/jupyterhub.log 2>&1 end script EOF sudo mv $HOME/jupyterhub.conf /etc/init/ sudo chown root:root /etc/init/jupyterhub.conf sudo mkdir -p /srv/jupyterhub # Write default userlist that adds jupyterhub user as admin sudo cat << EOF > $HOME/userlist jupyterhub admin EOF sudo mv $HOME/userlist /srv/jupyterhub/ sudo chown root:root /srv/jupyterhub/userlist # Add jupyterhub user sudo useradd -m -s /bin/bash -N jupyterhub sudo printf "jupyterhub:$USER_PASS" | sudo chpasswd # Start jupyterhub service # be sure that jupyter daemon is registered in initctl sudo initctl reload-configuration sudo initctl start jupyterhub ================================================ FILE: deploy/packaging/emr/template/jupyter/create-configure-kernel.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash GEOWAVE_VER=${1:-$GEOWAVE_VERSION_TOKEN} PIXIEDUST_HOME=${2:-$HOME/pixiedust/} CONDA_INSTALL=${3:-$HOME/conda/bin} KERNEL_OUT=${4:-$HOME/.local/share/jupyter/kernels/} SPARK_HOME=${5:-/usr/lib/spark} MASTER_ARG=${6:-yarn} INTIAL_POLLING_INTERVAL=15 # This gets doubled for each attempt up to max_attempts KERNEL_DIR=$HOME/.local/share/jupyter/kernels/ # Avoid race conditions and actually poll for availability of component dependencies # Credit: http://stackoverflow.com/questions/8350942/how-to-re-run-the-curl-command-automatically-when-the-error-occurs/8351489#8351489 with_backoff() { local max_attempts=${ATTEMPTS-5} local timeout=${INTIAL_POLLING_INTERVAL-1} local attempt=0 local exitCode=0 while (( $attempt < $max_attempts )) do set +e "$@" exitCode=$? set -e if [[ $exitCode == 0 ]] then break fi echo "Retrying $@ in $timeout.." 1>&2 sleep $timeout attempt=$(( attempt + 1 )) timeout=$(( timeout * 2 )) done if [[ $exitCode != 0 ]] then echo "Fail: $@ failed to complete after $max_attempts attempts" 1>&2 fi return $exitCode } is_spark_available() { pyspark --version / return $? } wait_until_spark_is_available() { with_backoff is_spark_available if [ $? != 0 ]; then echo "HDFS not available before timeout. Exiting ..." 
exit 1 fi } #Install the Kernel wait_until_spark_is_available # Create the jupyter kernel mkdir -p ${PIXIEDUST_HOME} ${CONDA_INSTALL}/jupyter pixiedust install < tmp.$$.json && mv tmp.$$.json ${KERNEL_JSON} jq 'del(.env["SPARK_DRIVER_MEMORY"])' ${KERNEL_JSON} > tmp.$$.json && mv tmp.$$.json ${KERNEL_JSON} #Disable shell file globbing set -f #Use jq to read submit args into array submit_args=($(jq -r '.env["PYSPARK_SUBMIT_ARGS"]' ${KERNEL_JSON})) #Enable shell file globbing set +f #Add geowave jar to submit --jars option submit_args[1]=${submit_args[1]},${GEOWAVE_INSTALL} #Modify master to use yarn/local submit_args[5]=${MASTER_ARG} #Pulling array out to string so it can be passed properly to jq submit_string=${submit_args[@]} #Modifying default spark allocation properties to use max memory resources available with HBase YARN_SCHED_MAX=`xmllint --xpath 'string(//property[name="yarn.scheduler.maximum-allocation-mb"]/value)' /etc/hadoop/conf/yarn-site.xml` YARN_CONT_MAX=`xmllint --xpath 'string(//property[name="yarn.nodemanager.resource.memory-mb"]/value)' /etc/hadoop/conf/yarn-site.xml` echo "Yarn Scheduler Max Memory = ${YARN_SCHED_MAX}(MB)" echo "Yarn Container Max Memory = ${YARN_CONT_MAX}(MB)" MAX_MOD=0.9 CONT_MOD=0.8 #Use bc calculator to get new max and container memory and truncate floating result MOD_SCHED_MAX=$(echo "($YARN_SCHED_MAX*$MAX_MOD) / 1" | bc) MOD_CONT_MAX=$(echo "($YARN_CONT_MAX*$CONT_MOD) / 1" | bc) echo "Modified Yarn Scheduler Max Memory = ${MOD_SCHED_MAX}(MB)" echo "Modified Yarn Container Max Memory = ${MOD_CONT_MAX}(MB)" DRIVER_MEM="--driver-memory ${MOD_SCHED_MAX}M " EXECUTOR_MEM="--executor-memory ${MOD_CONT_MAX}M " submit_string=${DRIVER_MEM}${EXECUTOR_MEM}${submit_string} echo "New Spark Submit Options: ${submit_string}" #Write the new submit_args to the kernel.json jq --arg submit_args "${submit_string}" '.env["PYSPARK_SUBMIT_ARGS"]= $submit_args' ${KERNEL_JSON} > tmp.$$.json && mv tmp.$$.json ${KERNEL_JSON} echo "Modified Kernel to 
use yarn by default" # Copy final modified kernel to output install location cp -R ${PIXIEDUST_KERNELS} ${KERNEL_OUT} ================================================ FILE: deploy/packaging/emr/template/jupyter/gw-base.yml ================================================ name: base channels: - conda-forge - defaults dependencies: - python=3.6 - folium=0.6.0 - ipykernel=4.9.0 - ipyleaflet=0.9.0 - ipywidgets=7.4.1 - jupyter=1.0.0 - jupyterhub=0.9.2 - matplotlib=2.2.3 - nbconvert=5.4.0 - owslib=0.16.0 - pandas=0.23.4 - pip=18.0 - pytz=2018.5 - pyyaml=3.13 - wheel=0.31.1 - ncurses=6.1 - numpy=1.15.1 - pip: - astunparse==1.5.0 - colour==0.1.5 - geojson==2.4.0 - markdown==2.6.11 - mpld3==0.3 - pixiedust==1.1.11 - py4j==0.10.6 - pyspark==2.3.0 - shapely==1.6.4.post2 - oauthenticator==0.8.0 ================================================ FILE: deploy/packaging/emr/template/jupyter/install-conda.sh ================================================ #!/usr/bin/env bash CONDA_DL_LOC=${1-$HOME/miniconda.sh} CONDA_INSTALL_LOC=${2-$HOME/conda/} RQ_FILE=${3-/tmp/gw-base.yml} # Download conda to root install location wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O "$CONDA_DL_LOC" # Modify the file permissions to allow execution within this shell chmod +x ${CONDA_DL_LOC} # Install miniconda and output directory to /opt/conda ${CONDA_DL_LOC} -bfp ${CONDA_INSTALL_LOC} # Add Conda to the path so all users with shell can see conda printf "export PATH=${CONDA_INSTALL_LOC}bin:"'$PATH' | sudo tee -a /etc/profile.d/conda.sh # setup python 3.6 in the master and workers printf "\nexport PYSPARK_PYTHON=${CONDA_INSTALL_LOC}bin/python" | sudo tee -a /etc/profile.d/conda.sh printf "\nexport PYSPARK_DRIVER_PYTHON=${CONDA_INSTALL_LOC}bin/python" | sudo tee -a /etc/profile.d/conda.sh # This was added because Upstart doesn't capture user environment variables before loading jupyter printf "\nexport HOSTNAME=$HOSTNAME" | sudo tee -a /etc/profile.d/conda.sh sudo chmod +x 
/etc/profile.d/conda.sh source /etc/profile.d/conda.sh # Set config options to install dependencies properly ${CONDA_INSTALL_LOC}/bin/conda config --system --set always_yes yes --set changeps1 no ${CONDA_INSTALL_LOC}/bin/conda config --system -f --add channels conda-forge # Install dependencies via conda ${CONDA_INSTALL_LOC}/bin/conda env update -f ${RQ_FILE} rm -f ${CONDA_DL_LOC} ================================================ FILE: deploy/packaging/emr/template/jupyter/jupyterhub_config.py ================================================ c = get_config() import os pjoin = os.path.join runtime_dir = pjoin('/srv/jupyterhub') userlist_loc = pjoin(runtime_dir, 'userlist') blacklist_loc = pjoin(runtime_dir, 'env_blacklist') ssl_dir = pjoin(runtime_dir, 'ssl') if not os.path.exists(ssl_dir): os.makedirs(ssl_dir) # Setup whitelist and admins from file in runtime directory whitelist = set() admin = set() if os.path.isfile(userlist_loc): with open(userlist_loc) as f: for line in f: if not line.strip(): continue parts = line.split() name = parts[0].strip() whitelist.add(name) if len(parts) > 1 and parts[1].strip() == 'admin': admin.add(name) c.Authenticator.whitelist = whitelist c.Authenticator.admin_users = admin # Create a blacklist of environment variables to ensure are removed from notebook environments env_blacklist = [] if os.path.isfile(blacklist_loc): with open(blacklist_loc) as f: for line in f: if not line.strip(): continue line = line.strip() env_blacklist.append(line) for var in os.environ: if var not in env_blacklist: c.Spawner.env_keep.append(var) c.JupyterHub.hub_ip = '0.0.0.0' # Allow administrators to access individual user notebook servers. c.JupyterHub.admin_access = True # If SSL certificates exist on cluster uncomment these lines in config. 
# Will look in /srv/jupyterhub/ssl/ #c.JupyterHub.ssl_key = pjoin(ssl_dir, 'ssl.key') #c.JupyterHub.ssl_cert = pjoin(ssl_dir, 'ssl.cert') c.JupyterHub.port = 9000 # Fix adduser command so it doesn't apply invalid parameters. c.Authenticator.add_user_cmd = ['adduser'] c.PAMAuthenticator.create_system_users = True from subprocess import check_call def copy_notebooks(spawner): username = spawner.user.name check_call(['/srv/jupyterhub/pre-spawn.sh', username]) c.Spawner.pre_spawn_hook = copy_notebooks c.Spawner.notebook_dir = u'~/notebooks/' ================================================ FILE: deploy/packaging/emr/template/jupyter/pre-spawn.sh ================================================ #!/usr/bin/env bash USER=$1 if ["$USER" == ""]; then echo "must include username argument" exit 1 fi # Start the Bootstrap Process echo "bootstrap process running for user $USER ..." # User Directory: That's the private directory for the user to be created, if none exists USER_DIRECTORY="/home/${USER}/notebooks/" # TODO: I don't like this but it fixes an error with pixiedust creating files owned by the first user to import it. # Really there needs to be some changes to how pixiedust itself looks for user config + db files to support multi-user access # for jupyterhub that don't exist currently. sudo chmod -R 777 /usr/local/pixiedust/ if [ -d "$USER_DIRECTORY" ]; then echo "home directory for user already exists. skipped creation" else echo "creating a home directory for the user: $USER_DIRECTORY" mkdir ${USER_DIRECTORY} echo "...copying example notebooks for user ..." cp -R /usr/local/notebooks/. ${USER_DIRECTORY} chown -R ${USER}:${USER} ${USER_DIRECTORY} fi if [ hadoop fs -test -d /user/${USER} ]; then echo "hdfs directory for user already exists. skipped creation." else echo "creating hdfs directory for user." 
sudo -u hdfs hdfs dfs -mkdir /user/${USER} sudo -u hdfs hdfs dfs -chmod 777 /user/${USER} fi exit 0 ================================================ FILE: deploy/packaging/emr/template/quickstart/QUICKSTART_BOOTSTRAP_TOKEN ================================================ if is_master ; then if [ ! -f /mnt/geowave-env.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/geowave-env.sh /mnt/geowave-env.sh fi source /mnt/geowave-env.sh if [ ! -f /mnt/KDEColorMap.sld ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/KDEColorMap.sld /mnt/KDEColorMap.sld fi if [ ! -f /mnt/SubsamplePoints.sld ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/SubsamplePoints.sld /mnt/SubsamplePoints.sld fi if [ ! -f /mnt/setup-geoserver-geowave-workspace.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/setup-geoserver-geowave-workspace.sh /mnt/setup-geoserver-geowave-workspace.sh fi if [ ! -f /mnt/ingest-and-kde-gdelt.sh ]; then aws s3 cp s3://$GEOWAVE_BUCKET_TOKEN/$GEOWAVE_VERSION_URL_TOKEN/scripts/emr/quickstart/$DATASTORE_TOKEN/ingest-and-kde-gdelt.sh /mnt/ingest-and-kde-gdelt.sh fi chmod 755 /mnt/*.sh cd /mnt;./ingest-and-kde-gdelt.sh fi ================================================ FILE: deploy/packaging/emr/template/quickstart/geowave-env.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash export STAGING_DIR=/mnt #Europe 02/2016 export TIME_REGEX=201602 export EAST=40 export WEST=-31.25 export NORTH=81 export SOUTH=27.6363 export GERMANY="MULTIPOLYGON (((8.710256576538086 47.696809768676758,8.678594589233398 47.69334602355957,8.670557022094727 47.71110725402832,8.710256576538086 47.696809768676758)),((6.806390762329102 53.60222053527832,6.746946334838867 53.560274124145508,6.658334732055664 53.58610725402832,6.806390762329102 53.60222053527832)),((6.939443588256836 53.669443130493164,6.87639045715332 53.67027473449707,7.088335037231445 53.684167861938477,6.939443588256836 53.669443130493164)),((7.242498397827148 53.704439163208008,7.135835647583008 53.706110000610352,7.346944808959961 53.721109390258789,7.242498397827148 53.704439163208008)),((8.191110610961914 53.72471809387207,8.120000839233398 53.713052749633789,8.142778396606445 53.733606338500977,8.191110610961914 53.72471809387207)),((7.622224807739258 53.75444221496582,7.467779159545898 53.733057022094727,7.485834121704102 53.757501602172852,7.622224807739258 53.75444221496582)),((7.758890151977539 53.760553359985352,7.664445877075195 53.761667251586914,7.812780380249023 53.775552749633789,7.758890151977539 53.760553359985352)),((8.42527961730957 53.928056716918945,8.411664962768555 53.95555305480957,8.454999923706055 53.963052749633789,8.42527961730957 53.928056716918945)),((13.940279006958008 54.024995803833008,13.925832748413086 54.018327713012695,13.934446334838867 54.027772903442383,13.940279006958008 54.024995803833008)),((8.695554733276367 54.041109085083008,8.671388626098633 54.077775955200195,8.693334579467773 54.082498550415039,8.695554733276367 
54.041109085083008)),((14.001317977905273 54.065362930297852,14.225557327270508 53.928606033325195,14.218889236450195 53.869020462036133,13.823431015014648 53.85374641418457,14.056005477905273 53.984865188598633,13.759164810180664 54.159997940063477,14.001317977905273 54.065362930297852)),((10.97944450378418 54.380556106567383,11.017778396606445 54.380273818969727,11.003053665161133 54.37693977355957,10.97944450378418 54.380556106567383)),((8.893056869506836 54.461938858032227,8.815000534057617 54.500833511352539,8.960554122924805 54.519166946411133,8.893056869506836 54.461938858032227)),((11.312776565551758 54.406946182250977,11.006387710571289 54.461664199829102,11.184167861938477 54.519998550415039,11.312776565551758 54.406946182250977)),((8.662778854370117 54.494165420532227,8.59111213684082 54.527772903442383,8.710832595825195 54.551668167114258,8.662778854370117 54.494165420532227)),((13.073610305786133 54.488611221313477,13.09666633605957 54.590555191040039,13.151388168334961 54.602777481079102,13.073610305786133 54.488611221313477)),((13.383054733276367 54.638887405395508,13.730833053588867 54.275835037231445,13.11833381652832 54.333887100219727,13.267499923706055 54.382501602172852,13.146963119506836 54.54560661315918,13.503091812133789 54.493097305297852,13.244722366333008 54.559167861938477,13.383054733276367 54.638887405395508)),((8.364442825317383 54.61332893371582,8.294443130493164 54.666666030883789,8.353887557983398 54.711664199829102,8.364442825317383 54.61332893371582)),((8.567777633666992 54.685274124145508,8.396944046020508 54.713884353637695,8.551111221313477 54.753885269165039,8.567777633666992 54.685274124145508)),((10.97944450378418 54.380556106567383,10.818536758422852 53.890054702758789,12.526945114135742 54.474161148071289,12.924165725708008 54.426942825317383,12.369722366333008 54.26500129699707,13.023889541625977 54.399721145629883,13.455831527709961 54.096109390258789,13.718332290649414 54.169717788696289,13.813055038452148 
53.845277786254883,14.275629043579102 53.699068069458008,14.149168014526367 52.86277961730957,14.640275955200195 52.57249641418457,14.599443435668945 51.818605422973633,15.03639030456543 51.285554885864258,14.828332901000977 50.86583137512207,14.309720993041992 51.053606033325195,12.093706130981445 50.322534561157227,12.674444198608398 49.424997329711914,13.833612442016602 48.77360725402832,12.758333206176758 48.12388801574707,13.016668319702148 47.470277786254883,12.735555648803711 47.684167861938477,11.095556259155273 47.396112442016602,10.478055953979492 47.591943740844727,10.173334121704102 47.274721145629883,9.56672477722168 47.54045295715332,8.566110610961914 47.806940078735352,8.576421737670898 47.591371536254883,7.697225570678711 47.543329238891602,7.58827018737793 47.584482192993164,7.578889846801758 48.119722366333008,8.226079940795898 48.964418411254883,6.36216926574707 49.459390640258789,6.524446487426758 49.808610916137695,6.134416580200195 50.127847671508789,6.39820671081543 50.323175430297852,6.011800765991211 50.757272720336914,5.864721298217773 51.046106338500977,6.222223281860352 51.465829849243164,5.962499618530273 51.807779312133789,6.828889846801758 51.965555191040039,7.065557479858398 52.385828018188477,6.68889045715332 52.549165725708008,7.051668167114258 52.643610000610352,7.208364486694336 53.242807388305664,7.015554428100586 53.41472053527832,7.295835494995117 53.685274124145508,8.008333206176758 53.710000991821289,8.503053665161133 53.354166030883789,8.665555953979492 53.893884658813477,9.832498550415039 53.536386489868164,8.899721145629883 53.940828323364258,8.883611679077148 54.294168472290039,8.599443435668945 54.333887100219727,9.016942977905273 54.498331069946289,8.580549240112305 54.867879867553711,8.281110763549805 54.746942520141602,8.393331527709961 55.053056716918945,8.664545059204102 54.913095474243164,9.44536018371582 54.825403213500977,9.972776412963867 54.761110305786133,9.870279312133789 54.454439163208008,10.97944450378418 
54.380556106567383),(11.459165573120117 53.96110725402832,11.488611221313477 54.023050308227539,11.37388801574707 53.988611221313477,11.459165573120117 53.96110725402832),(11.544168472290039 54.06138801574707,11.612421035766602 54.104585647583008,11.511110305786133 54.048608779907227,11.544168472290039 54.06138801574707),(12.72972297668457 54.416666030883789,12.702775955200195 54.42833137512207,12.68610954284668 54.418329238891602,12.72972297668457 54.416666030883789)))" export BERLIN_BBOX="BBOX(shape,13.0535, 52.3303, 13.7262, 52.6675)" export PARIS_BBOX="BBOX(shape,2.0868, 48.6583, 2.6379, 49.0469)" export HDFS_PORT=8020 export RESOURCE_MAN_PORT=8032 export NUM_PARTITIONS=32 export GEOWAVE_TOOL_JAVA_OPT=-Xmx4g export GEOWAVE_TOOLS_JAR=/usr/local/geowave/tools/geowave-tools-$GEOWAVE_VERSION_TOKEN-apache.jar ================================================ FILE: deploy/packaging/emr/template/quickstart/ingest-and-kde-gdelt.sh.template ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash echo "Ingesting GeoWave sample data and running kernel density estimate..." source geowave-env.sh # Grab whatever gdelt data matches $TIME_REGEX. 
The example is set to 201602 sudo mkdir $STAGING_DIR/gdelt;cd $STAGING_DIR/gdelt sudo wget http://data.gdeltproject.org/events/md5sums for file in `cat md5sums | cut -d' ' -f3 | grep "^${TIME_REGEX}"` ; do sudo wget http://data.gdeltproject.org/events/$file ; done md5sum -c md5sums 2>&1 | grep "^${TIME_REGEX}" cd $STAGING_DIR # disabling encryption geowave config set geowave.encryption.enabled=false # Ingest the data. Indexed spatial only in this example. It can also be indexed using spatial-temporal geowave store add gdelt --gwNamespace geowave.gdelt \ $DATASTORE_PARAMS_TOKEN geowave index add gdelt gdelt-spatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS geowave ingest localtogw $STAGING_DIR/gdelt gdelt gdelt-spatial -f gdelt --gdelt.cql "BBOX(geometry,${WEST},${SOUTH},${EAST},${NORTH})" geowave store add gdelt-kde --gwNamespace geowave.kde_gdelt \ $DATASTORE_PARAMS_TOKEN # Run a kde to produce a heatmap geowave analytic kde --featureType gdeltevent --minLevel 5 --maxLevel 26 --minSplits $NUM_PARTITIONS --maxSplits $NUM_PARTITIONS --coverageName gdeltevent_kde --hdfsHostPort ${HOSTNAME}:${HDFS_PORT} --jobSubmissionHostPort ${HOSTNAME}:${RESOURCE_MAN_PORT} --tileSize 1 gdelt gdelt-kde # Run the geoserver workspace setup script cd $STAGING_DIR ./setup-geoserver-geowave-workspace.sh ================================================ FILE: deploy/packaging/emr/template/quickstart/setup-geoserver-geowave-workspace.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
#!/bin/bash

source geowave-env.sh

# Configure the local host
geowave config geoserver "$HOSTNAME:8000"

# Add layers for the point and kde representations of the data
geowave gs layer add gdelt
geowave gs layer add gdelt-kde

# Add the colormap and DecimatePoints style
geowave gs style add kdecolormap -sld /mnt/KDEColorMap.sld
geowave gs style add SubsamplePoints -sld /mnt/SubsamplePoints.sld

# Set the kde layer default style to colormap
geowave gs style set gdeltevent_kde --styleName kdecolormap
geowave gs style set gdeltevent --styleName SubsamplePoints

================================================
FILE: deploy/packaging/puppet/geowave/manifests/accumulo.pp
================================================
# Installs the GeoWave Accumulo datastore RPM plus the core package if no
# other manifest has already declared it.
class geowave::accumulo {
  package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-accumulo":
    ensure => latest,
    tag => 'geowave-package',
  }

  if !defined(Package["geowave-${geowave::geowave_version}-core"]) {
    package { "geowave-${geowave::geowave_version}-core":
      ensure => latest,
      tag => 'geowave-package',
    }
  }
}

================================================
FILE: deploy/packaging/puppet/geowave/manifests/app.pp
================================================
# Installs the GeoWave command-line tools and documentation RPMs plus the
# core package if no other manifest has already declared it.
class geowave::app {
  $geowave_base_app_rpms = [
    "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-tools",
    "geowave-${geowave::geowave_version}-docs",
  ]

  package { $geowave_base_app_rpms:
    ensure => latest,
    tag => 'geowave-package',
  }

  if !defined(Package["geowave-${geowave::geowave_version}-core"]) {
    package { "geowave-${geowave::geowave_version}-core":
      ensure => latest,
      tag => 'geowave-package',
    }
  }
}

================================================
FILE:
deploy/packaging/puppet/geowave/manifests/gwgeoserver.pp ================================================ class geowave::gwgeoserver { if !defined(Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat"]) { package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat": ensure => latest, tag => 'geowave-package', } } package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwgeoserver": ensure => latest, tag => 'geowave-package', notify => Service['gwtomcat'] } } ================================================ FILE: deploy/packaging/puppet/geowave/manifests/gwgrpc.pp ================================================ class geowave::gwgrpc { $grpc_port = $geowave::grpc_port if !defined(Package["geowave-${geowave::geowave_version}-core"]) { package { "geowave-${geowave::geowave_version}-core": ensure => latest, tag => 'geowave-package', } } package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-grpc": ensure => latest, tag => 'geowave-package', } file { '/etc/geowave/gwgrpc': ensure => present, path => "/etc/geowave/gwgrpc", content => "GRPC_PORT=${grpc_port}", } service { 'gwgrpc': ensure => 'running', provider => 'redhat', enable => true, } } ================================================ FILE: deploy/packaging/puppet/geowave/manifests/gwtomcat_server.pp ================================================ class geowave::gwtomcat_server { $http_port = $geowave::http_port $ajp_port = $geowave::ajp_port $shutdown_port = $geowave::shutdown_port if !defined(Package["geowave-${geowave::geowave_version}-core"]) { package { "geowave-${geowave::geowave_version}-core": ensure => latest, tag => 'geowave-package', } } package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat": ensure => latest, tag => 'geowave-package', notify => Service['gwtomcat'], } file_line {'change_http_port': ensure => present, path => 
"/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/conf/server.xml", line => " '.Connector\ port="(\d{1,5})".protocol="HTTP.*"$', require => Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat"], notify => Service['gwtomcat'], } file_line {'change_ajp_port': ensure => present, path => "/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/conf/server.xml", line => "", match => '.Connector\ port="(\d{1,5})".protocol="AJP.*$', require => Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat"], notify => Service['gwtomcat'], } file_line {'change_shutdown_port': ensure => present, path => "/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/conf/server.xml", line => "", match => '.Server\ port="(\d{1,5})" shutdown="SHUTDOWN">$', require => Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat"], notify => Service['gwtomcat'], } } ================================================ FILE: deploy/packaging/puppet/geowave/manifests/gwtomcat_service.pp ================================================ class geowave::gwtomcat_service { service { 'gwtomcat': ensure => 'running', provider => 'redhat', enable => true, } } ================================================ FILE: deploy/packaging/puppet/geowave/manifests/hbase.pp ================================================ class geowave::hbase { package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-hbase": ensure => latest, tag => 'geowave-package', } if !defined(Package["geowave-${geowave::geowave_version}-core"]) { package { "geowave-${geowave::geowave_version}-core": ensure => latest, tag => 'geowave-package', } } } ================================================ FILE: deploy/packaging/puppet/geowave/manifests/init.pp ================================================ class geowave( 
$geowave_version = $geowave::params::geowave_version,
  $hadoop_vendor_version = $geowave::params::hadoop_vendor_version,
  $install_accumulo = $geowave::params::install_accumulo,
  $install_hbase = $geowave::params::install_hbase,
  $install_app = $geowave::params::install_app,
  $install_gwgeoserver = $geowave::params::install_gwgeoserver,
  $install_gwgrpc = $geowave::params::install_gwgrpc,
  $install_restservices = $geowave::params::install_restservices,
  $grpc_port = $geowave::params::grpc_port,
  $http_port = $geowave::params::http_port,
  # fixed: previously read the non-existent $geowave::params::ajp_port_ajp
  $ajp_port = $geowave::params::ajp_port,
  $shutdown_port = $geowave::params::shutdown_port,
  $set_public_dns = $geowave::params::set_public_dns,
  $public_dns = $geowave::params::public_dns,
) inherits geowave::params {

  if $geowave_version == undef {
    fail("geowave_version parameter is required")
  }

  if $hadoop_vendor_version == undef {
    fail("hadoop_vendor_version parameter is required")
  }

  if $install_accumulo {
    class {'geowave::accumulo':}
  }

  if $install_hbase {
    class {'geowave::hbase':}
  }

  if $install_app {
    class {'geowave::app':}
  }

  if $install_gwgeoserver or $install_restservices {
    anchor {'geowave_tomcat::begin': } ->
    class {'geowave::gwtomcat_server':} ->
    class {'geowave::gwtomcat_service':} ->
    anchor {'geowave_tomcat::end':}

    if $install_gwgeoserver {
      class {'geowave::gwgeoserver':}
    }

    if $install_restservices {
      class {'geowave::restservices':}
    }
  }

  if $install_gwgrpc {
    class {'geowave::gwgrpc':}
  }
}

================================================
FILE: deploy/packaging/puppet/geowave/manifests/params.pp
================================================
# Default values for the geowave class; init.pp reads each of these via
# $geowave::params::<name>, so every parameter it references must exist here.
class geowave::params {
  $geowave_version = undef
  $hadoop_vendor_version = undef
  $install_accumulo = false
  $install_hbase = false
  $install_app = false
  $install_app_server = false
  # added: parameters referenced by init.pp that were previously undeclared,
  # which breaks lookup under strict_variables
  $install_gwgeoserver = false
  $install_gwgrpc = false
  $install_restservices = false
  $http_port = '8080'
  $ajp_port = '8009'       # Tomcat AJP connector default
  $shutdown_port = '8005'  # Tomcat shutdown port default
  $grpc_port = '8980'
  $install_grpc = false
  $set_public_dns = false
  $public_dns = undef
}

================================================
FILE: deploy/packaging/puppet/geowave/manifests/repo.pp
================================================
class geowave::repo( $repo_name = 'geowave', $repo_desc = 'GeoWave Repo', $repo_enabled = 0, $repo_base_url = 'http://s3.amazonaws.com/geowave-rpms/release/noarch/', $repo_refresh_md = 21600, # Repo metadata is good for 6 hours by default $repo_priority = 15, $repo_gpg_check = 0, ) { yumrepo {$repo_name: baseurl => $repo_base_url, descr => $repo_desc, enabled => $repo_enabled, gpgcheck => $repo_gpg_check, priority => $repo_priority, metadata_expire => $repo_refresh_md, } Yumrepo[$repo_name] -> Package<|tag == 'geowave-package' |> } ================================================ FILE: deploy/packaging/puppet/geowave/manifests/restservices.pp ================================================ class geowave::restservices { $set_public_dns = $geowave::set_public_dns $public_dns = $geowave::public_dns $line_string = " host_port $public_dns " if !defined(Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat"]) { package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-gwtomcat": ensure => latest, tag => 'geowave-package', } } package { "geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-restservices": ensure => latest, tag => 'geowave-package', notify => Exec['wait_for_restservices_to_unpack'], #force restart of service } #This is done instead of a notify => Service['gwtomcat'] to force immediate #restart of the tomcat8 server. This is to ensure the war file is unpacked #so we can run the file_line block if needed. 
exec { 'wait_for_restservices_to_unpack': require => Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-restservices"], command => "/sbin/service gwtomcat restart && sleep 10", } if $set_public_dns{ file_line {'set_public_dns': ensure => present, line => $line_string, path => "/usr/local/geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}/tomcat8/webapps/restservices/WEB-INF/web.xml", match => "$public_dns<\/param-value>", after => "<\/context-param>", replace => false, require => Package["geowave-${geowave::geowave_version}-${geowave::hadoop_vendor_version}-restservices"], notify => Service['gwtomcat'], } } } ================================================ FILE: deploy/packaging/rpm/.gitignore ================================================ BUILD/ BUILDROOT/ RPMS/ SRPMS/ ================================================ FILE: deploy/packaging/rpm/centos/7/.gitignore ================================================ *.jar *.zip *.tar.gz BUILD/ BUILDROOT/ RPMS/ SRPMS/ ================================================ FILE: deploy/packaging/rpm/centos/7/SOURCES/bash_profile.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- # For use by geowave jetty server, set if not already set elsewhere if [ "x" == "x$JAVA_HOME" ]; then export JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::") fi if [ "x" == "x$GEOSERVER_HOME" ]; then export GEOSERVER_HOME=/usr/local/geowave/tomcat8/webapps/geoserver fi if [ "x" == "x$GEOSERVER_DATA_DIR" ]; then export GEOSERVER_DATA_DIR=/usr/local/geowave/tomcat8/webapps/geoserver/data fi ================================================ FILE: deploy/packaging/rpm/centos/7/SOURCES/default.xml ================================================ WorkspaceInfoImpl--5ccd188:124761b8d78:-9dd9 geowave ================================================ FILE: deploy/packaging/rpm/centos/7/SOURCES/geowave-tools.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # Which java to use if [ -z "$JAVA_HOME" ]; then JAVA="java" else JAVA="$JAVA_HOME/bin/java" fi # Setting up Hadoop env if [ -z "$HADOOP_HOME" ]; then VENDOR_VERSION=$( cat $GEOWAVE_TOOLS_HOME/geowave-tools-build.properties | grep -oi "vendor.version=\w*" | sed "s/vendor.version=//g") if [[ $VENDOR_VERSION == apache ]]; then export HADOOP_HOME=/usr/lib/hadoop elif [[ $VENDOR_VERSION == hdp* ]]; then export HADOOP_HOME=/usr/hdp/current/hadoop-client export HDP_VERSION=$(hdp-select| grep hadoop-hdfs-namenode| sed "s/hadoop-hdfs-namenode - //g") export GEOWAVE_TOOL_JAVA_OPT="$GEOWAVE_TOOL_JAVA_OPT -Dhdp.version=${HDP_VERSION}" elif [[ $VENDOR_VERSION == cdh* ]]; then export HADOOP_HOME=/usr/lib/hadoop else echo "Unknown Hadoop Distribution. Set env variable HADOOP_HOME." fi fi # set up HADOOP specific env only if HADOOP is installed if [ -n "${HADOOP_HOME}" ] && [ -d "${HADOOP_HOME}" ]; then . $HADOOP_HOME/libexec/hadoop-config.sh HADOOP_CLASSPATH="" for i in $(echo $CLASSPATH | sed "s/:/ /g") do if [[ "$i" != *log4j-slf4j-impl*.jar && "$i" != *slf4j-log4j*.jar && "$i" != *protobuf-java*.jar ]]; then HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$i fi done fi CLASSPATH=${HADOOP_CLASSPATH} # Setting up Spark env if [ -z "$SPARK_HOME" ]; then VENDOR_VERSION=$( cat $GEOWAVE_TOOLS_HOME/geowave-tools-build.properties | grep -oi "vendor.version=\w*" | sed "s/vendor.version=//g") if [[ $VENDOR_VERSION == apache ]]; then export SPARK_HOME=/usr/lib/spark elif [[ $VENDOR_VERSION == hdp* ]]; then export SPARK_HOME=/usr/hdp/current/spark2-client elif [[ $VENDOR_VERSION == cdh* ]]; then export SPARK_HOME=/usr/lib/spark else echo "Unknown Spark Distribution. 
Set env variable SPARK_HOME." fi fi # Ensure both our tools jar and anything in the plugins directory is on the classpath # Add Spark jars to class path only if SPARK_HOME directory exists if [ -n "${SPARK_HOME}" ] && [ -d "${SPARK_HOME}" ]; then . "${SPARK_HOME}"/bin/load-spark-env.sh SPARK_CLASSPATH="" for i in ${SPARK_HOME}/jars/*.jar do if [[ "$i" != *log4j-slf4j-impl*.jar && "$i" != *guava*.jar && "$i" != *slf4j-log4j*.jar && "$i" != *protobuf-java*.jar ]]; then SPARK_CLASSPATH=${SPARK_CLASSPATH}:$i fi done CLASSPATH="${SPARK_HOME}/conf:${SPARK_CLASSPATH}:$GEOWAVE_TOOLS_HOME/$GEOWAVE_TOOLS_JAR:$GEOWAVE_TOOLS_HOME/plugins/*:${CLASSPATH}" else CLASSPATH="$GEOWAVE_TOOLS_HOME/$GEOWAVE_TOOLS_JAR:$GEOWAVE_TOOLS_HOME/plugins/*:${CLASSPATH}" fi # Define log4j properties file in jar call, to reduce log spam. LOG_PROPERTIES="-Djava.util.logging.config.file=jul-geowave-cli.properties -Dgeowave.home=$GEOWAVE_TOOLS_HOME" # Using -cp and the classname instead of -jar because Java 7 and below fail to auto-launch jars with more than 65k files exec $JAVA $GEOWAVE_TOOL_JAVA_OPT $LOG_PROPERTIES -cp $CLASSPATH org.locationtech.geowave.core.cli.GeoWaveMain "$@" ================================================ FILE: deploy/packaging/rpm/centos/7/SOURCES/namespace.xml ================================================ NamespaceInfoImpl--5ccd188:124761b8d78:-9dd8 geowave https://github.com/locationtech/geowave ================================================ FILE: deploy/packaging/rpm/centos/7/SOURCES/workspace.xml ================================================ WorkspaceInfoImpl--5ccd188:124761b8d78:-9dd9 geowave ================================================ FILE: deploy/packaging/rpm/centos/7/SPECS/geowave-common.spec ================================================ %define timestamp %{?_timestamp}%{!?_timestamp: %(date +%Y%m%d%H%M)} %define name_version %{?_name_version}%{!?_name_version: UNKNOWN} %define rpm_version %{?_rpm_version}%{!?_rpm_version: UNKNOWN} %define base_name 
geowave %define name %{base_name} %define common_app_name %{base_name}-%{name_version} %define buildroot %{_topdir}/BUILDROOT/%{common_app_name}-root %define installpriority %{_priority} # Used by alternatives for concurrent version installs %define __jar_repack %{nil} %define _rpmfilename %%{ARCH}/%%{NAME}.%%{RELEASE}.%%{ARCH}.rpm %define geowave_home /usr/local/geowave %define geowave_docs_home /usr/share/doc/%{common_app_name} %define geowave_config /etc/geowave # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Name: %{base_name} Version: %{rpm_version} Release: %{timestamp} BuildRoot: %{buildroot} BuildArch: noarch Summary: GeoWave provides geospatial and temporal indexing on top of Accumulo and HBase License: Apache2 Group: Applications/Internet Source1: bash_profile.sh Source2: site-%{name_version}.tar.gz Source3: manpages-%{name_version}.tar.gz Source4: puppet-scripts-%{name_version}.tar.gz BuildRequires: unzip BuildRequires: zip BuildRequires: xmlto BuildRequires: asciidoc %description GeoWave provides geospatial and temporal indexing on top of key-value stores %install # Copy system service files into place mkdir -p %{buildroot}/etc/profile.d cp %{SOURCE1} %{buildroot}/etc/profile.d/geowave.sh mkdir -p %{buildroot}%{geowave_config} # Copy documentation into place mkdir -p %{buildroot}%{geowave_docs_home} tar -xzf %{SOURCE2} -C %{buildroot}%{geowave_docs_home} --strip=1 # Copy man pages into place mkdir -p %{buildroot}/usr/local/share/man/man1 tar -xvf %{SOURCE3} -C %{buildroot}/usr/local/share/man/man1 rm -rf %{buildroot}%{geowave_docs_home}/manpages rm -f %{buildroot}%{geowave_docs_home}/*.pdfmarks # Puppet scripts mkdir -p %{buildroot}/etc/puppet/modules tar -xzf %{SOURCE4} -C %{buildroot}/etc/puppet/modules # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %package -n %{common_app_name}-core Summary: GeoWave Core Group: Applications/Internet Provides: %{common_app_name}-core = %{rpm_version} 
%description -n %{common_app_name}-core GeoWave provides geospatial and temporal indexing on top of Accumulo. This package installs the GeoWave home directory and user account %pre -n %{common_app_name}-core getent group geowave > /dev/null || /usr/sbin/groupadd -r geowave getent passwd geowave > /dev/null || /usr/sbin/useradd --system --home /usr/local/geowave -g geowave geowave -c "GeoWave Application Account" %postun -n %{common_app_name}-core if [ $1 -eq 0 ]; then /usr/sbin/userdel geowave fi %files -n %{common_app_name}-core %attr(644, root, root) /etc/profile.d/geowave.sh %defattr(644, geowave, geowave, 755) %dir %{geowave_config} # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %package -n %{common_app_name}-docs Summary: GeoWave Documentation Group: Applications/Internet Provides: %{common_app_name}-docs = %{rpm_version} Requires: %{common_app_name}-core = %{rpm_version} %description -n %{common_app_name}-docs GeoWave provides geospatial and temporal indexing on top of Accumulo and HBase. 
This package installs the GeoWave documentation into the /usr/share/doc/geowave- directory

%files -n %{common_app_name}-docs
%defattr(644, geowave, geowave, 755)
%doc %{geowave_docs_home}
%doc
# Fixed: mode was written as "%defattr(644 root, root, 755)" -- the missing
# comma after the mode makes rpmbuild parse "644 root" as a single bad mode.
%defattr(644, root, root, 755)
/usr/local/share/man/man1/

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
%package -n %{common_app_name}-puppet
Summary: GeoWave Puppet Scripts
Group: Applications/Internet
Requires: puppet

%description -n %{common_app_name}-puppet
This package installs the geowave Puppet module to /etc/puppet/modules

%files -n %{common_app_name}-puppet
%defattr(644, root, root, 755)
/etc/puppet/modules/geowave

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
%changelog
* Thu Jun 27 2019 Rich Fecher - 1.0.0
- Enabled prerelease versioning
* Wed Nov 23 2016 Rich Fecher - 0.9.3
- Refactor to separate vendor-specific and common rpms
* Fri Jun 5 2015 Andrew Spohn - 0.8.7-1
- Add external config file
* Fri May 22 2015 Andrew Spohn - 0.8.7
- Use alternatives to support parallel version and vendor installs
- Replace geowave-ingest with geowave-tools
* Thu Jan 15 2015 Andrew Spohn - 0.8.2-3
- Added man pages
* Mon Jan 5 2015 Andrew Spohn - 0.8.2-2
- Added geowave-puppet rpm
* Fri Jan 2 2015 Andrew Spohn - 0.8.2-1
- Added a helper script for geowave-ingest and bash command completion
* Wed Nov 19 2014 Andrew Spohn - 0.8.2
- First packaging

================================================
FILE: deploy/packaging/rpm/centos/7/SPECS/geowave-vendor.spec
================================================
%define timestamp %{?_timestamp}%{!?_timestamp: %(date +%Y%m%d%H%M)}
%define name_version %{?_name_version}%{!?_name_version: UNKNOWN}
%define rpm_version %{?_rpm_version}%{!?_rpm_version: UNKNOWN}
%define vendor_version %{?_vendor_version}%{!?_vendor_version: UNKNOWN}
%define base_name geowave
%define name %{base_name}-%{vendor_version}
%define common_app_name %{base_name}-%{name_version}
%define vendor_app_name
%{base_name}-%{name_version}-%{vendor_version} %define buildroot %{_topdir}/BUILDROOT/%{vendor_app_name}-root %define installpriority %{_priority} # Used by alternatives for concurrent version installs %define __jar_repack %{nil} %define _rpmfilename %%{ARCH}/%%{NAME}.%%{RELEASE}.%%{ARCH}.rpm %define geowave_home /usr/local/geowave %define geowave_tools_script geowave-tools.sh %define geowave_install /usr/local/%{vendor_app_name} %define geowave_accumulo_home %{geowave_install}/accumulo %define geowave_hbase_home %{geowave_install}/hbase %define geowave_tools_home %{geowave_install}/tools %define geowave_plugins_home %{geowave_tools_home}/plugins # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Name: %{base_name} Version: %{rpm_version} Release: %{timestamp} BuildRoot: %{buildroot} BuildArch: noarch Summary: GeoWave provides geospatial and temporal indexing on top of key-value stores License: Apache2 Group: Applications/Internet Source0: geowave-accumulo-%{name_version}-%{vendor_version}.jar Source1: deploy-geowave-accumulo-to-hdfs.sh Source2: geowave-hbase-%{name_version}-%{vendor_version}.jar Source3: deploy-geowave-hbase-to-hdfs.sh Source8: default.xml Source9: namespace.xml Source10: workspace.xml Source11: geowave-tools-%{name_version}-%{vendor_version}.jar Source12: %{geowave_tools_script} BuildRequires: unzip BuildRequires: zip BuildRequires: xmlto BuildRequires: asciidoc %description GeoWave provides geospatial and temporal indexing on top of key-value stores %prep rm -rf %{_rpmdir}/%{buildarch}/%{vendor_app_name}* rm -rf %{_srcrpmdir}/%{vendor_app_name}* %build rm -fr %{_builddir} mkdir -p %{_builddir}/%{vendor_app_name} %clean rm -fr %{buildroot} rm -fr %{_builddir}/* %install rm -fr %{buildroot} mkdir -p %{buildroot}%{geowave_accumulo_home} mkdir -p %{buildroot}%{geowave_hbase_home} # Copy Accumulo library and deployment script onto local file system cp %{SOURCE0} %{SOURCE1} %{buildroot}%{geowave_accumulo_home} cp %{SOURCE2} 
%{SOURCE3} %{buildroot}%{geowave_hbase_home} # Extract version info file for easy inspection unzip -p %{SOURCE0} build.properties > %{buildroot}%{geowave_accumulo_home}/geowave-accumulo-build.properties unzip -p %{SOURCE2} build.properties > %{buildroot}%{geowave_hbase_home}/geowave-hbase-build.properties # Stage geowave tools mkdir -p %{buildroot}%{geowave_tools_home} mkdir -p %{buildroot}%{geowave_tools_home}/logs chmod 777 %{buildroot}%{geowave_tools_home}/logs cp %{SOURCE11} %{buildroot}%{geowave_tools_home} cp %{buildroot}%{geowave_accumulo_home}/geowave-accumulo-build.properties %{buildroot}%{geowave_tools_home}/build.properties pushd %{buildroot}%{geowave_tools_home} zip -qg %{buildroot}%{geowave_tools_home}/geowave-tools-%{name_version}-%{vendor_version}.jar build.properties popd mv %{buildroot}%{geowave_tools_home}/build.properties %{buildroot}%{geowave_tools_home}/geowave-tools-build.properties cp %{SOURCE12} %{buildroot}%{geowave_tools_home}/%{geowave_tools_script} #replace vendor-version particular variables in geowave-tools.sh sed -i -e s~'$GEOWAVE_TOOLS_HOME'~%{geowave_tools_home}~g %{buildroot}%{geowave_tools_home}/%{geowave_tools_script} sed -i -e s/'$GEOWAVE_TOOLS_JAR'/geowave-tools-%{name_version}-%{vendor_version}.jar/g %{buildroot}%{geowave_tools_home}/%{geowave_tools_script} mkdir -p %{buildroot}%{geowave_plugins_home} # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %package -n %{vendor_app_name}-single-host Summary: All GeoWave Components Group: Applications/Internet Requires: %{vendor_app_name}-accumulo = %{rpm_version} Requires: %{vendor_app_name}-hbase = %{rpm_version} Requires: %{vendor_app_name}-gwgeoserver = %{rpm_version} Requires: %{vendor_app_name}-restservices = %{rpm_version} Requires: %{vendor_app_name}-tools = %{rpm_version} %description -n %{vendor_app_name}-single-host GeoWave provides geospatial and temporal indexing on top of Accumulo. 
This package installs the accumulo, geoserver and tools components and would likely be useful for dev environments %files -n %{vendor_app_name}-single-host # This is a meta-package and only exists to install other packages # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %package -n %{vendor_app_name}-accumulo Summary: GeoWave Accumulo Components Group: Applications/Internet Provides: %{vendor_app_name}-accumulo = %{rpm_version} Requires: %{vendor_app_name}-tools = %{rpm_version} Requires: %{common_app_name}-core = %{rpm_version} %description -n %{vendor_app_name}-accumulo GeoWave provides geospatial and temporal indexing on top of Accumulo. This package installs the Accumulo components of GeoWave %post -n %{vendor_app_name}-accumulo /bin/bash %{geowave_accumulo_home}/deploy-geowave-accumulo-to-hdfs.sh >> %{geowave_accumulo_home}/geowave-accumulo-to-hdfs.log 2>&1 %files -n %{vendor_app_name}-accumulo %defattr(644, geowave, geowave, 755) %dir %{geowave_install} %attr(755, hdfs, hdfs) %{geowave_accumulo_home} %attr(644, hdfs, hdfs) %{geowave_accumulo_home}/geowave-accumulo-%{name_version}-%{vendor_version}.jar %attr(755, hdfs, hdfs) %{geowave_accumulo_home}/deploy-geowave-accumulo-to-hdfs.sh # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %package -n %{vendor_app_name}-hbase Summary: GeoWave HBase Components Group: Applications/Internet Provides: %{vendor_app_name}-hbase = %{rpm_version} Requires: %{vendor_app_name}-tools = %{rpm_version} Requires: %{common_app_name}-core = %{rpm_version} %description -n %{vendor_app_name}-hbase GeoWave provides geospatial and temporal indexing on top of HBase. 
This package installs the HBase components of GeoWave %post -n %{vendor_app_name}-hbase /bin/bash %{geowave_hbase_home}/deploy-geowave-hbase-to-hdfs.sh >> %{geowave_hbase_home}/geowave-hbase-to-hdfs.log 2>&1 %files -n %{vendor_app_name}-hbase %defattr(644, geowave, geowave, 755) %dir %{geowave_install} %attr(755, hdfs, hdfs) %{geowave_hbase_home} %attr(644, hdfs, hdfs) %{geowave_hbase_home}/geowave-hbase-%{name_version}-%{vendor_version}.jar %attr(755, hdfs, hdfs) %{geowave_hbase_home}/deploy-geowave-hbase-to-hdfs.sh # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %package -n %{vendor_app_name}-tools Summary: GeoWave Tools Group: Applications/Internet Provides: %{vendor_app_name}-tools = %{rpm_version} Requires: %{common_app_name}-core = %{rpm_version} %description -n %{vendor_app_name}-tools GeoWave provides geospatial and temporal indexing on top of Accumulo. This package installs GeoWave tools utility %post -n %{vendor_app_name}-tools alternatives --install %{geowave_home} geowave-home %{geowave_install} %{installpriority} ln -fs /usr/local/geowave/tools/geowave-tools.sh /usr/local/bin/geowave ln -fs /usr/local/geowave/tools/geowave-tools.sh /usr/local/sbin/geowave %postun -n %{vendor_app_name}-tools if [ $1 -eq 0 ]; then rm -f /usr/local/bin/geowave rm -f /usr/local/sbin/geowave alternatives --remove geowave-home %{geowave_install} fi %files -n %{vendor_app_name}-tools %defattr(644, geowave, geowave, 755) %{geowave_tools_home} %attr(755, geowave, geowave) %{geowave_tools_home}/geowave-tools.sh %attr(777, geowave, geowave) %{geowave_tools_home}/logs # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ %changelog * Thu Jun 27 2019 Rich Fecher - 1.0.0 - Enabled prerelease versioning * Wed Nov 23 2016 Rich Fecher - 0.9.3 - Add geowave-hbase and refactor to separate vendor-specific and common rpms * Fri Jun 5 2015 Andrew Spohn - 0.8.7-1 - Add external config file * Fri May 22 2015 Andrew Spohn - 0.8.7 - Use 
alternatives to support parallel version and vendor installs - Replace geowave-ingest with geowave-tools * Thu Jan 15 2015 Andrew Spohn - 0.8.2-3 - Added man pages * Mon Jan 5 2015 Andrew Spohn - 0.8.2-2 - Added geowave-puppet rpm * Fri Jan 2 2015 Andrew Spohn - 0.8.2-1 - Added a helper script for geowave-ingest and bash command completion * Wed Nov 19 2014 Andrew Spohn - 0.8.2 - First packaging ================================================ FILE: deploy/packaging/rpm/centos/7/rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # RPM build script # SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # Source all our reusable functionality, argument is the location of this script. . 
"$SCRIPT_DIR/../../rpm-functions.sh" "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" declare -A ARGS while [ $# -gt 0 ]; do case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done GEOWAVE_VERSION=${ARGS[geowave-version]} GEOWAVE_RPM_VERSION=${ARGS[geowave-rpm-version]} case ${ARGS[command]} in build-vendor) rpmbuild \ --define "_topdir $(pwd)" \ --define "_name_version $GEOWAVE_VERSION" \ --define "_rpm_version $GEOWAVE_RPM_VERSION" \ --define "_timestamp ${ARGS[time-tag]}" \ --define "_vendor_version ${ARGS[vendor-version]}" \ --define "_priority $(parsePriorityFromVersion $GEOWAVE_VERSION)" \ $(buildArg "${ARGS[buildarg]}") SPECS/*-vendor.spec ;; build-common) rpmbuild \ --define "_topdir $(pwd)" \ --define "_name_version $GEOWAVE_VERSION" \ --define "_rpm_version $GEOWAVE_RPM_VERSION" \ --define "_timestamp ${ARGS[time-tag]}" \ --define "_priority $(parsePriorityFromVersion $GEOWAVE_VERSION)" \ $(buildArg "${ARGS[buildarg]}") SPECS/*-common.spec ;; clean) clean ;; esac ================================================ FILE: deploy/packaging/rpm/repo-dev/SOURCES/geowave-dev.repo ================================================ [geowave-dev] name=GeoWave for Enterprise Linux 6 (Development Repo) baseurl=https://s3.amazonaws.com/geowave-rpms/dev/noarch/ enabled=0 gpgcheck=0 failovermethod=priority priority=15 # Uncomment if you _always_ want your client to check for new dev RPMs #metadata_expire=0 ================================================ FILE: deploy/packaging/rpm/repo-dev/SPECS/geowave-dev.spec ================================================ %define component geowave-repo-dev %define version 1.0 %define repo_dir /etc/yum.repos.d %define buildroot %{_topdir}/BUILDROOT/%{name}-%{version}-root Name: %{component} Version: %{version} Release: 3 BuildArch: noarch Summary: GeoWave Development RPM Repo Group: Applications/Internet License: Apache2 Source0: geowave-dev.repo BuildRoot: %{buildroot} %description GeoWave Development RPM Repo 
%prep rm -rf %{_rpmdir}/%{buildarch}/%{name}* rm -rf %{_srcrpmdir}/%{name}* %build rm -fr %{_builddir} mkdir -p %{_builddir}/%{name} %install # Clean and init the directory rm -fr %{buildroot} mkdir -p %{buildroot}%{repo_dir} # Unpack and rename app directory cp %{SOURCE0} %{buildroot}%{repo_dir} %clean rm -fr %{buildroot} rm -fr %{_builddir}/* %files %attr(644,root,root) %{repo_dir}/geowave-dev.repo %changelog * Thu Dec 5 2014 Andrew Spohn - 1.0 - First packaging ================================================ FILE: deploy/packaging/rpm/repo-dev/rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # RPM build script # # Source all our reusable functionality, argument is the location of this script. . 
../rpm-functions.sh "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" declare -A ARGS while [ $# -gt 0 ]; do case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done # Artifact settings RPM_ARCH=noarch case ${ARGS[command]} in build) rpmbuild \ --define "_topdir $(pwd)" \ $(buildArg "${ARGS[buildarg]}") SPECS/*.spec ;; clean) clean ;; *) about ;; esac ================================================ FILE: deploy/packaging/rpm/repo-release/SOURCES/geowave.repo ================================================ [geowave] name=GeoWave for Enterprise Linux 6 baseurl=https://s3.amazonaws.com/geowave-rpms/release/noarch/ enabled=0 gpgcheck=0 failovermethod=priority priority=15 ================================================ FILE: deploy/packaging/rpm/repo-release/SPECS/geowave-release.spec ================================================ %define component geowave-repo %define version 1.0 %define repo_dir /etc/yum.repos.d %define buildroot %{_topdir}/BUILDROOT/%{name}-%{version}-root Name: %{component} Version: %{version} Release: 3 BuildArch: noarch Summary: GeoWave RPM Repo Group: Applications/Internet License: Apache2 Source0: geowave.repo BuildRoot: %{buildroot} %description GeoWave RPM Repo %prep rm -rf %{_rpmdir}/%{buildarch}/%{name}* rm -rf %{_srcrpmdir}/%{name}* %build rm -fr %{_builddir} mkdir -p %{_builddir}/%{name} %install # Clean and init the directory rm -fr %{buildroot} mkdir -p %{buildroot}%{repo_dir} # Unpack and rename app directory cp %{SOURCE0} %{buildroot}%{repo_dir} %clean rm -fr %{buildroot} rm -fr %{_builddir}/* %files %attr(644,root,root) %{repo_dir}/geowave.repo %changelog * Tue Feb 3 2015 Andrew Spohn - 1.0 - First packaging ================================================ FILE: deploy/packaging/rpm/repo-release/rpm.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the 
# NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
#!/bin/bash
#
# RPM build script
#

# Fix: resolve rpm-functions.sh relative to this script instead of the
# caller's working directory (". ../rpm-functions.sh" only worked when the
# build was launched from inside repo-release/). Mirrors the SCRIPT_DIR
# pattern already used by deploy/packaging/rpm/centos/7/rpm.sh.
RPM_SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Source all our reusable functionality, argument is the location of this script.
. "$RPM_SCRIPT_DIR/../rpm-functions.sh" "$RPM_SCRIPT_DIR"

# Collect "--name value" pairs into an associative array keyed by name.
declare -A ARGS
while [ $# -gt 0 ]; do
  case "$1" in
    *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;;
  esac
  shift
done

# Artifact settings
RPM_ARCH=noarch

case ${ARGS[command]} in
  build)
    rpmbuild \
      --define "_topdir $(pwd)" \
      $(buildArg "${ARGS[buildarg]}") SPECS/*.spec
    ;;
  clean)
    clean
    ;;
  *)
    about
    ;;
esac

================================================
FILE: deploy/packaging/rpm/rpm-functions.sh
================================================
#-------------------------------------------------------------------------------
# Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License,
# Version 2.0 which accompanies this distribution and is available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
#-------------------------------------------------------------------------------
#!/bin/bash
#
# The reusable functionality needed to update, build and deploy RPMs.
# Should be sourced by individual projects which then only need to override # any unique behavior # # Absolute path to the directory containing admin scripts ADMIN_SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # When sourcing this script the directory of the calling script is passed CALLING_SCRIPT_DIR=$1 about() { echo "Usage: $0 --command [clean|update|build]" echo " clean - Removes build files and RPMs" echo " update - Pulls down new artifact from Jenkins" echo " build [-ba|-bb|-bp|-bc|-bi|-bl|-bs] - Builds artifacts, default is -ba (build all)" } # Check for valid RPM build lifecycle argument or use default buildArg() { # ba : Build binary and source packages (after doing the %prep, %build, and %install stages) # bb : Build a binary package (after doing the %prep, %build, and %install stages) # bp : Build a binary package (after doing the %prep, %build, and %install stages) # bc : Do the "%build" stage from the spec file (after doing the %prep stage) # bi : Do the "%install" stage from the spec file (after doing the %prep and %build stages) # bl : Do a "list check". 
The "%files" section from the spec file is macro expanded, and checks are made to verify that each file exists # bs : Build just the source package VALID_ARGS=('ba' 'bb' 'bp' 'bc' 'bi' 'bl' 'bs') DEFAULT_ARG='ba' BUILD_ARG="$1" # No arg uses default if [ -z "$BUILD_ARG" ]; then echo "-$DEFAULT_ARG" exit fi # A bad arg uses default (as long as our default is build all the worst case is it will do more than you asked) match=0 for arg in "${VALID_ARGS[@]}" do if [ "$BUILD_ARG" = $arg ]; then match=1 break fi done if [ $match -eq 0 ]; then echo "-$DEFAULT_ARG" exit fi # Pass along valid build arg echo "-$BUILD_ARG" } # Given a version string, remove all dots and patch version dash labels, then take the first three sets of digits # and interpret as an integer to determine the install priority number used by alternatives in an automated way parsePriorityFromVersion() { # Drop trailing bug fix or pre-release labels (0.8.8-alpha2 or 0.8.8-1) VERSION=${1%-*} VERSION=${VERSION%~*} # Truncate the version string after the first three groups delimited by dots VERSION=$(echo $VERSION | cut -d '.' -f1-3) # Remove non digits (dots) VERSION=$(echo ${VERSION//[^0-9]/}) # If empty or not a number is the result return a low priority if [ -z "$VERSION" ] || [ "$VERSION" -ne "$VERSION" ] ; then echo 1 else # Interpret as a base 10 number (drop leading zeros) echo $(( 10#$VERSION )) fi } # Removes all files except spec and sources clean() { rm -rf $CALLING_SCRIPT_DIR/BUILD/* rm -rf $CALLING_SCRIPT_DIR/BUILDROOT/* rm -rf $CALLING_SCRIPT_DIR/RPMS/* rm -rf $CALLING_SCRIPT_DIR/SRPMS/* rm -rf $CALLING_SCRIPT_DIR/TARBALL/* } # Just grabbed off the Interwebs, looks to give sane results in the # couple of tests I've written. Add more and tweak if found to be defective isValidUrl() { VALID_URL_REGEX='(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]' [[ $1 =~ $VALID_URL_REGEX ]] && return 0 || return 1 } if [ ! -d "$CALLING_SCRIPT_DIR" ]; then echo >&2 "Usage: . 
$0 [calling script directory]" exit 1 fi ================================================ FILE: deploy/packaging/sandbox/generate-sandbox-scripts.sh ================================================ #------------------------------------------------------------------------------- # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt #------------------------------------------------------------------------------- #!/bin/bash # # This will take the template and generate a set of scripts, replacing tokens appropriately # required parameters are --version and --workspace declare -A ARGS while [ $# -gt 0 ]; do case "$1" in *) NAME="${1:2}"; shift; ARGS[$NAME]="$1" ;; esac shift done TARGET_ROOT=${ARGS[workspace]}/deploy/packaging/sandbox/generated TEMPLATE_ROOT=${ARGS[workspace]}/deploy/packaging/sandbox/template mkdir -p $TARGET_ROOT/quickstart # temporarily cp templates to replace common tokens and then cp it to data store locations and rm it here cp $TEMPLATE_ROOT/quickstart/geowave-env.sh.template $TARGET_ROOT/quickstart/geowave-env.sh # replace version token first sed -i -e s/'$GEOWAVE_VERSION_TOKEN'/${ARGS[version]}/g $TARGET_ROOT/quickstart/geowave-env.sh ================================================ FILE: deploy/packaging/sandbox/template/quickstart/geowave-env.sh.template ================================================ #!/bin/bash export STAGING_DIR=/mnt #PARIS 11/13/2015-11/14/2015 #export TIME_REGEX=2015111[34] #export EAST=2.63791 #export WEST=2.08679 #export NORTH=49.04694 #export SOUTH=48.658291 #Europe 02/2016 export TIME_REGEX=201602 export EUROPE_EAST=40 export EUROPE_WEST=-31.25 export 
EUROPE_NORTH=81 export EUROPE_SOUTH=27.6363 export GERMANY="MULTIPOLYGON (((8.710256576538086 47.696809768676758,8.678594589233398 47.69334602355957,8.670557022094727 47.71110725402832,8.710256576538086 47.696809768676758)),((6.806390762329102 53.60222053527832,6.746946334838867 53.560274124145508,6.658334732055664 53.58610725402832,6.806390762329102 53.60222053527832)),((6.939443588256836 53.669443130493164,6.87639045715332 53.67027473449707,7.088335037231445 53.684167861938477,6.939443588256836 53.669443130493164)),((7.242498397827148 53.704439163208008,7.135835647583008 53.706110000610352,7.346944808959961 53.721109390258789,7.242498397827148 53.704439163208008)),((8.191110610961914 53.72471809387207,8.120000839233398 53.713052749633789,8.142778396606445 53.733606338500977,8.191110610961914 53.72471809387207)),((7.622224807739258 53.75444221496582,7.467779159545898 53.733057022094727,7.485834121704102 53.757501602172852,7.622224807739258 53.75444221496582)),((7.758890151977539 53.760553359985352,7.664445877075195 53.761667251586914,7.812780380249023 53.775552749633789,7.758890151977539 53.760553359985352)),((8.42527961730957 53.928056716918945,8.411664962768555 53.95555305480957,8.454999923706055 53.963052749633789,8.42527961730957 53.928056716918945)),((13.940279006958008 54.024995803833008,13.925832748413086 54.018327713012695,13.934446334838867 54.027772903442383,13.940279006958008 54.024995803833008)),((8.695554733276367 54.041109085083008,8.671388626098633 54.077775955200195,8.693334579467773 54.082498550415039,8.695554733276367 54.041109085083008)),((14.001317977905273 54.065362930297852,14.225557327270508 53.928606033325195,14.218889236450195 53.869020462036133,13.823431015014648 53.85374641418457,14.056005477905273 53.984865188598633,13.759164810180664 54.159997940063477,14.001317977905273 54.065362930297852)),((10.97944450378418 54.380556106567383,11.017778396606445 54.380273818969727,11.003053665161133 54.37693977355957,10.97944450378418 
54.380556106567383)),((8.893056869506836 54.461938858032227,8.815000534057617 54.500833511352539,8.960554122924805 54.519166946411133,8.893056869506836 54.461938858032227)),((11.312776565551758 54.406946182250977,11.006387710571289 54.461664199829102,11.184167861938477 54.519998550415039,11.312776565551758 54.406946182250977)),((8.662778854370117 54.494165420532227,8.59111213684082 54.527772903442383,8.710832595825195 54.551668167114258,8.662778854370117 54.494165420532227)),((13.073610305786133 54.488611221313477,13.09666633605957 54.590555191040039,13.151388168334961 54.602777481079102,13.073610305786133 54.488611221313477)),((13.383054733276367 54.638887405395508,13.730833053588867 54.275835037231445,13.11833381652832 54.333887100219727,13.267499923706055 54.382501602172852,13.146963119506836 54.54560661315918,13.503091812133789 54.493097305297852,13.244722366333008 54.559167861938477,13.383054733276367 54.638887405395508)),((8.364442825317383 54.61332893371582,8.294443130493164 54.666666030883789,8.353887557983398 54.711664199829102,8.364442825317383 54.61332893371582)),((8.567777633666992 54.685274124145508,8.396944046020508 54.713884353637695,8.551111221313477 54.753885269165039,8.567777633666992 54.685274124145508)),((10.97944450378418 54.380556106567383,10.818536758422852 53.890054702758789,12.526945114135742 54.474161148071289,12.924165725708008 54.426942825317383,12.369722366333008 54.26500129699707,13.023889541625977 54.399721145629883,13.455831527709961 54.096109390258789,13.718332290649414 54.169717788696289,13.813055038452148 53.845277786254883,14.275629043579102 53.699068069458008,14.149168014526367 52.86277961730957,14.640275955200195 52.57249641418457,14.599443435668945 51.818605422973633,15.03639030456543 51.285554885864258,14.828332901000977 50.86583137512207,14.309720993041992 51.053606033325195,12.093706130981445 50.322534561157227,12.674444198608398 49.424997329711914,13.833612442016602 48.77360725402832,12.758333206176758 
48.12388801574707,13.016668319702148 47.470277786254883,12.735555648803711 47.684167861938477,11.095556259155273 47.396112442016602,10.478055953979492 47.591943740844727,10.173334121704102 47.274721145629883,9.56672477722168 47.54045295715332,8.566110610961914 47.806940078735352,8.576421737670898 47.591371536254883,7.697225570678711 47.543329238891602,7.58827018737793 47.584482192993164,7.578889846801758 48.119722366333008,8.226079940795898 48.964418411254883,6.36216926574707 49.459390640258789,6.524446487426758 49.808610916137695,6.134416580200195 50.127847671508789,6.39820671081543 50.323175430297852,6.011800765991211 50.757272720336914,5.864721298217773 51.046106338500977,6.222223281860352 51.465829849243164,5.962499618530273 51.807779312133789,6.828889846801758 51.965555191040039,7.065557479858398 52.385828018188477,6.68889045715332 52.549165725708008,7.051668167114258 52.643610000610352,7.208364486694336 53.242807388305664,7.015554428100586 53.41472053527832,7.295835494995117 53.685274124145508,8.008333206176758 53.710000991821289,8.503053665161133 53.354166030883789,8.665555953979492 53.893884658813477,9.832498550415039 53.536386489868164,8.899721145629883 53.940828323364258,8.883611679077148 54.294168472290039,8.599443435668945 54.333887100219727,9.016942977905273 54.498331069946289,8.580549240112305 54.867879867553711,8.281110763549805 54.746942520141602,8.393331527709961 55.053056716918945,8.664545059204102 54.913095474243164,9.44536018371582 54.825403213500977,9.972776412963867 54.761110305786133,9.870279312133789 54.454439163208008,10.97944450378418 54.380556106567383),(11.459165573120117 53.96110725402832,11.488611221313477 54.023050308227539,11.37388801574707 53.988611221313477,11.459165573120117 53.96110725402832),(11.544168472290039 54.06138801574707,11.612421035766602 54.104585647583008,11.511110305786133 54.048608779907227,11.544168472290039 54.06138801574707),(12.72972297668457 54.416666030883789,12.702775955200195 
54.42833137512207,12.68610954284668 54.418329238891602,12.72972297668457 54.416666030883789)))" export BERLIN_BBOX="BBOX(shape,13.0535, 52.3303, 13.7262, 52.6675)" export PARIS_BBOX="BBOX(shape,2.0868, 48.6583, 2.6379, 49.0469)" export HDFS_PORT=8020 export RESOURCE_MAN_PORT=8032 export NUM_PARTITIONS=32 export GEOWAVE_TOOL_JAVA_OPT=-Xmx4g export GEOWAVE_TOOLS_JAR=/usr/local/geowave/tools/geowave-tools-${GEOWAVE_VERSION_TOKEN}-hdp2.jar ================================================ FILE: deploy/packaging/standalone/standalone-installer.install4j ================================================ sys.installationDir context.getBooleanVariable("sys.confirmedUpdateInstallation") ${form:welcomeMessage} !context.isConsole() String message = context.getMessage("ConsoleWelcomeLabel", context.getApplicationName()); return console.askOkCancel(message, true); updateCheck ${i18n:ClickNext} !context.getBooleanVariable("sys.confirmedUpdateInstallation") sys.installationDir context.getVariable("sys.responseFile") == null ${i18n:SelectDirLabel(${compiler:sys.fullName})} true suggestAppDir validateApplicationId existingDirWarning checkWritable manualEntryAllowed checkFreeSpace showRequiredDiskSpace showFreeDiskSpace allowSpacesOnUnix validationScript standardValidation ${i18n:SelectComponentsLabel2} !context.isConsole() true selectionChangedScript ${i18n:UninstallerMenuEntry(${compiler:sys.fullName})} !context.getBooleanVariable("sys.programGroupDisabled") ${compiler:sys.fullName} ${compiler:sys.version} com.install4j.runtime.beans.actions.misc.ModifyStringType APPEND ${installer:sys.installationDir} Path com.install4j.runtime.beans.actions.misc.ModifyStringType PREPEND ${installer:sys.installationDir}\lib\utilities\gdal Path ${i18n:WizardPreparing} ${form:finishedMessage} ${i18n:UninstallerMenuEntry(${compiler:sys.fullName})} true ${form:welcomeMessage} !context.isConsole() String message = context.getMessage("ConfirmUninstall", context.getApplicationName()); return 
console.askYesNo(message, true); ${installer:sys.installationDir} true ${i18n:UninstallerPreparing} ${form:successMessage} ================================================ FILE: deploy/pom.xml ================================================ 4.0.0 geowave-parent org.locationtech.geowave ../ 2.0.2-SNAPSHOT geowave-deploy GeoWave Deployment Configurations ${project.artifactId}-${project.version}-tools ${project.artifactId}-${project.version}-hbase ${project.artifactId}-${project.version}-accumulo ${project.artifactId}-${project.version}-geoserver ${project.artifactId}-${project.version}-jace yyyy-MM-dd'T'HH:mm:ssZ ${maven.build.timestamp} ON ON org.locationtech.geowave geowave-adapter-vector ${project.version} org.locationtech.geowave geowave-adapter-raster ${project.version} true src/main/resources build.properties GeoWaveLabels.properties log4j2.properties org.apache.maven.plugins maven-resources-plugin 2.7 UTF-8 org.codehaus.mojo buildnumber-maven-plugin 1.3 validate create geowave-tools-singlejar org.locationtech.geowave geowave-analytic-spark ${project.version} org.locationtech.geowave geowave-analytic-mapreduce ${project.version} org.locationtech.geowave geowave-datastore-accumulo ${project.version} org.locationtech.geowave geowave-datastore-hbase ${project.version} org.locationtech.geowave geowave-datastore-bigtable ${project.version} org.locationtech.geowave geowave-datastore-cassandra ${project.version} org.locationtech.geowave geowave-datastore-dynamodb ${project.version} org.locationtech.geowave geowave-datastore-redis ${project.version} org.locationtech.geowave geowave-datastore-rocksdb ${project.version} org.locationtech.geowave geowave-datastore-filesystem ${project.version} org.locationtech.geowave geowave-datastore-kudu ${project.version} org.locationtech.geowave geowave-cli-geoserver ${project.version} org.locationtech.geowave geowave-cli-landsat8 ${project.version} org.locationtech.geowave geowave-cli-sentinel2 ${project.version} 
org.locationtech.geowave geowave-format-4676 ${project.version} org.locationtech.geowave geowave-format-avro ${project.version} org.locationtech.geowave geowave-format-gdelt ${project.version} org.locationtech.geowave geowave-format-geolife ${project.version} org.locationtech.geowave geowave-format-gpx ${project.version} org.locationtech.geowave geowave-format-raster ${project.version} org.locationtech.geowave geowave-format-tdrive ${project.version} org.locationtech.geowave geowave-format-twitter ${project.version} org.locationtech.geowave geowave-format-vector ${project.version} org.locationtech.geowave geowave-grpc-server ${project.version} org.locationtech.geowave geowave-python ${project.version} org.locationtech.geowave geowave-migration ${project.version} org.apache.maven.plugins maven-shade-plugin 2.2 package shade junit:junit junit/framework/** org/junit/** org/junit/experimental/** org/junit/runners/** *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA org.locationtech.geowave.core.cli.GeoWaveMain GeoWave LocationTech ${project.version} org.locationtech.geowave LocationTech ${project.version} META-INF/javax.media.jai.registryFile.jai META-INF/registryFile.jai META-INF/registryFile.jaiext false false ${tools.finalName} build-installer-main 2.10.0 org.locationtech.geowave geowave-analytic-api ${project.version} maven-assembly-plugin 3.1.1 installer-main false default-installer-main make-assembly package single org.locationtech.geowave geowave-dev-resources ${geowave-dev-resources.version} org.sonatype.install4j install4j-maven-plugin compile-installers package compile ${project.version} ${project.build.directory}/install4j-output ${project.basedir}/packaging/standalone/standalone-installer.install4j geotools-container-singlejar provided -geotools-container org.locationtech.geowave geowave-datastore-accumulo ${project.version} org.locationtech.geowave geowave-datastore-hbase ${project.version} org.locationtech.geowave geowave-datastore-bigtable 
${project.version} org.locationtech.geowave geowave-datastore-cassandra ${project.version} org.locationtech.geowave geowave-datastore-dynamodb ${project.version} org.locationtech.geowave geowave-datastore-redis ${project.version} org.locationtech.geowave geowave-datastore-rocksdb ${project.version} org.locationtech.geowave geowave-datastore-filesystem ${project.version} org.locationtech.geowave geowave-format-4676 ${project.version} org.apache.maven.plugins maven-shade-plugin 2.2 package shade org.slf4j:* *:pom:* log4j:log4j commons-codec:commons-codec com.google.guava:guava tomcat:* javax.media:jai*:* com.sun.jersey:* *:servlet*:* *:javax.servlet*:* *:jsp*:* *:jetty*:* *:commons-httpclient:* *:maven*:* *:commons-logging:* *:commons-io:* *:jts*:* *:activation:* *:servlet-api:* *:*:jsr305 org.springframework.security:* org.springframework:* org.geoserver:* xpp3:xpp3_min xpp3:xpp3 org.apache.xmlgraphics:batik-ext commons-beanutils commons-digester commons-collections *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA log4j.properties GeoWave-Tools LocationTech ${project.version} org.locationtech.geowave LocationTech ${project.version} META-INF/javax.media.jai.registryFile.jai META-INF/registryFile.jai META-INF/registryFile.jaiext false false ${geotools.finalName} accumulo-container-singlejar provided -accumulo-container org.locationtech.geowave geowave-datastore-accumulo ${project.version} org.apache.maven.plugins maven-shade-plugin 2.2 package shade org.slf4j:* *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA log4j.properties GeoWave-Accumulo LocationTech ${project.version} org.locationtech.geowave LocationTech ${project.version} META-INF/javax.media.jai.registryFile.jai META-INF/registryFile.jai META-INF/registryFile.jaiext false false ${accumulo.finalName} hbase-container-singlejar provided -hbase-container 12.0.1 org.locationtech.geowave geowave-datastore-hbase-coprocessors ${project.version} org.apache.maven.plugins maven-shade-plugin 2.2 package shade org.slf4j:* 
com.google.protobuf:* org.apache.hadoop:* *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA log4j.properties GeoWave-HBase LocationTech ${project.version} org.locationtech.geowave LocationTech ${project.version} META-INF/javax.media.jai.registryFile.jai META-INF/registryFile.jai META-INF/registryFile.jaiext false false ${hbase.finalName} generate-geowave-jace org.locationtech.geowave geowave-datastore-accumulo ${project.version} org.apache.maven.plugins maven-shade-plugin 2.2 package shade *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA GeoWave-C++ LocationTech ${project.version} org.locationtech.geowave LocationTech ${project.version} META-INF/javax.media.jai.registryFile.jai META-INF/registryFile.jai META-INF/registryFile.jaiext false false ${jace.finalName} maven-dependency-plugin 2.4 unpack-dependencies package unpack com.googlecode.jace jace-core-cpp 1.3.0 sources ${project.build.directory}/dependency/jace copy-jace-core-runtime package copy com.googlecode.jace jace-core-runtime 1.3.0 ${project.build.directory}/dependency com.googlecode.jace jace-maven-plugin 1.3.0 generate-proxies package generate-cpp-proxies ${basedir}/jace/ ${project.build.directory}/dependency/jace/include ${project.build.directory}/dependency/jace/source ${env.JAVA_HOME}/jre/lib/rt.jar ${project.build.directory}/${jace.finalName}.jar PUBLIC true true org.eclipse.m2e lifecycle-mapping 1.0.0 maven-dependency-plugin [2.4,) unpack copy maven-shade-plugin [2.2,) shade com.googlecode.jace jace-maven-plugin [1.2.22,) generate-cpp-proxies ================================================ FILE: deploy/scripts/clean-up.py ================================================ #!/usr/bin/python # Copyright (c) 2013-2022 Contributors to the Eclipse Foundation # # See the NOTICE file distributed with this work for additional # information regarding copyright ownership. # All rights reserved. 
This program and the accompanying materials # are made available under the terms of the Apache License, # Version 2.0 which accompanies this distribution and is available at # http://www.apache.org/licenses/LICENSE-2.0.txt ############################################################################## # This script relies on an EC2 instance with IAM role that enables S3 access. # It pulls metadata from the underlying OS and does not require locally defined credentials import re import os import sys import boto3 from collections import OrderedDict import argparse from datetime import datetime, timedelta class CleanUp(): def __init__(self, workspace_path): self.workspace_path = None if workspace_path.startswith(os.sep): self.workspace_path = workspace_path else: print("ERROR: Path provided for workspace is invalid. Please ensure it is an absolute path") sys.exit(1) # Information for builds to keep session = boto3.Session() creds = session.get_credentials() os.environ["AWS_ACCESS_KEY_ID"] = creds.access_key os.environ["AWS_SECRET_ACCESS_KEY"] = creds.secret_key os.environ["AWS_SESSION_TOKEN"] = creds.token os.environ["AWS_DEFAULT_REGION"] = "us-east-1" # Delete everything older than 3 days self.date_threshhold = datetime.now() - timedelta(days=3) self.rpm_bucket = os.environ['rpm_bucket'] def find_build_type(self): build_type_file = os.path.join(self.workspace_path, 'deploy', 'target', 'build-type.txt') build_type = "" if os.path.isfile(build_type_file): fileptr = open(build_type_file, 'r') build_type = fileptr.readline().rstrip() fileptr.close() else: print("WARNING: \"{}\" file not found. 
Script will not run clean".format(build_type_file)) build_type = None return build_type def clean_bucket(self): s3 = boto3.client('s3') resp = s3.list_objects_v2(Bucket="geowave-rpms", Prefix="dev") for obj in resp['Contents']: key = obj['Key'] if 'repo' not in key: if 'noarch' in key: artifact_date_str = os.path.basename(key).split('.')[3] else: artifact_date_str = os.path.basename(key).rsplit('-', 1)[1].split('.')[0] try: date_time = datetime.strptime(artifact_date_str, "%Y%m%d%H%M") if date_time < self.date_threshhold and date_time != None: s3.delete_object(Bucket=self.rpm_bucket, Key=key) except ValueError as error: print(error) print("Incorrect date format, skipping") if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('workspace', type=str, help='The path to the jenkins workspace. Must be absolute path.') args = parser.parse_args() cleaner = CleanUp(args.workspace) build_type = cleaner.find_build_type() if build_type == 'dev': cleaner.clean_bucket() elif build_type == 'release': print("Build type detected as release. 
Not doing clean up.") ================================================ FILE: deploy/src/main/resources/GeoWaveLabels.properties ================================================ accumulo.pass.label=Accumulo Connection Password ================================================ FILE: deploy/src/main/resources/build.properties ================================================ # Project Metadata project.version=${project.parent.version} project.branch=${scmBranch} project.scm.revision=${buildNumber} project.build.args=${env.BUILD_ARGS} # Build Details build.timestamp=${build.timestamp} build.user=${user.name} build.os=${os.name} build.os.version=${os.version} build.os.arch=${os.arch} build.jvm.version=${java.runtime.version} build.jvm.vendor=${java.vendor} build.maven.version=${maven.version} ================================================ FILE: deploy/src/main/resources/log4j2.properties ================================================ ## This log4j 2.x configuration file for geowave appender.rolling.type = RollingFile appender.rolling.name = RollingFile appender.rolling.fileName = ${sys:geowave.home:-${sys:user.home}/geowave}/logs/geowave.log appender.rolling.filePattern = ${sys:geowave.home:-${sys:user.home}/geowave}/logs/geowave-%i.log.gz appender.rolling.filePermissions = rw-rw-rw- appender.rolling.layout.type = PatternLayout appender.rolling.layout.pattern = %d{dd MMM HH:mm:ss} %p [%c{2}] - %m%n appender.rolling.policies.type = Policies appender.rolling.policies.size.type = SizeBasedTriggeringPolicy appender.rolling.policies.size.size=10MB rootLogger=WARN, RollingFile category.org.geotools=WARN category.org.geotools.factory=WARN category.org.geoserver=INFO category.org.vfny.geoserver=INFO category.org.vfny.geoserver.config.web.tiles.definition.MultipleDefinitionsFactory=WARN category.org.vfny.geoserver.global=WARN category.org.springframework=WARN category.org.apache.struts=WARN category.org.apache.spark.util.ShutdownHookManager=OFF 
category.org.apache.spark.SparkEnv=ERROR category.org.apache.hadoop.mapreduce=INFO category.org.apache.thrift=ERROR # [client.ClientConfiguration] - Found no client.conf in default paths. Using default client configuration values. category.org.apache.accumulo.core.client.ClientConfiguration=ERROR # Avoiding these warnings WARNING: Extension lookup '****', but ApplicationContext is unset. # org.geoserver.platform.GeoServerExtensions checkContext category.org.geoserver.platform=ERROR ================================================ FILE: dev-resources/pom.xml ================================================ 4.0.0 geowave-dev-resources org.locationtech.geowave 1.7 GeoWave Development Resources jar Development resources and settings for geowave https://github.com/locationtech/geowave The Apache Software License, Version 2.0 http://www.apache.org/licenses/LICENSE-2.0.txt ossrh https://oss.sonatype.org/content/repositories/snapshots ossrh https://oss.sonatype.org/service/local/staging/deploy/maven2/ https://github.com/locationtech/geowave.git scm:git:git@github.com:locationtech/geowave.git rfecher Rich Fecher rfecher@gmail.com developer architect release org.sonatype.plugins nexus-staging-maven-plugin 1.6.8 true ossrh https://oss.sonatype.org/ true org.apache.maven.plugins maven-source-plugin 3.0.1 attach-sources jar-no-fork org.apache.maven.plugins maven-javadoc-plugin 2.10.4 attach-javadocs jar org.apache.maven.plugins maven-gpg-plugin 1.6 sign-artifacts verify sign --pinentry-mode loopback ================================================ FILE: dev-resources/src/main/resources/assemblies/default-installer-main.xml ================================================ default-installer-main dir false / true org.locationtech.geowave:geowave-datastore* org.locationtech.geowave:geowave-cli* org.locationtech.geowave:geowave-analytic-mapreduce org.locationtech.geowave:geowave-analytic-spark org.locationtech.geowave:geowave-grpc* org.locationtech.geowave:geowave-service* 
org.locationtech.geowave:geowave-format* runtime true true ================================================ FILE: dev-resources/src/main/resources/assemblies/default-installer-plugin.xml ================================================ default-installer-plugin dir false / true org.locationtech.geowave:geowave-core* org.locationtech.geowave:geowave-adapter* org.locationtech.geowave:geowave-analytic-api org.slf4j:* net.sf.json-lib:* org.glassfish.jersey.core:* javax.servlet:javax.servlet-api com.github.ben-manes.caffeine:caffeine com.clearspring.analytics:stream io.netty:* com.fasterxml.jackson.core:* *:jsr305 org.apache.httpcomponents:httpcore org.apache.hadoop:hadoop-client org.apache.hadoop:hadoop-auth org.apache.hadoop:hadoop-annotations org.apache.hadoop:hadoop-hdfs-client org.apache.hadoop:hadoop-mapreduce-client-app org.apache.hadoop:hadoop-mapreduce-client-core org.apache.hadoop:hadoop-mapreduce-client-common org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar: org.apache.hadoop:hadoop-yarn-api org.apache.hadoop:hadoop-yarn-client org.apache.hadoop:hadoop-yarn-common org.apache.hadoop:hadoop-yarn-server-common org.apache.spark:spark-core* org.apache.spark:spark-sql* org.apache.spark:spark-tags* com.google.guava:guava org.locationtech.jts:jts-core *:commons-io *:commons-vfs2 *:commons-lang *:commons-lang3 *:commons-logging *:commons-math *:commons-math3 *:commons-net *:commons-pool* *:commons-lzf *:commons-httpclient *:commons-jxpath *:commons-collections *:commons-configuration *:commons-beanutils-core *:commons-digester *:jcommander *:log4j *:log4j-slf4j* *:zookeeper *:metrics-core joda-time:joda-time net.jcip:jcip-annotations com.google.code.gson:gson com.google.protobuf:protobuf-java com.amazonaws:aws-java-sdk-s3 com.amazonaws:aws-java-sdk-kms com.amazonaws:aws-java-sdk-core com.aol.simplereact:cyclops-react com.github.spotbugs:spotbugs-annotations org.geotools:gt-main:jar: org.geotools:gt-coverage:jar: org.geotools:gt-image:jar: 
org.geotools:gt-imagemosaic:jar: org.geotools:gt-jdbc:jar: org.geotools:gt-shapefile:jar: org.geotools:gt-transform:jar: org.geotools.ogc:net.opengis.ows:jar: org.geotools.ogc:org.w3.xlink:jar: org.eclipse.emf:org.eclipse.emf.common:jar: org.eclipse.emf:org.eclipse.emf.ecore:jar: org.eclipse.emf:org.eclipse.emf.ecore.xmi:jar: javax.media:jai_core javax.media:jai_codec javax.media:jai_imageio it.geosolutions.imageio-ext:imageio-ext-gdalframework it.geosolutions.imageio-ext:imageio-ext-geocore it.geosolutions.imageio-ext:imageio-ext-imagereadmt it.geosolutions.imageio-ext:imageio-ext-utilities runtime true true ================================================ FILE: dev-resources/src/main/resources/eclipse/eclipse-cleanup.xml ================================================ ================================================ FILE: dev-resources/src/main/resources/eclipse/eclipse-formatter.xml ================================================ ================================================ FILE: dev-resources/src/main/resources/findbugs/findbugs-exclude.xml ================================================ ================================================ FILE: docs/.gitignore ================================================ content/manpages/*.1 content/manpages/*.xml content/manpages/*.text ================================================ FILE: docs/content/commands/000-header.adoc ================================================ <<< :linkattrs: = GeoWave Command-Line Interface ifdef::backend-html5[] ++++ ++++ endif::backend-html5[] ================================================ FILE: docs/content/commands/005-commands-and-flags.adoc ================================================ <<< == Overview The Command-Line Interface provides a way to execute a multitude of common operations on GeoWave data stores without having to use the Programmatic API. It allows users to manage data stores, indices, statistics, and more. 
All command options that are marked with `*` are required for the command to execute. == Configuration The CLI uses a local configuration file to store sets of data store connection parameters aliased by a store name. Most GeoWave commands ask for a store name and use the configuration file to determine which connection parameters should be used. It also stores connection information for GeoServer, AWS, and HDFS for commands that use those services. This configuration file is generally stored in the user's home directory, although an alternate configuration file can be specified when running commands. == General Usage The root of all GeoWave CLI commands is the base `geowave` command. [source, bash] ---- $ geowave ---- This will display a list of all available top-level commands along with a brief description of each. === Version [source, bash] ---- $ geowave --version ---- The `--version` flag will display various information about the installed version of GeoWave, including the version, build arguments, and revision information. === General Flags These flags can be optionally supplied to any GeoWave command, and should be supplied before the command itself. ==== Config File The `--config-file` flag causes GeoWave to use an alternate configuration file. The supplied file path should include the file name (e.g. `--config-file /mnt/config.properties`). This can be useful if you have multiple projects that use GeoWave and want to keep the configuration for those data stores separate from each other. [source, bash] ---- $ geowave --config-file ---- ==== Debug The `--debug` flag causes all DEBUG, INFO, WARN, and ERROR log events to be output to the console. By default, only WARN and ERROR log events are displayed. [source, bash] ---- $ geowave --debug ---- == Help Command Adding `help` before any CLI command will show that command's options and their defaults. 
[source, bash] ---- $ geowave help ---- For example, using the `help` command on `index add` would result in the following output: .... $ geowave help index add Usage: geowave index add [options] Options: -np, --numPartitions The number of partitions. Default partitions will be 1. Default: 1 -ps, --partitionStrategy The partition strategy to use. Default will be none. Default: NONE Possible Values: [NONE, HASH, ROUND_ROBIN] * -t, --type The type of index, such as spatial, or spatial_temporal .... == Explain Command The `explain` command is similar to the `help` command in its usage, but shows all options, including hidden ones. It can be a great way to make sure your parameters are correct before issuing a command. [source, bash] ---- $ geowave explain ---- For example, if you wanted to add a spatial index to a store named `test-store` but weren't sure what all of the options available to you were, you could do the following: .... $ geowave explain index add -t spatial test-store spatial-idx Command: geowave [options] ... VALUE NEEDED PARAMETER NAMES ---------------------------------------------- { } -cf, --config-file, { } --debug, { } --version, Command: add [options] VALUE NEEDED PARAMETER NAMES ---------------------------------------------- { EPSG:4326} -c, --crs, { false} -fp, --fullGeometryPrecision, { 7} -gp, --geometryPrecision, { 1} -np, --numPartitions, { NONE} -ps, --partitionStrategy, { false} --storeTime, { spatial} -t, --type, Expects: Specified: test-store spatial-idx .... The output is broken down into two sections. The first section shows all of the options available on the `geowave` command. If you wanted to use any of these options, they would need to be specified before `index add`. The second section shows all of the options available on the `index add` command. Some commands contain options that, when specified, may reveal more options. 
In this case, the `-t spatial` option has revealed some additional configuration options that we could apply to the spatial index. Another command where this is useful is the `store add` command, where each data store type specified by the `-t ` option has a different set of configuration options. ================================================ FILE: docs/content/commands/010-config-commands.adoc ================================================ <<< == Config Commands Commands that affect the local GeoWave configuration. [[config-aws]] === Configure AWS include::manpages/config/geowave-aws.txt[] ''' [[config-geoserver]] === Configure GeoServer include::manpages/config/geowave-geoserver.txt[] ''' [[config-hdfs]] === Configure HDFS include::manpages/config/geowave-hdfs.txt[] ''' [[config-list]] === List Configured Properties include::manpages/config/geowave-list.txt[] ''' [[config-newcryptokey]] === Configure Cryptography Key include::manpages/config/geowave-newcryptokey.txt[] ''' [[config-set]] === Set Configuration Property include::manpages/config/geowave-set.txt[] ''' ================================================ FILE: docs/content/commands/011-store-commands.adoc ================================================ <<< == Store Commands Commands for managing GeoWave data stores. 
[[store-add]] === Add Store include::manpages/store/geowave-addstore.txt[] [[store-describe]] === Describe Store include::manpages/store/geowave-describestore.txt[] [[store-clear]] === Clear Store include::manpages/store/geowave-clear.txt[] [[store-copy]] === Copy Store include::manpages/store/geowave-copy.txt[] [[store-copymr]] === Copy Store with MapReduce include::manpages/store/geowave-copymr.txt[] [[store-copystorecfg]] === Copy Store Configuration include::manpages/store/geowave-copystorecfg.txt[] [[store-list]] === List Stores include::manpages/store/geowave-liststores.txt[] [[store-rm]] === Remove Store include::manpages/store/geowave-rmstore.txt[] [[store-version]] === Store Version include::manpages/store/geowave-version.txt[] [[store-listplugins]] === List Store Plugins include::manpages/store/geowave-liststoreplugins.txt[] ''' ================================================ FILE: docs/content/commands/012-index-commands.adoc ================================================ <<< == Index Commands Commands for managing GeoWave indices. [[index-add]] === Add Index include::manpages/index/geowave-addindex.txt[] [[index-compact]] === Compact Index include::manpages/index/geowave-compactindex.txt[] [[index-list]] === List Indices include::manpages/index/geowave-listindex.txt[] [[index-rm]] === Remove Index include::manpages/index/geowave-rmindex.txt[] [[index-listindexplugins]] === List Index Plugins include::manpages/index/geowave-listindexplugins.txt[] ''' ================================================ FILE: docs/content/commands/013-type-commands.adoc ================================================ <<< == Type Commands Commands for managing GeoWave types. 
[[type-list]] === List Types include::manpages/type/geowave-listtypes.txt[] [[type-add]] === Add Type include::manpages/type/geowave-addtype.txt[] [[type-rm]] === Remove Type include::manpages/type/geowave-rmtype.txt[] [[type-describe]] === Describe Type include::manpages/type/geowave-describetype.txt[] ''' ================================================ FILE: docs/content/commands/014-stat-commands.adoc ================================================ <<< == Statistics Commands Commands to manage GeoWave statistics. [[stat-list]] === List Stats include::manpages/stat/geowave-liststats.txt[] [[stat-listtypes]] === List Stat Types include::manpages/stat/geowave-liststattypes.txt[] [[stat-calc]] === Add Stat include::manpages/stat/geowave-addstat.txt[] [[stat-rm]] === Remove Stat include::manpages/stat/geowave-rmstat.txt[] [[stat-recalc]] === Recalculate Stats include::manpages/stat/geowave-recalcstats.txt[] [[stat-compact]] === Compact Stats include::manpages/stat/geowave-compactstats.txt[] ''' ================================================ FILE: docs/content/commands/015-ingest-commands.adoc ================================================ <<< == Ingest Commands Commands that ingest data directly into GeoWave or stage data to be ingested into GeoWave. 
[[ingest-localToGW]] === Ingest Local to GeoWave include::manpages/ingest/geowave-localToGW.txt[] [[ingest-kafkaToGW]] === Ingest Kafka to GeoWave include::manpages/ingest/geowave-kafkaToGW.txt[] [[ingest-localToHdfs]] === Stage Local to HDFS include::manpages/ingest/geowave-localToHdfs.txt[] [[ingest-localToKafka]] === Stage Local to Kafka include::manpages/ingest/geowave-localToKafka.txt[] [[ingest-localToMrGW]] === Ingest Local to GeoWave with MapReduce include::manpages/ingest/geowave-localToMrGW.txt[] [[ingest-mrToGW]] === Ingest MapReduce to GeoWave include::manpages/ingest/geowave-mrToGW.txt[] [[ingest-sparkToGW]] === Ingest Spark to GeoWave include::manpages/ingest/geowave-sparkToGW.txt[] [[ingest-listplugins]] === List Ingest Plugins include::manpages/ingest/geowave-listplugins.txt[] ''' ================================================ FILE: docs/content/commands/017-query-command.adoc ================================================ <<< == Query Commands Commands related to querying data. [[query]] === Query include::manpages/query/geowave-query.txt[] ''' ================================================ FILE: docs/content/commands/020-analytic-commands.adoc ================================================ <<< == Analytic Commands Commands that run MapReduce or Spark processing to enhance an existing GeoWave dataset. [NOTE] ==== The commands below can also be run as a Yarn or Hadoop API command (i.e. mapreduce). 
For instance, to run the analytic using Yarn: [source] ---- yarn jar geowave-tools.jar analytic ---- ==== [[analytic-dbscan]] === Density-Based Scan include::manpages/analytic/geowave-dbscan.txt[] [[analytic-kde]] === Kernel Density Estimate include::manpages/analytic/geowave-kde.txt[] [[analytic-kdespark]] === Kernel Density Estimate on Spark include::manpages/analytic/geowave-kdespark.txt[] [[analytic-kmeansjump]] === K-means Jump include::manpages/analytic/geowave-kmeansjump.txt[] [[analytic-kmeansparallel]] === K-means Parallel include::manpages/analytic/geowave-kmeansparallel.txt[] [[analytic-kmeansspark]] === K-means on Spark include::manpages/analytic/geowave-kmeansspark.txt[] [[analytic-nn]] === Nearest Neighbor include::manpages/analytic/geowave-nn.txt[] [[analytic-sql]] === Spark SQL include::manpages/analytic/geowave-sql.txt[] [[analytic-spatialjoin]] === Spark Spatial Join include::manpages/analytic/geowave-spatialjoin.txt[] ''' ================================================ FILE: docs/content/commands/025-vector-commands.adoc ================================================ <<< == Vector Commands Commands that operate on vector data. [[vector-cqldelete]] === CQL Delete include::manpages/vector/geowave-cqldelete.txt[] [[vector-localexport]] === Local Export include::manpages/vector/geowave-localexport.txt[] [[vector-mrexport]] === MapReduce Export include::manpages/vector/geowave-mrexport.txt[] ''' ================================================ FILE: docs/content/commands/030-raster-commands.adoc ================================================ <<< == Raster Commands Commands that operate on raster data. 
[[raster-resizemr]] === Resize with MapReduce include::manpages/raster/geowave-resizemr.txt[] [[raster-resizespark]] === Resize with Spark include::manpages/raster/geowave-resizespark.txt[] [[raster-installgdal]] === Install GDAL include::manpages/raster/geowave-installgdal.txt[] ''' ================================================ FILE: docs/content/commands/035-geoserver-commands.adoc ================================================ <<< == GeoServer Commands Commands that manage GeoServer stores and layers. [[geoserver-run]] === Run GeoServer include::manpages/geoserver/geowave-rungs.txt[] [[gs-store-commands]] === *Store Commands* [[gs-ds-add]] === Add Store include::manpages/geoserver/geowave-addds.txt[] [[gs-ds-get]] === Get Store include::manpages/geoserver/geowave-getds.txt[] [[gs-ds-getsa]] === Get Store Adapters include::manpages/geoserver/geowave-getsa.txt[] [[gs-ds-list]] === List Stores include::manpages/geoserver/geowave-listds.txt[] [[gs-ds-rm]] === Remove Store include::manpages/geoserver/geowave-rmds.txt[] [[gs-coverage-store-commands]] === *Coverage Store Commands* [[gs-cs-add]] === Add Coverage Store include::manpages/geoserver/geowave-addcs.txt[] [[gs-cs-get]] === Get Coverage Store include::manpages/geoserver/geowave-getcs.txt[] [[gs-cs-list]] === List Coverage Stores include::manpages/geoserver/geowave-listcs.txt[] [[gs-cs-rm]] === Remove Coverage Store include::manpages/geoserver/geowave-rmcs.txt[] [[gs-coverage-commands]] === *Coverage Commands* [[gs-cv-add]] === Add Coverage include::manpages/geoserver/geowave-addcv.txt[] [[gs-cv-get]] === Get Coverage include::manpages/geoserver/geowave-getcv.txt[] [[gs-cv-list]] === List Coverages include::manpages/geoserver/geowave-listcv.txt[] [[gs-cv-rm]] === Remove Coverage include::manpages/geoserver/geowave-rmcv.txt[] [[gs-layer-commands]] === *Layer Commands* [[gs-layer-add]] === Add GeoWave Layer include::manpages/geoserver/geowave-addlayer.txt[] [[gs-fl-add]] === Add Feature Layer 
include::manpages/geoserver/geowave-addfl.txt[] [[gs-fl-get]] === Get Feature Layer include::manpages/geoserver/geowave-getfl.txt[] [[gs-fl-list]] === List Feature Layers include::manpages/geoserver/geowave-listfl.txt[] [[gs-fl-rm]] === Remove Feature Layer include::manpages/geoserver/geowave-rmfl.txt[] [[gs-style-commands]] === *Style Commands* [[gs-style-add]] === Add Style include::manpages/geoserver/geowave-addstyle.txt[] [[gs-style-get]] === Get Style include::manpages/geoserver/geowave-getstyle.txt[] [[gs-style-list]] === List Styles include::manpages/geoserver/geowave-liststyles.txt[] [[gs-style-rm]] === Remove Style include::manpages/geoserver/geowave-rmstyle.txt[] [[gs-style-set]] === Set Layer Style include::manpages/geoserver/geowave-setls.txt[] [[gs-workspace-commands]] === *Workspace Commands* [[gs-ws-add]] === Add Workspace include::manpages/geoserver/geowave-addws.txt[] [[gs-ws-list]] === List Workspaces include::manpages/geoserver/geowave-listws.txt[] [[gs-ws-rm]] === Remove Workspace include::manpages/geoserver/geowave-rmws.txt[] ''' ================================================ FILE: docs/content/commands/040-util-commands.adoc ================================================ <<< == Utility Commands Miscellaneous operations that don't really warrant their own top-level command. This includes commands to start standalone data stores and services. ================================================ FILE: docs/content/commands/041-util-migrate.adoc ================================================ <<< [[util-migrate]] === Migration Command include::manpages/util/migrate/geowave-util-migrate.txt[] ''' ================================================ FILE: docs/content/commands/045-util-standalone-commands.adoc ================================================ <<< [[standalone-commands]] === *Standalone Store Commands* Commands that stand up standalone stores for testing and debug purposes. 
[[accumulo-runserver]] === Run Standalone Accumulo include::manpages/util/accumulo/geowave-runserver.txt[] [[bigtable-runserver]] === Run Standalone Bigtable include::manpages/util/bigtable/geowave-runbigtable.txt[] [[cassandra-runserver]] === Run Standalone Cassandra include::manpages/util/cassandra/geowave-runcassandra.txt[] [[dynamodb-runserver]] === Run Standalone DynamoDB include::manpages/util/dynamodb/geowave-rundynamodb.txt[] [[hbase-runserver]] === Run Standalone HBase include::manpages/util/hbase/geowave-runhbase.txt[] [[kudu-runserver]] === Run Standalone Kudu include::manpages/util/kudu/geowave-runkudu.txt[] [[redis-runserver]] === Run Standalone Redis include::manpages/util/redis/geowave-runredis.txt[] ''' ================================================ FILE: docs/content/commands/050-util-accumulo-commands.adoc ================================================ <<< [[accumulo-commands]] === *Accumulo Commands* Utility operations to set Accumulo splits and run a test server. [[accumulo-runserver]] === Run Standalone include::manpages/util/accumulo/geowave-runserver.txt[] [[accumulo-presplitpartitionid]] === Pre-split Partition IDs include::manpages/util/accumulo/geowave-presplitpartitionid.txt[] [[accumulo-splitequalinterval]] === Split Equal Interval include::manpages/util/accumulo/geowave-splitequalinterval.txt[] [[accumulo-splitnumrecords]] === Split by Number of Records include::manpages/util/accumulo/geowave-splitnumrecords.txt[] [[accumulo-splitquantile]] === Split Quantile Distribution include::manpages/util/accumulo/geowave-splitquantile.txt[] ''' ================================================ FILE: docs/content/commands/050-util-osm-commands.adoc ================================================ <<< [[osm-commands]] === *OSM Commands* Operations to ingest Open Street Map (OSM) nodes, ways and relations to GeoWave. IMPORTANT: OSM commands are not included in GeoWave by default. 
[[osm-ingest]] === Import OSM include::manpages/util/osm/geowave-ingest.txt[] [[osm-stage]] === Stage OSM include::manpages/util/osm/geowave-stage.txt[] ''' ================================================ FILE: docs/content/commands/055-util-python-commands.adoc ================================================ <<< [[python-commands]] === *Python Commands* Commands for use with the GeoWave Python bindings. [[python-rungateway]] === Run Py4J Java Gateway include::manpages/util/python/geowave-python-rungateway.txt[] ''' ================================================ FILE: docs/content/commands/060-util-landsat-commands.adoc ================================================ <<< [[landsat-commands]] === *Landsat8 Commands* Operations to analyze, download, and ingest Landsat 8 imagery publicly available on AWS. [[landsat-analyze]] === Analyze Landsat 8 include::manpages/util/landsat/geowave-analyze.txt[] [[landsat-download]] === Download Landsat 8 include::manpages/util/landsat/geowave-download.txt[] [[landsat-ingest]] === Ingest Landsat 8 include::manpages/util/landsat/geowave-ingest.txt[] [[landsat-ingestraster]] === Ingest Landsat 8 Raster include::manpages/util/landsat/geowave-ingestraster.txt[] [[landsat-ingestvector]] === Ingest Landsat 8 Metadata include::manpages/util/landsat/geowave-ingestvector.txt[] ''' ================================================ FILE: docs/content/commands/065-util-grpc-commands.adoc ================================================ <<< [[grpc-commands]] === *gRPC Commands* Commands for working with the gRPC service. 
[[grpc-start]] === Start gRPC Server include::manpages/util/grpc/geowave-grpc-start.txt[] [[grpc-stop]] === Stop gRPC Server include::manpages/util/grpc/geowave-grpc-stop.txt[] ''' ================================================ FILE: docs/content/commands/070-util-filesystem-commands.adoc ================================================ <<< [[filesystem-commands]] === *FileSystem Commands* FileSystem data store commands [[filesystem-listformats]] === List Available FileSystem Data Formats include::manpages/util/filesystem/geowave-filesystem-listformats.txt[] ''' ================================================ FILE: docs/content/commands/manpages/analytic/geowave-dbscan.txt ================================================ //:= geowave-analytic-dbscan(1) :doctype: manpage [[analytic-dbscan-name]] ==== NAME geowave-analytic-dbscan - Density-Based Scanner [[analytic-dbscan-synopsis]] ==== SYNOPSIS geowave analytic dbscan [options] [[analytic-dbscan-description]] ==== DESCRIPTION This command runs a density based scanner analytic on GeoWave data. [[analytic-dbscan-options]] ==== OPTIONS *-conf, --mapReduceConfigFile* __:: MapReduce configuration file. *$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* __:: Fully qualified path to the base directory in HDFS. *$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* __:: [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`. *$$*$$ -resourceman, --mapReduceYarnResourceManager* __:: [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`. *-hdfs, --mapReduceHdfsHostPort* __:: HDFS hostname and port in the format `hostname:port`. *--cdf, --commonDistanceFunctionClass* __:: Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`. *$$*$$ --query.typeNames* __:: The comma-separated list of types to query; by default all types are used. 
*--query.auth* __:: The comma-separated list of authorizations used during extract; by default all authorizations are used. *--query.index* __:: The specific index to query; by default one is chosen for each adapter. *$$*$$ -emx, --extractMaxInputSplit* __:: Maximum HDFS input split size. *$$*$$ -emn, --extractMinInputSplit* __:: Minimum HDFS input split size. *-eq, --extractQuery* __:: Query. *-ofc, --outputOutputFormat* __:: Output format class. *-ifc, --inputFormatClass* __:: Input format class. *-orc, --outputReducerCount* __:: Number of reducers for output. *$$*$$ -cmi, --clusteringMaxIterations* __:: Maximum number of iterations when finding optimal clusters. *$$*$$ -cms, --clusteringMinimumSize* __:: Minimum cluster size. *$$*$$ -pmd, --partitionMaxDistance* __:: Maximum partition distance. *-b, --globalBatchId* __:: Batch ID. *-hdt, --hullDataTypeId* __:: Data Type ID for a centroid item. *-hpe, --hullProjectionClass* __:: Class to project on to 2D space. Implements `org.locationtech.geowave.analytics.tools.Projection`. *-ons, --outputDataNamespaceUri* __:: Output namespace for objects that will be written to GeoWave. *-odt, --outputDataTypeId* __:: Output Data ID assigned to objects that will be written to GeoWave. *-oop, --outputHdfsOutputPath* __:: Output HDFS file path. *-oid, --outputIndexId* __:: Output index for objects that will be written to GeoWave. *-pdt, --partitionDistanceThresholds* __:: Comma-separated list of distance thresholds, per dimension. *-pdu, --partitionGeometricDistanceUnit* __:: Geometric distance unit (m=meters,km=kilometers, see symbols for javax.units.BaseUnit). *-pms, --partitionMaxMemberSelection* __:: Maximum number of members selected from a partition. *-pdr, --partitionPartitionDecreaseRate* __:: Rate of decrease for precision (within (0,1]). *-pp, --partitionPartitionPrecision* __:: Partition precision. *-pc, --partitionPartitionerClass* __:: Index identifier for centroids. 
*-psp, --partitionSecondaryPartitionerClass* __:: Perform secondary partitioning with the provided class. [[analytic-dbscan-examples]] ==== EXAMPLES Run through 5 max iterations (`-cmi`), with a minimum cluster size of 10 (`-cms`), min HDFS input split is 2 (`-emn`), max HDFS input split is 6 (`-emx`), max search distance is 1000 meters (`-pmd`), reducer count is 4 (`-orc`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the yarn job tracker is at `localhost:8032` (`-jobtracker`), the temporary files needed by this job are stored in `hdfs:/host:port//user/rwgdrummer` (`-hdfsbase`), the data type used is `gpxpoint` (`-query.typeNames`), and the data store connection parameters are loaded from `my_store`. geowave analytic dbscan -cmi 5 -cms 10 -emn 2 -emx 6 -pmd 1000 -orc 4 -hdfs localhost:53000 -jobtracker localhost:8032 -hdfsbase /user/rwgdrummer --query.typeNames gpxpoint my_store [[analytic-dbscan-execution]] ==== EXECUTION DBSCAN uses GeoWaveInputFormat to load data from GeoWave into HDFS. You can use the extract query parameter to limit the records used in the analytic. It iteratively calls Nearest Neighbor to execute a sequence of concave hulls. The hulls are saved into sequence files written to a temporary HDFS directory, and then read in again for the next DBSCAN iteration. After completion, the data is written back from HDFS to Accumulo using a job called the "input load runner". 
*$$*$$ --featureType* __:: The name of the feature type to run a KDE on. *$$*$$ --minLevel* __:: The minimum zoom level to run a KDE at. *$$*$$ --maxLevel* __:: The maximum zoom level to run a KDE at. *--minSplits* __:: The minimum partitions for the input data. *--maxSplits* __:: The maximum partitions for the input data. *--tileSize* __:: The size of output tiles. *--cqlFilter* __:: An optional CQL filter applied to the input data. *--indexName* __:: An optional index to filter the input data. *--outputIndex* __:: An optional index for output data store. Only spatial index type is supported. *--hdfsHostPort* __:: The HDFS host and port. *$$*$$ --jobSubmissionHostPort* __:: The job submission tracker host and port in the format `hostname:port`. [[analytic-kde-examples]] ==== EXAMPLES Perform a Kernel Density Estimation using a local resource manager at port 8032 on the `gdeltevent` type. The KDE should be run at zoom levels 5-26 and that the new raster generated should be under the type name `gdeltevent_kde`. Finally, the input and output data store is called `gdelt`. geowave analytic kde --featureType gdeltevent --jobSubmissionHostPort localhost:8032 --minLevel 5 --maxLevel 26 --coverageName gdeltevent_kde gdelt gdelt ================================================ FILE: docs/content/commands/manpages/analytic/geowave-kdespark.txt ================================================ //:= geowave-analytic-kdespark(1) :doctype: manpage [[analytic-kdespark-name]] ==== NAME geowave-analytic-kdespark - Kernel Density Estimate using Spark [[analytic-kdespark-synopsis]] ==== SYNOPSIS geowave analytic kdespark [options] [[analytic-kdespark-description]] ==== DESCRIPTION This command runs a Kernel Density Estimate analytic on GeoWave data using Apache Spark. [[analytic-kdespark-options]] ==== OPTIONS *$$*$$ --coverageName* __:: The output coverage name. *$$*$$ --featureType* __:: The name of the feature type to run a KDE on. 
*$$*$$ --minLevel* __:: The minimum zoom level to run a KDE at. *$$*$$ --maxLevel* __:: The maximum zoom level to run a KDE at. *--minSplits* __:: The minimum partitions for the input data. *--maxSplits* __:: The maximum partitions for the input data. *--tileSize* __:: The size of output tiles. *--cqlFilter* __:: An optional CQL filter applied to the input data. *--indexName* __:: An optional index name to filter the input data. *--outputIndex* __:: An optional index for output data store. Only spatial index type is supported. *-n, --name* __:: The Spark application name. *-ho, --host* __:: The Spark driver host. *-m, --master* __:: The Spark master designation. [[analytic-kdespark-examples]] ==== EXAMPLES Perform a Kernel Density Estimation using a local spark cluster on the `gdeltevent` type. The KDE should be run at zoom levels 5-26 and that the new raster generated should be under the type name `gdeltevent_kde`. Finally, the input and output data store is called `gdelt`. geowave analytic kdespark --featureType gdeltevent -m local --minLevel 5 --maxLevel 26 --coverageName gdeltevent_kde gdelt gdelt ================================================ FILE: docs/content/commands/manpages/analytic/geowave-kmeansjump.txt ================================================ //:= geowave-analytic-kmeansjump(1) :doctype: manpage [[analytic-kmeansjump-name]] ==== NAME geowave-analytic-kmeansjump - KMeans Clustering using Jump Method [[analytic-kmeansjump-synopsis]] ==== SYNOPSIS geowave analytic kmeansjump [options] [[analytic-kmeansjump-description]] ==== DESCRIPTION This command executes a KMeans Clustering analytic using a Jump Method. [[analytic-kmeansjump-options]] ==== OPTIONS *-conf, --mapReduceConfigFile* __:: MapReduce configuration file. *$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* __:: Fully qualified path to the base directory in HDFS. 
*$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* __:: [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`. *$$*$$ -resourceman, --mapReduceYarnResourceManager* __:: [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`. *-hdfs, --mapReduceHdfsHostPort* __:: HDFS hostname and port in the format `hostname:port`. *--cdf, --commonDistanceFunctionClass* __:: Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`. *$$*$$ --query.typeNames* __:: The comma-separated list of types to query; by default all types are used. *--query.auth* __:: The comma-separated list of authorizations used during extract; by default all authorizations are used. *--query.index* __:: The specific index to query; by default one is chosen for each adapter. *$$*$$ -emx, --extractMaxInputSplit* __:: Maximum HDFS input split size. *$$*$$ -emn, --extractMinInputSplit* __:: Minimum HDFS input split size. *-eq, --extractQuery* __:: Query. *-ofc, --outputOutputFormat* __:: Output format class. *-ifc, --inputFormatClass* __:: Input format class. *-orc, --outputReducerCount* __:: Number of reducers for output. *-cce, --centroidExtractorClass* __:: Centroid extractor class that implements `org.locationtech.geowave.analytics.extract.CentroidExtractor`. *-cid, --centroidIndexId* __:: Index to use for centroids. *-cfc, --centroidWrapperFactoryClass* __:: A factory class that implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`. *-czl, --centroidZoomLevel* __:: Zoom level for centroids. *-cct, --clusteringConverganceTolerance* __:: Convergence tolerance. *$$*$$ -cmi, --clusteringMaxIterations* __:: Maximum number of iterations when finding optimal clusters. *-crc, --clusteringMaxReducerCount* __:: Maximum clustering reducer count. *$$*$$ -zl, --clusteringZoomLevels* __:: Number of zoom levels to process. 
*-dde, --commonDimensionExtractClass* __:: Dimension extractor class that implements `org.locationtech.geowave.analytics.extract.DimensionExtractor`. *-ens, --extractDataNamespaceUri* __:: Output data namespace URI. *-ede, --extractDimensionExtractClass* __:: Class to extract dimensions into a simple feature output. *-eot, --extractOutputDataTypeId* __:: Output data type ID. *-erc, --extractReducerCount* __:: Number of reducers For initial data extraction and de-duplication. *-b, --globalBatchId* __:: Batch ID. *-pb, --globalParentBatchId* __:: Parent Batch ID. *-hns, --hullDataNamespaceUri* __:: Data type namespace for a centroid item. *-hdt, --hullDataTypeId* __:: Data type ID for a centroid item. *-hid, --hullIndexId* __:: Index to use for centroids. *-hpe, --hullProjectionClass* __:: Class to project on to 2D space. Implements `org.locationtech.geowave.analytics.tools.Projection`. *-hrc, --hullReducerCount* __:: Centroid reducer count. *-hfc, --hullWrapperFactoryClass* __:: Class to create analytic item to capture hulls. Implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`. *$$*$$ -jkp, --jumpKplusplusMin* __:: The minimum K when K-means parallel takes over sampling. *$$*$$ -jrc, --jumpRangeOfCentroids* __:: Comma-separated range of centroids (e.g. 2,100). [[analytic-kmeansjump-examples]] ==== EXAMPLES The minimum clustering iterations is 15 (`-cmi`), the zoom level is 1 (`-zl`), the maximum HDFS input split is 4000 (`-emx`), the minimum HDFS input split is 100 (`-emn`), the temporary files needed by this job are stored in `hdfs:/host:port/user/rwgdrummer/temp_dir_kmeans` (`-hdfsbase`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the yarn job tracker is at `localhost:8032` (`-jobtracker`), the type used is 'hail' (`query.typeNames`), the minimum K for K-means parallel sampling is 3 (`-jkp`), the comma separated range of centroids is 4,8 (`-jrc`), and the data store parameters are loaded from `my_store`. 
geowave analytic kmeansjump -cmi 15 -zl 1 -emx 4000 -emn 100 -hdfsbase /usr/rwgdrummer/temp_dir_kmeans -hdfs localhost:53000 -jobtracker localhost:8032 --query.typeNames hail -jkp 3 -jrc 4,8 my_store [[analytic-kmeansjump-execution]] ==== EXECUTION KMeansJump uses most of the same parameters from KMeansParallel. It tries every K value given (-jrc) to find the value with least entropy. The other value, `jkp`, will specify which K values should use K-means parallel for sampling versus a single sampler (which uses a random sample). For instance, if you specify 4,8 for `jrc` and 6 for `jkp`, then K=4,5 will use the K-means parallel sampler, while 6,7,8 will use the single sampler. KMeansJump executes by executing several iterations, running the sampler (described above, which also calls the normal K-means algorithm to determine centroids) and then executing a K-means distortion job, which calculates the entropy of the calculated centroids. Look at the `EXECUTION` documentation for the `kmeansparallel` command for discussion of output, tolerance, and performance variables. ================================================ FILE: docs/content/commands/manpages/analytic/geowave-kmeansparallel.txt ================================================ //:= geowave-analytic-kmeansparallel(1) :doctype: manpage [[analytic-kmeansparallel-name]] ==== NAME geowave-analytic-kmeansparallel - K-means Parallel Clustering [[analytic-kmeansparallel-synopsis]] ==== SYNOPSIS geowave analytic kmeansparallel [options] [[analytic-kmeansparallel-description]] ==== DESCRIPTION This command executes a K-means Parallel Clustering analytic. [[analytic-kmeansparallel-options]] ==== OPTIONS *-conf, --mapReduceConfigFile* __:: MapReduce configuration file. *$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* __:: Fully qualified path to the base directory in HDFS. *$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* __:: [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`. 
*$$*$$ -resourceman, --mapReduceYarnResourceManager* __:: [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`. *-hdfs, --mapReduceHdfsHostPort* __:: HDFS hostname and port in the format `hostname:port`. *--cdf, --commonDistanceFunctionClass* __:: Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`. *$$*$$ --query.typeNames* __:: The comma-separated list of types to query; by default all types are used. *--query.auth* __:: The comma-separated list of authorizations used during extract; by default all authorizations are used. *--query.index* __:: The specific index to query; by default one is chosen for each adapter. *$$*$$ -emx, --extractMaxInputSplit* __:: Maximum HDFS input split size. *$$*$$ -emn, --extractMinInputSplit* __:: Minimum HDFS input split size. *-eq, --extractQuery* __:: Query. *-ofc, --outputOutputFormat* __:: Output format class. *-ifc, --inputFormatClass* __:: Input format class. *-orc, --outputReducerCount* __:: Number of reducers for output. *-cce, --centroidExtractorClass* __:: Centroid extractor class that implements `org.locationtech.geowave.analytics.extract.CentroidExtractor`. *-cid, --centroidIndexId* __:: Index to use for centroids. *-cfc, --centroidWrapperFactoryClass* __:: A factory class that implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`. *-czl, --centroidZoomLevel* __:: Zoom level for centroids. *-cct, --clusteringConverganceTolerance* __:: Convergence tolerance. *$$*$$ -cmi, --clusteringMaxIterations* __:: Maximum number of iterations when finding optimal clusters. *-crc, --clusteringMaxReducerCount* __:: Maximum clustering reducer count. *$$*$$ -zl, --clusteringZoomLevels* __:: Number of zoom levels to process. *-dde, --commonDimensionExtractClass* __:: Dimension extractor class that implements `org.locationtech.geowave.analytics.extract.DimensionExtractor`. 
*-ens, --extractDataNamespaceUri* __:: Output data namespace URI. *-ede, --extractDimensionExtractClass* __:: Class to extract dimensions into a simple feature output. *-eot, --extractOutputDataTypeId* __:: Output data type ID. *-erc, --extractReducerCount* __:: Number of reducers For initial data extraction and de-duplication. *-b, --globalBatchId* __:: Batch ID. *-pb, --globalParentBatchId* __:: Parent Batch ID. *-hns, --hullDataNamespaceUri* __:: Data type namespace for a centroid item. *-hdt, --hullDataTypeId* __:: Data type ID for a centroid item. *-hid, --hullIndexId* __:: Index to use for centroids. *-hpe, --hullProjectionClass* __:: Class to project on to 2D space. Implements `org.locationtech.geowave.analytics.tools.Projection`. *-hrc, --hullReducerCount* __:: Centroid reducer count. *-hfc, --hullWrapperFactoryClass* __:: Class to create analytic item to capture hulls. Implements `org.locationtech.geowave.analytics.tools.AnalyticItemWrapperFactory`. *$$*$$ -sxs, --sampleMaxSampleSize* __:: Maximum sample size. *$$*$$ -sms, --sampleMinSampleSize* __:: Minimum sample size. *$$*$$ -ssi, --sampleSampleIterations* __:: Minimum number of sample iterations. [[analytic-kmeansparallel-examples]] ==== EXAMPLES The minimum clustering iterations is 15 (`-cmi`), the zoom level is 1 (`-zl`), the maximum HDFS input split is 4000 (`-emx`), the minimum HDFS input split is 100 (`-emn`), the temporary files needed by this job are stored in `hdfs:/host:port/user/rwgdrummer/temp_dir_kmeans` (`-hdfsbase`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the Yarn job tracker is at `localhost:8032` (`-jobtracker`), the type used is 'hail' (`-query.typeNames`), the minimum sample size is 4 (`-sms`, which is kmin), the maximum sample size is 8 (`-sxs`, which is kmax), the minimum number of sampling iterations is 10 (`-ssi`), and the data store parameters are loaded from `my_store`. 
geowave analytic kmeansparallel -cmi 15 -zl 1 -emx 4000 -emn 100 -hdfsbase /usr/rwgdrummer/temp_dir_kmeans -hdfs localhost:53000 -jobtracker localhost:8032 --query.typeNames hail -sms 4 -sxs 8 -ssi 10 my_store [[analytic-kmeansparallel-execution]] ==== EXECUTION K-means parallel tries to identify the optimal K (between `-sms` and `-sxs`) for a set of zoom levels (1 -> `-zl`). When the zoom level is 1, it will perform a normal K-means and find K clusters. If the zoom level is 2 or higher, it will take each cluster found, and then try to create sub-clusters (bounded by that cluster), identifying a new optimal K for that sub-cluster. As such, without powerful infrastructure, this approach could take a significant amount of time to complete with zoom levels higher than 1. K-means parallel executes by first executing an extraction and de-duplication on data received via `GeoWaveInputFormat`. The data is copied to HDFS for faster processing. The K-sampler job is used to pick sample centroid points. These centroids are then assigned a cost, and then weak centroids are stripped before the K-sampler is executed again. This process iterates several times, before the best centroid locations are found, which are fed into the real K-means algorithm as initial guesses. K-means iterates until the tolerance is reached (`-cct`, which defaults to 0.0001) or the max iterations is met (`-cmi`). After execution, K-means parallel writes the centroids to an output data type (`-eot`, defaults to `centroid`), and then creates an informational set of convex hulls which you can plot in GeoServer to visually identify cluster groups (`-hdt`, defaults to `convex_hull`). For tuning performance, you can set the number of reducers used in each step. Extraction/dedupe reducer count is `-erc`, clustering reducer count is `-crc`, convex hull reducer count is `-hrc`, and output reducer count is `-orc`. 
If you would like to run the algorithm multiple times, it may be useful to set the batch id (`-b`), which can be used to distinguish between multiple batches (runs). ================================================ FILE: docs/content/commands/manpages/analytic/geowave-kmeansspark.txt ================================================ //:= geowave-analytic-kmeansspark(1) :doctype: manpage [[analytic-kmeansspark-name]] ==== NAME geowave-analytic-kmeansspark - K-means Clustering via Spark ML [[analytic-kmeansspark-synopsis]] ==== SYNOPSIS geowave analytic kmeansspark [options] [[analytic-kmeansspark-description]] ==== DESCRIPTION This command executes a K-means clustering analytic via Spark ML. [[analytic-kmeansspark-options]] ==== OPTIONS *-ct, --centroidType* __:: Feature type name for centroid output. Default is `kmeans-centroids`. *-ch, --computeHullData*:: If specified, hull count, area, and density will be computed. *--cqlFilter* __:: An optional CQL filter applied to the input data. *-e, --epsilon* __:: The convergence tolerance. *-f, --featureType* __:: Feature type name to query. *-ht, --hullType* __:: Feature type name for hull output. Default is `kmeans-hulls`. *-h, --hulls*:: If specified, convex hulls will be generated. *-ho, --host* __:: The spark driver host. Default is `localhost`. *-m, --master* __:: The spark master designation. Default is `yarn`. *--maxSplits* __:: The maximum partitions for the input data. *--minSplits* __:: The minimum partitions for the input data. *-n, --name* __:: The Spark application name. Default is `KMeans Spark`. *-k, --numClusters* __:: The number of clusters to generate. Default is 8. *-i, --numIterations* __:: The number of iterations to run. Default is 20. *-t, --useTime*:: If specified, the time field from the input data will be used. 
[[analytic-kmeansspark-examples]] ==== EXAMPLES Perform a K-means analytic on a local spark cluster on the `hail` type in the `my_store` data store and output the results to the same data store: geowave analytic kmeansspark -m local -f hail my_store my_store ================================================ FILE: docs/content/commands/manpages/analytic/geowave-nn.txt ================================================ //:= geowave-analytic-nn(1) :doctype: manpage [[analytic-nn-name]] ==== NAME geowave-analytic-nn - Nearest Neighbors [[analytic-nn-synopsis]] ==== SYNOPSIS geowave analytic nn [options] [[analytic-nn-description]] ==== DESCRIPTION This command executes a Nearest Neighbors analytic. This is similar to DBScan, with less arguments. Nearest neighbor just dumps all near neighbors for every feature to a list of pairs. Most developers will want to extend the framework to add their own extensions. [[analytic-nn-options]] ==== OPTIONS *-conf, --mapReduceConfigFile* __:: MapReduce configuration file. *$$*$$ -hdfsbase, --mapReduceHdfsBaseDir* __:: Fully qualified path to the base directory in HDFS. *$$*$$ -jobtracker, --mapReduceJobtrackerHostPort* __:: [REQUIRED (or `-resourceman`)] Hadoop job tracker hostname and port in the format `hostname:port`. *$$*$$ -resourceman, --mapReduceYarnResourceManager* __:: [REQUIRED (or `-jobtracker`)] Yarn resource manager hostname and port in the format `hostname:port`. *-hdfs, --mapReduceHdfsHostPort* __:: HDFS hostname and port in the format `hostname:port`. *--cdf, --commonDistanceFunctionClass* __:: Distance function class that implements `org.locationtech.geowave.analytics.distance.DistanceFn`. *$$*$$ --query.typeNames* __:: The comma-separated list of types to query; by default all types are used. *--query.auth* __:: The comma-separated list of authorizations used during extract; by default all authorizations are used. *--query.index* __:: The specific index to query; by default one is chosen for each adapter. 
*$$*$$ -emx, --extractMaxInputSplit* __:: Maximum HDFS input split size. *$$*$$ -emn, --extractMinInputSplit* __:: Minimum HDFS input split size. *-eq, --extractQuery* __:: Query. *-ofc, --outputOutputFormat* __:: Output format class. *-ifc, --inputFormatClass* __:: Input format class. *-orc, --outputReducerCount* __:: Number of reducers for output. *$$*$$ -oop, --outputHdfsOutputPath* __:: Output HDFS file path. *-pdt, --partitionDistanceThresholds* __:: Comma-separated list of distance thresholds, per dimension. *-pdu, --partitionGeometricDistanceUnit* __:: Geometric distance unit (m=meters, km=kilometers, see symbols for javax.units.BaseUnit). *$$*$$ -pmd, --partitionMaxDistance* __:: Maximum partition distance. *-pms, --partitionMaxMemberSelection* __:: Maximum number of members selected from a partition. *-pp, --partitionPartitionPrecision* __:: Partition precision. *-pc, --partitionPartitionerClass* __:: Perform primary partitioning for centroids with the provided class. *-psp, --partitionSecondaryPartitionerClass* __:: Perform secondary partitioning for centroids with the provided class. [[analytic-nn-examples]] ==== EXAMPLES The minimum HDFS input split is 2 (`-emn`), maximum HDFS input split is 6 (`-emx`), maximum search distance is 1000 meters (`-pmd`), the sequence file output directory is `hdfs://host:port/user/rwgdrummer_out`, reducer count is 4 (`-orc`), the HDFS IPC port is `localhost:53000` (`-hdfs`), the Yarn job tracker is at `localhost:8032` (`-jobtracker`), the temporary files needed by this job are stored in `hdfs://host:port/user/rwgdrummer` (`-hdfsbase`), the input type is `gpxpoint` (`--query.typeNames`), and the data store parameters are loaded from `my_store`. 
geowave analytic nn -emn 2 -emx 6 -pmd 1000 -oop /user/rwgdrummer_out -orc 4 -hdfs localhost:53000 -jobtracker localhost:8032 -hdfsbase /user/rwgdrummer --query.typeNames gpxpoint my_store [[analytic-nn-execution]] ==== EXECUTION To execute nearest neighbor search in GeoWave, we use the concept of a "partitioner" to partition all data on the hilbert curve into square segments for the purposes of parallelizing the search. The default partitioner will multiply this value by 2 and use that for the actual partition sizes. Because of this, the terminology is a bit confusing, but the `-pmd` option is actually the most important variable here, describing the max distance for a point to be considered a neighbor to another point. ================================================ FILE: docs/content/commands/manpages/analytic/geowave-spatialjoin.txt ================================================ //:= geowave-analytic-spatialjoin(1) :doctype: manpage [[analytic-spatialjoin-name]] ==== NAME geowave-analytic-spatialjoin - Spatial join using Spark [[analytic-spatialjoin-synopsis]] ==== SYNOPSIS geowave analytic spatialjoin [options] [[analytic-spatialjoin-description]] ==== DESCRIPTION This command executes a spatial join, taking two input types and outputting features from each side that match a given predicate. [[analytic-spatialjoin-options]] ==== OPTIONS *-n, --name* __:: The Spark application name. Default is `GeoWave Spark SQL`. *-ho, --host* __:: The Spark driver host. Default is `localhost`. *-m, --master* __:: The Spark master designation. Default is `yarn`. *-pc, --partCount* __:: The default partition count to set for Spark RDDs. Should be big enough to support the largest RDD that will be used. Sets `spark.default.parallelism`. *-lt, --leftTypeName* __:: Feature type name of left store to use in join. *-ol, --outLeftTypeName* __:: Feature type name of left join results. *-rt, --rightTypeName* __:: Feature type name of right store to use in join. 
*-or, --outRightTypeName* __:: Feature type name of right join results. *-p, --predicate* __:: Name of the UDF function to use when performing spatial join. Default is `GeomIntersects`. *-r, --radius* __:: Used for distance join predicate and other spatial operations that require a scalar radius. Default is 0.01. *-not, --negative*:: Used for testing a negative result from geometry predicate. i.e `GeomIntersects() == false`. [[analytic-spatialjoin-examples]] ==== EXAMPLES Using a local Spark cluster, join all features from a `hail` data type in the `my_store` store that intersect features from a `boundary` type in the `other_store` store and output the left results to `left` and `right` types in the `my_store` data store. geowave analytic spatialjoin -m local -lt hail -rt boundary -ol left -or right my_store other_store my_store ================================================ FILE: docs/content/commands/manpages/analytic/geowave-sql.txt ================================================ //:= geowave-analytic-sql(1) :doctype: manpage [[analytic-sql-name]] ==== NAME geowave-analytic-sql - SparkSQL queries [[analytic-sql-synopsis]] ==== SYNOPSIS geowave analytic sql [options] [[analytic-sql-description]] ==== DESCRIPTION This command executes a Spark SQL query against a given data store, e.g. `select * from [|] where `. An alternate way of querying vector data is by using the `vector query` command, which does not use Spark, but provides a more robust set of querying capabilities. [[analytic-sql-options]] ==== OPTIONS *-n, --name* __:: The Spark application name. Default is `GeoWave Spark SQL`. *-ho, --host* __:: The Spark driver host. Default is `localhost`. *-m, --master* __:: The Spark master designation. Default is `yarn`. *--csv* __:: The output CSV file name. *--out* __:: The output data store name. *--outtype* __:: The output type to output results to. *-s, --show* __:: Number of result rows to display. Default is 20. 
[[analytic-sql-examples]] ==== EXAMPLES Select all features from the `hail` type in the `my_store` data store using a local Spark cluster: geowave analytic sql -m local "select * from my_store|hail" ================================================ FILE: docs/content/commands/manpages/config/geowave-aws.txt ================================================ //:= geowave-config-aws(1) :doctype: manpage [[config-aws-name]] ==== NAME geowave-config-aws - configure GeoWave CLI for AWS S3 connections [[config-aws-synopsis]] ==== SYNOPSIS geowave config aws [[config-aws-description]] ==== DESCRIPTION This command creates a local configuration for AWS S3 connections that is used by commands that interface with S3. [[config-aws-examples]] ==== EXAMPLES Configure GeoWave to use an S3 bucket on `us-west-2` called `mybucket`: geowave config aws https://s3.us-west-2.amazonaws.com/mybucket ================================================ FILE: docs/content/commands/manpages/config/geowave-geoserver.txt ================================================ //:= geowave-config-geoserver(1) :doctype: manpage [[config-geoserver-name]] ==== NAME geowave-config-geoserver - configure GeoWave CLI to connect to a GeoServer instance [[config-geoserver-synopsis]] ==== SYNOPSIS geowave config geoserver [options] [[config-geoserver-description]] ==== DESCRIPTION This command creates a local configuration for connecting to GeoServer which is used by `geoserver` or `gs` commands. [[config-geoserver-options]] ==== OPTIONS *-p, --password* __:: GeoServer Password - Can be specified as 'pass:', 'file:', 'propfile::', 'env:', or stdin *-u, --username* __:: GeoServer User *-ws, --workspace* __:: GeoServer Default Workspace [[config-geoserver-ssl-options]] ==== SSL CONFIGURATION OPTIONS *--sslKeyManagerAlgorithm* __:: Specify the algorithm to use for the keystore. *--sslKeyManagerProvider* __:: Specify the key manager factory provider. 
*--sslKeyPassword* __:: Specify the password to be used to access the server certificate from the specified keystore file. Can be specified as `pass:`, `file:`, `propfile::`, `env:`, or `stdin`. *--sslKeyStorePassword* __:: Specify the password to use to access the keystore file. Can be specified as `pass:`, `file:`, `propfile::`, `env:`, or `stdin`. *--sslKeyStorePath* __:: Specify the absolute path to where the keystore file is located on system. The keystore contains the server certificate to be loaded. *--sslKeyStoreProvider* __:: Specify the name of the keystore provider to be used for the server certificate. *--sslKeyStoreType* __:: The type of keystore file to be used for the server certificate. *--sslSecurityProtocol* __:: Specify the Transport Layer Security (TLS) protocol to use when connecting to the server. By default, the system will use TLS. *--sslTrustManagerAlgorithm* __:: Specify the algorithm to use for the truststore. *--sslTrustManagerProvider* __:: Specify the trust manager factory provider. *--sslTrustStorePassword* __:: Specify the password to use to access the truststore file. Can be specified as `pass:`, `file:`, `propfile::`, `env:`, or `stdin`. *--sslTrustStorePath* __:: Specify the absolute path to where truststore file is located on system. The truststore file is used to validate client certificates. *--sslTrustStoreProvider* __:: Specify the name of the truststore provider to be used for the server certificate. *--sslTrustStoreType* __:: Specify the type of key store used for the truststore, i.e. JKS (Java KeyStore). 
[[config-geoserver-examples]] ==== EXAMPLES Configure GeoWave to use locally running GeoServer: geowave config geoserver "http://localhost:8080/geoserver" Configure GeoWave to use GeoServer running on another host: geowave config geoserver "${HOSTNAME}:8080" Configure GeoWave to use a particular workspace on a GeoServer instance: geowave config geoserver -ws myWorkspace "http://localhost:8080/geoserver" ================================================ FILE: docs/content/commands/manpages/config/geowave-hdfs.txt ================================================ //:= geowave-config-hdfs(1) :doctype: manpage [[config-hdfs-name]] ==== NAME geowave-config-hdfs - configure the GeoWave CLI to connect to HDFS [[config-hdfs-synopsis]] ==== SYNOPSIS geowave config hdfs [[config-hdfs-description]] ==== DESCRIPTION This command creates a local configuration for HDFS connections, which is used by commands that interface with HDFS. [[config-hdfs-examples]] ==== EXAMPLES Configure GeoWave to use locally running HDFS: geowave config hdfs localhost:8020 ================================================ FILE: docs/content/commands/manpages/config/geowave-list.txt ================================================ //:= geowave-config-list(1) :doctype: manpage [[config-list-name]] ==== NAME geowave-config-list - list all configured properties [[config-list-synopsis]] ==== SYNOPSIS geowave config list [options] [[config-list-description]] ==== DESCRIPTION This command will list all properties in the local configuration. This list can be filtered with a regular expression using the `-f` or `--filter` options. A useful regular expression might be a store name, to see all of the configured properties for a particular data store. [[config-list-options]] ==== OPTIONS *-f, --filter* __:: Filter list by a regular expression. 
[[config-list-examples]] ==== EXAMPLES List all configuration properties: geowave config list List all configuration properties on a data store called `example`: geowave config list -f example ================================================ FILE: docs/content/commands/manpages/config/geowave-newcryptokey.txt ================================================ //:= geowave-config-newcryptokey(1) :doctype: manpage [[config-newcryptokey-name]] ==== NAME geowave-config-newcryptokey - generate a new security cryptography key for use with configuration properties [[config-newcryptokey-synopsis]] ==== SYNOPSIS geowave config newcryptokey [[config-newcryptokey-description]] ==== DESCRIPTION This command will generate a new security cryptography key for use with configuration properties. This is primarily used if there is a need to re-encrypt the local configurations based on a new security token, should the old one have been compromised. [[config-newcryptokey-examples]] ==== EXAMPLES Generate a new cryptography key: geowave config newcryptokey ================================================ FILE: docs/content/commands/manpages/config/geowave-set.txt ================================================ //:= geowave-config-set(1) :doctype: manpage [[config-set-name]] ==== NAME geowave-config-set - sets a property in the local configuration [[config-set-synopsis]] ==== SYNOPSIS geowave config set [options] [[config-set-description]] ==== DESCRIPTION This command sets a property in the local configuration. This can be used to update a particular configured property of a data store. [[config-set-options]] ==== OPTIONS *--password*:: Specify that the value being set is a password and should be encrypted in the configuration. 
[[config-set-examples]] ==== EXAMPLES Update the batch write size of a RocksDB data store named `example`: geowave config set store.example.opts.batchWriteSize 1000 Update the password for an Accumulo data store named `example`: geowave config set --password store.example.opts.password someNewPassword ================================================ FILE: docs/content/commands/manpages/geoserver/geowave-addcs.txt ================================================ //:= geowave-gs-cs-add(1) :doctype: manpage [[gs-cs-add-name]] ==== NAME geowave-gs-cs-add - Add a coverage store to GeoServer [[gs-cs-add-synopsis]] ==== SYNOPSIS geowave gs cs add [options] geowave geoserver coveragestore add [options] [[gs-cs-add-description]] ==== DESCRIPTION This command adds a coverage store to the configured GeoServer instance. It requires that a GeoWave store has already been added. [[gs-cs-add-options]] ==== OPTIONS *-cs, --coverageStore* __:: The name of the coverage store to add. *-histo, --equalizeHistogramOverride*:: This parameter will override the behavior to always perform histogram equalization if a histogram exists. *-interp, --interpolationOverride* __:: This will override the default interpolation stored for each layer. Valid values are 0, 1, 2, 3 for NearestNeighbor, Bilinear, Bicubic, and Bicubic (polynomial variant) respectively. *-scale, --scaleTo8Bit*:: By default, integer values will automatically be scaled to 8-bit and floating point values will not. This can be overridden setting this option. *-ws, --workspace* __:: The GeoServer workspace to add the coverage store to. 
[[gs-cs-add-examples]] ==== EXAMPLES Add a coverage store called `cov_store` to GeoServer using the `my_store` GeoWave store: geowave gs cs add -cs cov_store my_store ================================================ FILE: docs/content/commands/manpages/geoserver/geowave-addcv.txt ================================================ //:= geowave-gs-cv-add(1) :doctype: manpage [[gs-cv-add-name]] ==== NAME geowave-gs-cv-add - Add a coverage to GeoServer [[gs-cv-add-synopsis]] ==== SYNOPSIS geowave gs cv add [options] geowave geoserver coverage add [options] [[gs-cv-add-description]] ==== DESCRIPTION This command adds a coverage to the configured GeoServer instance. [[gs-cv-add-options]] ==== OPTIONS *$$*$$ -cs, --cvgstore* __:: Coverage store name. *-ws, --workspace* __:: GeoServer workspace to add the coverage to. [[gs-cv-add-examples]] ==== EXAMPLES Add a coverage called `cov` to the `cov_store` coverage store on the configured GeoServer instance: geowave gs cv add -cs cov_store cov ================================================ FILE: docs/content/commands/manpages/geoserver/geowave-addds.txt ================================================ //:= geowave-gs-ds-add(1) :doctype: manpage [[gs-cv-add-name]] ==== NAME geowave-gs-ds-add - Add a data store to GeoServer [[gs-cv-add-synopsis]] ==== SYNOPSIS geowave gs ds add [options] geowave geoserver datastore add [options] [[gs-cv-add-description]] ==== DESCRIPTION This command adds a GeoWave data store to GeoServer as a GeoWave store. [[gs-cv-add-options]] ==== OPTIONS *-ds, --datastore* __:: The name of the new GeoWave store to add to GeoServer. *-ws, --workspace* __:: The GeoServer workspace to use for the store. 
[[gs-cv-add-examples]] ==== EXAMPLES Add a GeoWave data store `example` as a GeoWave store in GeoServer called `my_store`: geowave gs ds add -ds my_store example ================================================ FILE: docs/content/commands/manpages/geoserver/geowave-addfl.txt ================================================ //:= geowave-gs-fl-add(1) :doctype: manpage [[gs-fl-add-name]] ==== NAME geowave-gs-fl-add - Add a feature layer to GeoServer [[gs-fl-add-synopsis]] ==== SYNOPSIS geowave gs fl add [options] geowave geoserver featurelayer add [options] [[gs-fl-add-description]] ==== DESCRIPTION This command adds a feature layer from a GeoWave store to the configured GeoServer instance. [[gs-fl-add-options]] ==== OPTIONS *$$*$$ -ds, --datastore* __:: The GeoWave store (on GeoServer) to add the layer from. *-ws, --workspace* __:: The GeoServer workspace to use. [[gs-fl-add-examples]] ==== EXAMPLES Add a layer called `hail` from the `my_store` GeoWave store: geowave gs fl add -ds my_store hail ================================================ FILE: docs/content/commands/manpages/geoserver/geowave-addlayer.txt ================================================ //:= geowave-gs-layer-add(1) :doctype: manpage [[gs-layer-add-name]] ==== NAME geowave-gs-layer-add - Add a GeoServer layer from the given GeoWave data store [[gs-layer-add-synopsis]] ==== SYNOPSIS geowave gs layer add [options] geowave geoserver layer add [options] [[gs-layer-add-description]] ==== DESCRIPTION This command adds a layer from the given GeoWave data store to the configured GeoServer instance. Unlike `gs fl add`, this command adds a layer directly from a GeoWave data store, automatically creating the GeoWave store for it in GeoServer. [[gs-layer-add-options]] ==== OPTIONS *-t, --typeName* __:: Add the type with the given name to GeoServer. *-a, --add* __:: Add all layers of the given type to GeoServer. Possible values are `ALL`, `RASTER`, and `VECTOR`. 
*-sld, --setStyle* _\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " 
\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
idlocationstate_namecapital_nameestablished
010POINT (-157.7989705 21.3280681)HawaiiHonolulu1845-01-01 00:00:00
12POINT (-112.125051 33.6054149)ArizonaPhoenix1889-01-01 00:00:00
217POINT (-91.11141859999999 30.441474)LouisianaBaton Rouge1880-01-01 00:00:00
323POINT (-90.1888874 32.3103284)MississippiJackson1821-01-01 00:00:00
442POINT (-97.7534014 30.3077609)TexasAustin1839-01-01 00:00:00
515POINT (-95.70803100000001 39.0130545)KansasTopeka1856-01-01 00:00:00
635POINT (-97.4791974 35.4826479)OklahomaOklahoma City1910-01-01 00:00:00
73POINT (-92.33792750000001 34.7240049)ArkansasLittle Rock1821-01-01 00:00:00
824POINT (-92.1624049 38.5711659)MissouriJefferson City1826-01-01 00:00:00
914POINT (-93.606516 41.5666699)IowaDes Moines1857-01-01 00:00:00
1022POINT (-93.10605339999999 44.9397075)MinnesotaSaint Paul1849-01-01 00:00:00
1126POINT (-96.6907283 40.800609)NebraskaLincoln1867-01-01 00:00:00
1240POINT (-100.3205385 44.3708241)South DakotaPierre1889-01-01 00:00:00
1349POINT (-104.7674045 41.1475325)WyomingCheyenne1869-01-01 00:00:00
145POINT (-104.8551114 39.7643389)ColoradoDenver1867-01-01 00:00:00
1530POINT (-105.983036 35.6824934)New MexicoSanta Fe1610-01-01 00:00:00
1643POINT (-111.920485 40.7766079)UtahSalt Lake City1858-01-01 00:00:00
1711POINT (-116.2338979 43.6008061)IdahoBoise1865-01-01 00:00:00
1836POINT (-123.0282074 44.9329915)OregonSalem1855-01-01 00:00:00
1927POINT (-119.7526546 39.1678334)NevadaCarson City1861-01-01 00:00:00
204POINT (-121.4429125 38.5615405)CaliforniaSacramento1854-01-01 00:00:00
211POINT (-134.1765792 58.3844634)AlaskaJuneau1906-01-01 00:00:00
2246POINT (-122.8938687 47.0393335)WashingtonOlympia1853-01-01 00:00:00
2325POINT (-112.0156939 46.5933579)MontanaHelena1875-01-01 00:00:00
2433POINT (-100.7670546 46.809076)North DakotaBismarck1883-01-01 00:00:00
2518POINT (-69.730692 44.3334319)MaineAugusta1832-01-01 00:00:00
2644POINT (-72.5687199 44.2739708)VermontMontpelier1805-01-01 00:00:00
2720POINT (-71.0571571 42.3133735)MassachusettsBoston1630-01-01 00:00:00
2828POINT (-71.5626055 43.2308015)New HampshireConcord1808-01-01 00:00:00
2938POINT (-71.42118050000001 41.8169925)Rhode IslandProvidence1900-01-01 00:00:00
306POINT (-72.680087 41.7656874)ConnecticutHartford1875-01-01 00:00:00
317POINT (-75.5134199 39.1564159)DelawareDover1777-01-01 00:00:00
3232POINT (-78.6450559 35.843768)North CarolinaRaleigh1792-01-01 00:00:00
3345POINT (-77.49326139999999 37.524661)VirginiaRichmond1780-01-01 00:00:00
3419POINT (-76.5046945 38.9724689)MarylandAnnapolis1694-01-01 00:00:00
3537POINT (-76.8804255 40.2821445)PennsylvaniaHarrisburg1812-01-01 00:00:00
3629POINT (-74.7741221 40.2162772)New JerseyTrenton1784-01-01 00:00:00
3731POINT (-73.8113997 42.6681399)New YorkAlbany1797-01-01 00:00:00
3834POINT (-82.99082900000001 39.9829515)OhioColumbus1816-01-01 00:00:00
3921POINT (-84.559032 42.7086815)MichiganLansing1847-01-01 00:00:00
4048POINT (-89.4064204 43.0849935)WisconsinMadison1838-01-01 00:00:00
4112POINT (-89.6708313 39.7638375)IllinoisSpringfield1837-01-01 00:00:00
4213POINT (-86.13275 39.7797845)IndianaIndianapolis1825-01-01 00:00:00
4316POINT (-84.8666254 38.1944455)KentuckyFrankfort1792-01-01 00:00:00
4441POINT (-86.7852455 36.1866405)TennesseeNashville1826-01-01 00:00:00
459POINT (-84.420604 33.7677129)GeorgiaAtlanta1868-01-01 00:00:00
4647POINT (-81.6405384 38.3560436)West VirginiaCharleston1885-01-01 00:00:00
4739POINT (-80.9375649 34.0375089)South CarolinaColumbia1786-01-01 00:00:00
488POINT (-84.25685590000001 30.4671395)FloridaTallahassee1824-01-01 00:00:00
490POINT (-86.2460375 32.343799)AlabamaMontgomery1846-01-01 00:00:00
\n", "" ], "text/plain": [ " id location state_name capital_name \\\n", "0 10 POINT (-157.7989705 21.3280681) Hawaii Honolulu \n", "1 2 POINT (-112.125051 33.6054149) Arizona Phoenix \n", "2 17 POINT (-91.11141859999999 30.441474) Louisiana Baton Rouge \n", "3 23 POINT (-90.1888874 32.3103284) Mississippi Jackson \n", "4 42 POINT (-97.7534014 30.3077609) Texas Austin \n", "5 15 POINT (-95.70803100000001 39.0130545) Kansas Topeka \n", "6 35 POINT (-97.4791974 35.4826479) Oklahoma Oklahoma City \n", "7 3 POINT (-92.33792750000001 34.7240049) Arkansas Little Rock \n", "8 24 POINT (-92.1624049 38.5711659) Missouri Jefferson City \n", "9 14 POINT (-93.606516 41.5666699) Iowa Des Moines \n", "10 22 POINT (-93.10605339999999 44.9397075) Minnesota Saint Paul \n", "11 26 POINT (-96.6907283 40.800609) Nebraska Lincoln \n", "12 40 POINT (-100.3205385 44.3708241) South Dakota Pierre \n", "13 49 POINT (-104.7674045 41.1475325) Wyoming Cheyenne \n", "14 5 POINT (-104.8551114 39.7643389) Colorado Denver \n", "15 30 POINT (-105.983036 35.6824934) New Mexico Santa Fe \n", "16 43 POINT (-111.920485 40.7766079) Utah Salt Lake City \n", "17 11 POINT (-116.2338979 43.6008061) Idaho Boise \n", "18 36 POINT (-123.0282074 44.9329915) Oregon Salem \n", "19 27 POINT (-119.7526546 39.1678334) Nevada Carson City \n", "20 4 POINT (-121.4429125 38.5615405) California Sacramento \n", "21 1 POINT (-134.1765792 58.3844634) Alaska Juneau \n", "22 46 POINT (-122.8938687 47.0393335) Washington Olympia \n", "23 25 POINT (-112.0156939 46.5933579) Montana Helena \n", "24 33 POINT (-100.7670546 46.809076) North Dakota Bismarck \n", "25 18 POINT (-69.730692 44.3334319) Maine Augusta \n", "26 44 POINT (-72.5687199 44.2739708) Vermont Montpelier \n", "27 20 POINT (-71.0571571 42.3133735) Massachusetts Boston \n", "28 28 POINT (-71.5626055 43.2308015) New Hampshire Concord \n", "29 38 POINT (-71.42118050000001 41.8169925) Rhode Island Providence \n", "30 6 POINT (-72.680087 41.7656874) Connecticut Hartford 
\n", "31 7 POINT (-75.5134199 39.1564159) Delaware Dover \n", "32 32 POINT (-78.6450559 35.843768) North Carolina Raleigh \n", "33 45 POINT (-77.49326139999999 37.524661) Virginia Richmond \n", "34 19 POINT (-76.5046945 38.9724689) Maryland Annapolis \n", "35 37 POINT (-76.8804255 40.2821445) Pennsylvania Harrisburg \n", "36 29 POINT (-74.7741221 40.2162772) New Jersey Trenton \n", "37 31 POINT (-73.8113997 42.6681399) New York Albany \n", "38 34 POINT (-82.99082900000001 39.9829515) Ohio Columbus \n", "39 21 POINT (-84.559032 42.7086815) Michigan Lansing \n", "40 48 POINT (-89.4064204 43.0849935) Wisconsin Madison \n", "41 12 POINT (-89.6708313 39.7638375) Illinois Springfield \n", "42 13 POINT (-86.13275 39.7797845) Indiana Indianapolis \n", "43 16 POINT (-84.8666254 38.1944455) Kentucky Frankfort \n", "44 41 POINT (-86.7852455 36.1866405) Tennessee Nashville \n", "45 9 POINT (-84.420604 33.7677129) Georgia Atlanta \n", "46 47 POINT (-81.6405384 38.3560436) West Virginia Charleston \n", "47 39 POINT (-80.9375649 34.0375089) South Carolina Columbia \n", "48 8 POINT (-84.25685590000001 30.4671395) Florida Tallahassee \n", "49 0 POINT (-86.2460375 32.343799) Alabama Montgomery \n", "\n", " established \n", "0 1845-01-01 00:00:00 \n", "1 1889-01-01 00:00:00 \n", "2 1880-01-01 00:00:00 \n", "3 1821-01-01 00:00:00 \n", "4 1839-01-01 00:00:00 \n", "5 1856-01-01 00:00:00 \n", "6 1910-01-01 00:00:00 \n", "7 1821-01-01 00:00:00 \n", "8 1826-01-01 00:00:00 \n", "9 1857-01-01 00:00:00 \n", "10 1849-01-01 00:00:00 \n", "11 1867-01-01 00:00:00 \n", "12 1889-01-01 00:00:00 \n", "13 1869-01-01 00:00:00 \n", "14 1867-01-01 00:00:00 \n", "15 1610-01-01 00:00:00 \n", "16 1858-01-01 00:00:00 \n", "17 1865-01-01 00:00:00 \n", "18 1855-01-01 00:00:00 \n", "19 1861-01-01 00:00:00 \n", "20 1854-01-01 00:00:00 \n", "21 1906-01-01 00:00:00 \n", "22 1853-01-01 00:00:00 \n", "23 1875-01-01 00:00:00 \n", "24 1883-01-01 00:00:00 \n", "25 1832-01-01 00:00:00 \n", "26 1805-01-01 00:00:00 \n", 
"27 1630-01-01 00:00:00 \n", "28 1808-01-01 00:00:00 \n", "29 1900-01-01 00:00:00 \n", "30 1875-01-01 00:00:00 \n", "31 1777-01-01 00:00:00 \n", "32 1792-01-01 00:00:00 \n", "33 1780-01-01 00:00:00 \n", "34 1694-01-01 00:00:00 \n", "35 1812-01-01 00:00:00 \n", "36 1784-01-01 00:00:00 \n", "37 1797-01-01 00:00:00 \n", "38 1816-01-01 00:00:00 \n", "39 1847-01-01 00:00:00 \n", "40 1838-01-01 00:00:00 \n", "41 1837-01-01 00:00:00 \n", "42 1825-01-01 00:00:00 \n", "43 1792-01-01 00:00:00 \n", "44 1826-01-01 00:00:00 \n", "45 1868-01-01 00:00:00 \n", "46 1885-01-01 00:00:00 \n", "47 1786-01-01 00:00:00 \n", "48 1824-01-01 00:00:00 \n", "49 1846-01-01 00:00:00 " ] }, "execution_count": 25, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from pandas import DataFrame\n", "\n", "# Query everything\n", "query = VectorQueryBuilder().build()\n", "results = datastore.query(query)\n", "\n", "# Load the results into a pandas dataframe\n", "dataframe = DataFrame.from_records([feature.to_dict() for feature in results])\n", "\n", "# Display the dataframe\n", "dataframe" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.0" } }, "nbformat": 4, "nbformat_minor": 2 } ================================================ FILE: examples/data/notebooks/zeppelin/GDELT-Quick-Start.json ================================================ {"paragraphs":[{"text":"%md\n## Welcome to the GeoWave KMeans GDELT Example (EMR Version).\n##### This is a live note - you can run the code yourself.\n\n### Setup\n

\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.
\nGo to the Interpreter page, and scroll down to the 'sh' section. Click on the 'edit' button.

\nSet the 'shell.command.timeout.millisecs' entry to 600000 (10 minutes).\n

\n\n### Execution\n

\nThe list of paragraphs below needs to be run sequentially.
\nStart at the top, and click the play button in each paragraph, waiting for completion.
\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked
\nwith a FINISHED indicator next to the play button when it has run without error.

\nEnjoy!\n

","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"markdown","editOnDblClick":true},"colWidth":12,"editorMode":"ace/mode/markdown","editorHide":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"HTML","data":"

Welcome to the GeoWave KMeans GDELT Example (EMR Version).

\n
This is a live note - you can run the code yourself.
\n

Setup

\n

\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.
\nGo to the Interpreter page, and scroll down to the 'sh' section. Click on the 'edit' button.

\nSet the 'shell.command.timeout.millisecs' entry to 600000 (10 minutes).\n

\n

Execution

\n

\nThe list of paragraphs below needs to be run sequentially.
\nStart at the top, and click the play button in each paragraph, waiting for completion.
\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked
\nwith a FINISHED indicator next to the play button when it has run without error.

\nEnjoy!\n

\n"}]},"apps":[],"jobName":"paragraph_1524594235256_1133111646","id":"20170814-190601_1767735731","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:6431"},{"title":"Get the Data","text":"%sh\n# download the GDELT data\ncd /mnt/tmp\nwget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/geowave-env.sh\nsource geowave-env.sh\nmkdir gdelt\ncd gdelt\nwget http://data.gdeltproject.org/events/md5sums\nfor file in `cat md5sums | cut -d' ' -f3 | grep \"^${TIME_REGEX}\"` ; \\\ndo wget http://data.gdeltproject.org/events/$file ; done\nmd5sum -c md5sums 2>&1 | grep \"^${TIME_REGEX}\"","dateUpdated":"2018-04-24T18:23:55+0000","config":{"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","title":true,"results":{},"enabled":true},"settings":{"params":{"TIME_REGEX":""},"forms":{"TIME_REGEX":{"name":"TIME_REGEX","defaultValue":"","hidden":false,"$$hashKey":"object:6847"}}},"results":{"msg":[{"type":"TEXT","data":""},{"type":"TEXT","data":""}]},"apps":[],"jobName":"paragraph_1524594235259_1133496395","id":"20170913-084103_31433354","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6432"},{"title":"Configure GeoWave Datastores and Ingest Data","text":"%sh\n\nsource /mnt/tmp/geowave-env.sh\n\n# configure geowave connection params for hbase stores \"gdelt\" and \"kmeans\"\ngeowave store add gdelt --gwNamespace geowave.gdelt -t hbase --zookeeper $HOSTNAME:2181\ngeowave store add kmeans --gwNamespace geowave.kmeans -t hbase --zookeeper $HOSTNAME:2181\n\n# configure a spatial index\ngeowave index add gdelt gdeltspatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\n\n# run the ingest for a 10x10 deg bounding box over Europe\ngeowave ingest localtogw /mnt/tmp/gdelt gdelt gdeltspatial -f gdelt \\\n--gdelt.cql \"BBOX(geometry, 0, 50, 10, 
60)\"","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235260_1131572651","id":"20170809-181755_1512238840","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6433"},{"title":"Configure GeoServer","text":"%sh\n# set up geoserver\ngeowave config geoserver \"$HOSTNAME:8000\"\n\n# add gdelt layer to geoserver\ngeowave gs layer add gdelt -id gdeltevent\n\n# enable subsampling on the gdelt layer\ncd /mnt/tmp\nwget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/SubsamplePoints.sld\ngeowave gs style add SubsamplePoints -sld /mnt/tmp/SubsamplePoints.sld\ngeowave gs style set gdeltevent --styleName SubsamplePoints\n","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235260_1131572651","id":"20170913-084818_2077241202","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6434"},{"text":"%spark\n//Load Java environment vars, and convert to Scala map\nimport scala.collection.JavaConversions._\nval jenvironmentVars = System.getenv()\n//Use environmentVars map to pull environment vars for use in spark\nval environmentVars = mapAsScalaMap(jenvironmentVars)\nfor ((k,v) <- environmentVars) println(s\"key: $k, value: $v\")\n\n//Bind the hostname to the angular frontend to be used in map creation script\nz.angularBind(\"hostname\", environmentVars.getOrElse(\"HOSTNAME\", 
\"localhost\"))","dateUpdated":"2018-04-24T18:23:55+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235260_1131572651","id":"20171127-213250_865940522","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6435"},{"text":"%spark\n//Import classes from spark\nimport org.apache.spark.api.java.JavaSparkContext\n//DataFrame = type alias Dataset\nimport org.apache.spark.sql.DataFrame\nimport spark.implicits._\n\n//Import classes from geowave\nimport org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\nimport org.locationtech.geowave.datastore.accumulo.cli.config.AccumuloRequiredOptions\nimport org.locationtech.geowave.analytic.spark.RDDOptions\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\nimport org.locationtech.geowave.core.store.query.QueryOptions\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\nimport org.locationtech.geowave.core.index.ByteArrayId","dateUpdated":"2018-04-24T18:24:33+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235260_1131572651","id":"20171127-213312_624447354","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6436"},{"text":"%spark\n//Grab hostname from environment vars\nval hostname = environmentVars.getOrElse(\"HOSTNAME\", \"invalid-host\")\nprintln(s\"hostname= $hostname\")\n\n//Setup datastores\nval input_store = new 
HBaseRequiredOptions()\ninput_store.setZookeeper(hostname + \":2181\")\ninput_store.setGeowaveNamespace(\"geowave.gdelt\")\n\nval output_store = new HBaseRequiredOptions()\noutput_store.setZookeeper(hostname + \":2181\")\noutput_store.setGeowaveNamespace(\"geowave.kmeans\")\n\n//Create instances of store plugin options, and KMeansRunner\nval input_store_plugin = input_store.createPluginOptions()\nval output_store_plugin = output_store.createPluginOptions()\nval jsc = JavaSparkContext.fromSparkContext(sc)\nval kmeans_runner = new KMeansRunner()","dateUpdated":"2018-04-24T18:23:55+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235261_1131187902","id":"20171127-213341_1095676113","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6437"},{"text":"%sh\n#clear old potential runs\ngeowave store clear kmeans\n\n# configure a spatial index\ngeowave index add kmeans gdeltspatial -t spatial --partitionStrategy round_robin --numPartitions $NUM_PARTITIONS\n","dateUpdated":"2018-04-24T18:23:55+0000","config":{"colWidth":12,"editorMode":"ace/mode/sh","results":{},"enabled":true,"editorSetting":{"language":"sh","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235261_1131187902","id":"20171127-213427_1902869877","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6438"},{"title":"Run KMeans on GDELT Subset","text":"\n//set the appropriate properties of the 
runner\nkmeans_runner.setJavaSparkContext(JavaSparkContext.fromSparkContext(sc))\nkmeans_runner.setAdapterId(\"gdeltevent\")\nkmeans_runner.setInputDataStore(input_store_plugin)\nkmeans_runner.setOutputDataStore(output_store_plugin)\nkmeans_runner.setCqlFilter(\"BBOX(geometry, 0, 50, 10, 60)\")\nkmeans_runner.setCentroidTypeName(\"mycentroids\")\nkmeans_runner.setHullTypeName(\"myhulls\")\nkmeans_runner.setGenerateHulls(true)\nkmeans_runner.setComputeHullData(true)\n\n//execute the kmeans runner\nkmeans_runner.run()\n","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"scala","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/scala","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235261_1131187902","id":"20170809-194032_1817638679","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6439"},{"title":"Add KMeans Results to GeoServer","text":"%sh\n\n# add the centroids layer\ngeowave gs layer add kmeans -id mycentroids\ngeowave gs style set mycentroids --styleName point\n\n# add the hulls layer\ngeowave gs layer add kmeans -id myhulls\ngeowave gs style set myhulls --styleName line","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235261_1131187902","id":"20170817-030121_1271873891","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6440"},{"text":"%angular\n\n

GeoWave Leaflet Map

\n
\n
\n\n","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"scala","editOnDblClick":true},"colWidth":12,"editorMode":"ace/mode/undefined","editorHide":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"ANGULAR","data":"\n

GeoWave Leaflet Map

\n
\n\n"}]},"apps":[],"jobName":"paragraph_1524594235261_1131187902","id":"20170817-030613_874309201","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6441"},{"title":"Load KMeans Centroid CSV into DataFrame","text":"%spark\n\ndef create_dataframe(adapter_name : String) : DataFrame = {\n //Create the dataframe and get a rdd for the output of kmeans\n var sf_df = new SimpleFeatureDataFrame(spark)\n val adapter_id = new ByteArrayId(adapter_name)\n \n var queryOptions = null : Option[QueryOptions]\n val adapterIt = output_store_plugin.createAdapterStore().getAdapters()\n while (adapterIt.hasNext()) {\n val adapter = adapterIt.next()\n if (adapter.getAdapterId().equals(adapter_id)) {\n val adapterForQuery = adapter\n queryOptions = Some(new QueryOptions(adapterForQuery))\n }\n }\n val loadOpts = new RDDOptions()\n loadOpts.setQueryOptions(queryOptions.getOrElse(null))\n val output_rdd = GeoWaveRDDLoader.loadRDD(sc, output_store_plugin, loadOpts))\n sf_df.init(output_store_plugin, adapter_id)\n \n return sf_df.getDataFrame(output_rdd)\n}\n\nvar df = create_dataframe(\"mycentroids\")\ndf.show()\n\n// Convert geom string to lat/long\ncase class KMeansRow(lat: Double, lon: Double, ClusterIndex : Int)\nval kmeansData = df.map(row => {\n val geom_index = row.fieldIndex(\"geom\")\n val geom = row.getString(geom_index)\n val cluster_index = row.getInt(row.fieldIndex(\"ClusterIndex\"))\n val lat_start = geom.lastIndexOf(\" \") + 1\n val lat_end = geom.lastIndexOf(\")\")\n val lat = geom.substring(lat_start, lat_end)\n val lonStart = geom.indexOf(\"(\") + 1\n val lonStop = geom.indexOf(\" \", lonStart)\n val lon = geom.substring(lonStart, lonStop)\n KMeansRow(lat=lat.toDouble, lon=lon.toDouble, ClusterIndex=cluster_index)\n })\n// send the results to the front end (Leaflet map)\nz.angularBind(\"pins\", kmeansData.collect())\n// register a view for SQL 
queries\nkmeansData.createOrReplaceTempView(\"kmeans\")","dateUpdated":"2018-04-24T18:25:14+0000","config":{"tableHide":false,"editorSetting":{"language":"scala","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/scala","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235262_1132342148","id":"20170809-201803_119430460","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6442"},{"text":"%sql\nselect lat as Latitude, lon as Longitude from kmeans","dateUpdated":"2018-04-24T18:23:55+0000","config":{"editorSetting":{"language":"sql","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sql","editorHide":false,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594235262_1132342148","id":"20171127-213757_297409837","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6443"},{"text":"%angular\r\n\r\n\r\n

Client-side Integration

\r\n
\r\n\r\n","dateUpdated":"2018-04-24T18:23:55+0000","config":{"tableHide":false,"editorSetting":{"language":"text","editOnDblClick":true},"colWidth":8,"editorMode":"ace/mode/undefined","editorHide":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"ANGULAR","data":"\r\n

Client-side Integration

\r\n
\r\n\r\n"}]},"apps":[],"jobName":"paragraph_1524594235262_1132342148","id":"20170809-021534_2122057818","dateCreated":"2018-04-24T18:23:55+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:6444"}],"name":"GDELT-Quick-Start","id":"2DCXNJ4J6","angularObjects":{"2BRWU4WXC:shared_process":[],"2AM1YV5CU:shared_process":[],"2AJXGMUUJ:shared_process":[],"2ANGGHHMQ:shared_process":[],"2AKK3QQXU:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}} ================================================ FILE: examples/data/notebooks/zeppelin/GeoWave-GPX-Demo.json ================================================ {"paragraphs":[{"text":"%md\n## Welcome to the GeoWave GPX KMeans Example (EMR Version).\n##### This is a live note - you can run the code yourself.\n\n### Setup\n

\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.
\nGo to the Interpreter page, and scroll down to the 'sh' section. Click on the 'edit' button.

\nSet the 'shell.command.timeout.millisecs' entry to 600000 (10 minutes).\n

\n\n### Execution\n

\nThe list of paragraphs below needs to be run sequentially.
\nStart at the top, and click the play button in each paragraph, waiting for completion.
\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked
\nwith a FINISHED indicator next to the play button when it has run without error.

\nEnjoy!\n

","dateUpdated":"2018-04-24T18:26:03+0000","config":{"tableHide":false,"editorSetting":{"language":"markdown","editOnDblClick":true},"colWidth":12,"editorMode":"ace/mode/markdown","editorHide":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"HTML","data":"
\n

Welcome to the GeoWave GPX KMeans Example (EMR Version).

\n
This is a live note - you can run the code yourself.
\n

Setup

\n

\nThe only prerequisite to running this example is increasing your shell interpreter's timeout.
\nGo to the Interpreter page, and scroll down to the 'sh' section. Click on the 'edit' button.

\nSet the 'shell.command.timeout.millisecs' entry to 600000 (10 minutes).\n

\n

Execution

\n

\nThe list of paragraphs below needs to be run sequentially.
\nStart at the top, and click the play button in each paragraph, waiting for completion.
\nEach paragraph is labeled and commented so you can tell what's happening. A paragraph will be marked
\nwith a FINISHED indicator next to the play button when it has run without error.

\nEnjoy!\n

\n
"}]},"apps":[],"jobName":"paragraph_1524594363559_-596518550","id":"20170814-190601_1767735731","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:3508"},{"title":"Import GPX Data","text":"%sh\ns3-dist-cp --src=s3://geowave-gpx-data/gpx --dest=hdfs://$HOSTNAME:8020/tmp/\n\n/opt/accumulo/bin/accumulo shell -u root -p secret -e \"importtable geowave.germany_gpx_SPATIAL_IDX /tmp/spatial\"\n/opt/accumulo/bin/accumulo shell -u root -p secret -e \"importtable geowave.germany_gpx_GEOWAVE_METADATA /tmp/metadata\"","dateUpdated":"2018-04-24T18:26:03+0000","config":{"tableHide":false,"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{"SOUTH":"","EAST":"","NORTH":"","WEST":""},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"17/11/27 19:39:37 INFO s3distcp.S3DistCp: Running with args: -libjars /usr/share/aws/emr/s3-dist-cp/lib/guava-15.0.jar,/usr/share/aws/emr/s3-dist-cp/lib/s3-dist-cp-2.7.0.jar,/usr/share/aws/emr/s3-dist-cp/lib/s3-dist-cp.jar --src=s3://geowave-gpx-data/gpx --dest=hdfs://ip-10-0-0-36:8020/tmp/ \n17/11/27 19:39:37 INFO s3distcp.S3DistCp: S3DistCp args: --src=s3://geowave-gpx-data/gpx --dest=hdfs://ip-10-0-0-36:8020/tmp/ \n17/11/27 19:39:37 INFO s3distcp.S3DistCp: Using output path 'hdfs:/tmp/eb857b4f-b23c-4303-8ec4-13ad7f90b49c/output'\n17/11/27 19:39:37 INFO s3distcp.S3DistCp: GET http://169.254.169.254/latest/meta-data/placement/availability-zone result: us-east-1f\n17/11/27 19:39:40 INFO s3distcp.S3DistCp: DefaultAWSCredentialsProviderChain is used to create AmazonS3Client. 
KeyId: ASIAJA5NHL2X27HCG4FA\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Skipping key 'gpx/' because it ends with '/'\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Skipping key 'gpx/metadata/' because it ends with '/'\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Skipping key 'gpx/spatial/' because it ends with '/'\n17/11/27 19:39:41 INFO s3distcp.FileInfoListing: Opening new file: hdfs:/tmp/eb857b4f-b23c-4303-8ec4-13ad7f90b49c/files/1\n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Created 1 files to copy 64 files \n17/11/27 19:39:41 INFO s3distcp.S3DistCp: Reducer number: 63\n17/11/27 19:39:41 INFO impl.TimelineClientImpl: Timeline service address: http://ip-10-0-0-36.ec2.internal:8188/ws/v1/timeline/\n17/11/27 19:39:41 INFO client.RMProxy: Connecting to ResourceManager at ip-10-0-0-36.ec2.internal/10.0.0.36:8032\n17/11/27 19:39:41 INFO input.FileInputFormat: Total input paths to process : 1\n17/11/27 19:39:41 INFO mapreduce.JobSubmitter: number of splits:1\n17/11/27 19:39:41 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1511810909522_0002\n17/11/27 19:39:42 INFO impl.YarnClientImpl: Submitted application application_1511810909522_0002\n17/11/27 19:39:42 INFO mapreduce.Job: The url to track the job: http://ip-10-0-0-36.ec2.internal:20888/proxy/application_1511810909522_0002/\n17/11/27 19:39:42 INFO mapreduce.Job: Running job: job_1511810909522_0002\n17/11/27 19:39:48 INFO mapreduce.Job: Job job_1511810909522_0002 running in uber mode : false\n17/11/27 19:39:48 INFO mapreduce.Job: map 0% reduce 0%\n17/11/27 19:39:52 INFO mapreduce.Job: map 100% reduce 0%\n17/11/27 19:39:57 INFO mapreduce.Job: map 100% reduce 6%\n17/11/27 19:39:58 INFO mapreduce.Job: map 100% reduce 10%\n17/11/27 19:39:59 INFO mapreduce.Job: map 100% reduce 13%\n17/11/27 19:40:00 INFO mapreduce.Job: map 100% reduce 16%\n17/11/27 19:40:01 INFO mapreduce.Job: map 100% reduce 22%\n17/11/27 19:40:02 INFO mapreduce.Job: map 100% reduce 25%\n17/11/27 19:40:03 INFO mapreduce.Job: map 100% reduce 
30%\n17/11/27 19:40:04 INFO mapreduce.Job: map 100% reduce 37%\n17/11/27 19:40:05 INFO mapreduce.Job: map 100% reduce 51%\n17/11/27 19:40:06 INFO mapreduce.Job: map 100% reduce 57%\n17/11/27 19:40:08 INFO mapreduce.Job: map 100% reduce 68%\n17/11/27 19:40:09 INFO mapreduce.Job: map 100% reduce 71%\n17/11/27 19:40:10 INFO mapreduce.Job: map 100% reduce 79%\n17/11/27 19:40:11 INFO mapreduce.Job: map 100% reduce 81%\n17/11/27 19:40:12 INFO mapreduce.Job: map 100% reduce 83%\n17/11/27 19:40:13 INFO mapreduce.Job: map 100% reduce 86%\n17/11/27 19:40:14 INFO mapreduce.Job: map 100% reduce 87%\n17/11/27 19:40:15 INFO mapreduce.Job: map 100% reduce 95%\n17/11/27 19:40:16 INFO mapreduce.Job: map 100% reduce 97%\n17/11/27 19:40:17 INFO mapreduce.Job: map 100% reduce 98%\n17/11/27 19:40:18 INFO mapreduce.Job: map 100% reduce 100%\n17/11/27 19:40:40 INFO mapreduce.Job: Job job_1511810909522_0002 completed successfully\n17/11/27 19:40:40 INFO mapreduce.Job: Counters: 54\n\tFile System Counters\n\t\tFILE: Number of bytes read=5864\n\t\tFILE: Number of bytes written=8397776\n\t\tFILE: Number of read operations=0\n\t\tFILE: Number of large read operations=0\n\t\tFILE: Number of write operations=0\n\t\tHDFS: Number of bytes read=8501\n\t\tHDFS: Number of bytes written=11363269772\n\t\tHDFS: Number of read operations=321\n\t\tHDFS: Number of large read operations=0\n\t\tHDFS: Number of write operations=190\n\t\tS3: Number of bytes read=11363269772\n\t\tS3: Number of bytes written=0\n\t\tS3: Number of read operations=0\n\t\tS3: Number of large read operations=0\n\t\tS3: Number of write operations=0\n\tJob Counters \n\t\tLaunched map tasks=1\n\t\tLaunched reduce tasks=63\n\t\tRack-local map tasks=1\n\t\tTotal time spent by all maps in occupied slots (ms)=107136\n\t\tTotal time spent by all reduces in occupied slots (ms)=66421344\n\t\tTotal time spent by all map tasks (ms)=2232\n\t\tTotal time spent by all reduce tasks (ms)=691889\n\t\tTotal vcore-milliseconds taken by all map 
tasks=2232\n\t\tTotal vcore-milliseconds taken by all reduce tasks=691889\n\t\tTotal megabyte-milliseconds taken by all map tasks=3428352\n\t\tTotal megabyte-milliseconds taken by all reduce tasks=2125483008\n\tMap-Reduce Framework\n\t\tMap input records=64\n\t\tMap output records=64\n\t\tMap output bytes=8758\n\t\tMap output materialized bytes=5612\n\t\tInput split bytes=151\n\t\tCombine input records=0\n\t\tCombine output records=0\n\t\tReduce input groups=64\n\t\tReduce shuffle bytes=5612\n\t\tReduce input records=64\n\t\tReduce output records=0\n\t\tSpilled Records=128\n\t\tShuffled Maps =63\n\t\tFailed Shuffles=0\n\t\tMerged Map outputs=63\n\t\tGC time elapsed (ms)=14524\n\t\tCPU time spent (ms)=630930\n\t\tPhysical memory (bytes) snapshot=30090629120\n\t\tVirtual memory (bytes) snapshot=297350909952\n\t\tTotal committed heap usage (bytes)=40637038592\n\tShuffle Errors\n\t\tBAD_ID=0\n\t\tCONNECTION=0\n\t\tIO_ERROR=0\n\t\tWRONG_LENGTH=0\n\t\tWRONG_MAP=0\n\t\tWRONG_REDUCE=0\n\tFile Input Format Counters \n\t\tBytes Read=8350\n\tFile Output Format Counters \n\t\tBytes Written=0\n17/11/27 19:40:40 INFO s3distcp.S3DistCp: Try to recursively delete hdfs:/tmp/eb857b4f-b23c-4303-8ec4-13ad7f90b49c/tempspace\nSLF4J: Class path contains multiple SLF4J bindings.\nSLF4J: Found binding in [jar:file:/opt/accumulo-1.8.1/lib/slf4j-log4j12.jar!/org/slf4j/impl/StaticLoggerBinder.class]\nSLF4J: Found binding in [jar:file:/usr/lib/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]\nSLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.\nSLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]\n2017-11-27 19:40:41,726 [conf.ConfigSanityCheck] WARN : Use of instance.dfs.uri and instance.dfs.dir are deprecated. 
Consider using instance.volumes instead.\n2017-11-27 19:40:42,468 [htrace.SpanReceiverBuilder] ERROR: SpanReceiverBuilder cannot find SpanReceiver class org.apache.accumulo.tracer.ZooTraceClient: disabling span receiver.\n2017-11-27 19:40:42,468 [trace.DistributedTrace] WARN : Failed to load SpanReceiver org.apache.accumulo.tracer.ZooTraceClient\nSLF4J: Class path contains multiple SLF4J bindings.\nSLF4J: Found binding in [jar:file:/opt/accumulo-1.8.1/lib/slf4j-log4j12.jar!/org/slf4j/impl/StaticLoggerBinder.class]\nSLF4J: Found binding in [jar:file:/usr/lib/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]\nSLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.\nSLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]\n2017-11-27 19:40:46,302 [conf.ConfigSanityCheck] WARN : Use of instance.dfs.uri and instance.dfs.dir are deprecated. Consider using instance.volumes instead.\n2017-11-27 19:40:47,035 [htrace.SpanReceiverBuilder] ERROR: SpanReceiverBuilder cannot find SpanReceiver class org.apache.accumulo.tracer.ZooTraceClient: disabling span receiver.\n2017-11-27 19:40:47,036 [trace.DistributedTrace] WARN : Failed to load SpanReceiver org.apache.accumulo.tracer.ZooTraceClient\n2017-11-27 19:40:47,467 [impl.TableOperationsImpl] INFO : Imported table sets 'table.iterator.minc.STATS_COMBINER' to '10,org.locationtech.geowave.datastore.accumulo.MergingCombiner'. Ensure this class is on Accumulo classpath.\n2017-11-27 19:40:47,467 [impl.TableOperationsImpl] INFO : Imported table sets 'table.iterator.majc.STATS_COMBINER' to '10,org.locationtech.geowave.datastore.accumulo.MergingCombiner'. Ensure this class is on Accumulo classpath.\n2017-11-27 19:40:47,467 [impl.TableOperationsImpl] INFO : Imported table sets 'table.iterator.scan.STATS_COMBINER' to '10,org.locationtech.geowave.datastore.accumulo.MergingCombiner'. 
Ensure this class is on Accumulo classpath.\n"}]},"apps":[],"jobName":"paragraph_1524594363559_-596518550","id":"20170815-204020_1185378225","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3509"},{"text":"%spark\n\n//Load Java environment vars, and convert to Scala map\nimport scala.collection.JavaConversions._\nval jenvironmentVars = System.getenv()\n//Use environmentVars map to pull environment vars for use in spark\nval environmentVars = mapAsScalaMap(jenvironmentVars)\nfor ((k,v) <- environmentVars) println(s\"key: $k, value: $v\")\n\n//Bind the hostname to the angular frontend to be used in map creation script\nz.angularBind(\"hostname\", environmentVars.getOrElse(\"HOSTNAME\", \"localhost\"))\n","dateUpdated":"2018-04-24T18:26:03+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"import scala.collection.JavaConversions._\njenvironmentVars: java.util.Map[String,String] =\n{PATH=/usr/local/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/sbin:/bin, ZEPPELIN_PORT=8890, BASH_FUNC_run_prestart()=() { su -s /bin/bash $SVC_USER -c \"cd $WORKING_DIR && $EXEC_PATH --config '$CONF_DIR' start > /dev/null 2>&1\"\n}, ZEPPELIN_LOG_DIR=/var/log/zeppelin, HADOOP_CONF_DIR=/etc/hadoop/conf, SPARK_MASTER_WEBUI_PORT=8080, ZEPPELIN_WAR=/usr/lib/zeppelin/zeppelin-web-0.7.3.war, ZEPPELIN_ENCODING=UTF-8, SPARK_SUBMIT_OPTIONS=--driver-memory 11059M --executor-memory 9830M --jars /usr/local/geowave/tools/geowave-tools-0.9.6-apache.jar --conf 'spark.executorEnv.PYTHONPATH=/usr/lib/spark/python/lib/py4j-src.zip:/usr/lib/spark/python/:{{PWD}}/pyspark.zip{{PWD}}/py4j-src.zip' --conf spark.yarn.isPython=true, PIDFILE=/var/run/zeppelin/z...environmentVars: scala.collection.mutable.Map[String,String] =\nMap(PATH -> 
/usr/local/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/sbin:/bin, ZEPPELIN_PORT -> 8890, BASH_FUNC_run_prestart() -> () { su -s /bin/bash $SVC_USER -c \"cd $WORKING_DIR && $EXEC_PATH --config '$CONF_DIR' start > /dev/null 2>&1\"\n}, ZEPPELIN_LOG_DIR -> /var/log/zeppelin, HADOOP_CONF_DIR -> /etc/hadoop/conf, SPARK_MASTER_WEBUI_PORT -> 8080, ZEPPELIN_WAR -> /usr/lib/zeppelin/zeppelin-web-0.7.3.war, ZEPPELIN_ENCODING -> UTF-8, SPARK_SUBMIT_OPTIONS -> --driver-memory 11059M --executor-memory 9830M --jars /usr/local/geowave/tools/geowave-tools-0.9.6-apache.jar --conf 'spark.executorEnv.PYTHONPATH=/usr/lib/spark/python/lib/py4j-src.zip:/usr/lib/spark/python/:{{PWD}}/pyspark.zip{{PWD}}/py4j-src.zip' --conf spark.yar...key: PATH, value: /usr/local/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/sbin:/bin\nkey: ZEPPELIN_PORT, value: 8890\nkey: BASH_FUNC_run_prestart(), value: () { su -s /bin/bash $SVC_USER -c \"cd $WORKING_DIR && $EXEC_PATH --config '$CONF_DIR' start > /dev/null 2>&1\"\n}\nkey: ZEPPELIN_LOG_DIR, value: /var/log/zeppelin\nkey: HADOOP_CONF_DIR, value: /etc/hadoop/conf\nkey: SPARK_MASTER_WEBUI_PORT, value: 8080\nkey: ZEPPELIN_WAR, value: /usr/lib/zeppelin/zeppelin-web-0.7.3.war\nkey: ZEPPELIN_ENCODING, value: UTF-8\nkey: SPARK_SUBMIT_OPTIONS, value: --driver-memory 11059M --executor-memory 9830M --jars /usr/local/geowave/tools/geowave-tools-0.9.6-apache.jar --conf 'spark.executorEnv.PYTHONPATH=/usr/lib/spark/python/lib/py4j-src.zip:/usr/lib/spark/python/:{{PWD}}/pyspark.zip{{PWD}}/py4j-src.zip' --conf spark.yarn.isPython=true\nkey: PIDFILE, value: /var/run/zeppelin/zeppelin.pid\nkey: ZEPPELIN_NICENESS, value: 0\nkey: SPARK_ENV_LOADED, value: 1\nkey: JAVA_OPTS, value: -Dfile.encoding=UTF-8 -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -Dlog4j.configuration=file:///etc/zeppelin/conf/log4j.properties -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-zeppelin-ip-10-0-0-36.log -Dfile.encoding=UTF-8 -Xms1024m -Xmx1024m -XX:MaxPermSize=512m 
-Dlog4j.configuration=file:///etc/zeppelin/conf/log4j.properties\nkey: DESC, value: Zeppelin\nkey: JAVA_INTP_OPTS, value: -Dfile.encoding=UTF-8 -Dlog4j.configuration=file:///etc/zeppelin/conf/log4j.properties -Dzeppelin.log.file=/var/log/zeppelin/zeppelin-interpreter-spark-zeppelin-ip-10-0-0-36.log\nkey: EXEC_PATH, value: /usr/lib/zeppelin/bin/zeppelin-daemon.sh\nkey: SLEEP_TIME, value: 10\nkey: ZEPPELIN_CONF_DIR, value: /etc/zeppelin/conf\nkey: LD_LIBRARY_PATH, value: /usr/lib/hadoop/lib/native:/usr/lib/hadoop-lzo/lib/native\nkey: HADOOP_HOME_WARN_SUPPRESS, value: true\nkey: LOGNAME, value: zeppelin\nkey: JSVC_HOME, value: /usr/lib/bigtop-utils\nkey: PWD, value: /var/lib/zeppelin\nkey: HADOOP_PREFIX, value: /usr/lib/hadoop\nkey: ZEPPELIN_PID, value: /var/run/zeppelin/zeppelin-interpreter-spark-zeppelin-ip-10-0-0-36.pid\nkey: PYTHONPATH, value: /usr/lib/spark/python/lib/py4j-0.10.4-src.zip:/usr/lib/spark/python/:\nkey: HIVE_SERVER2_THRIFT_BIND_HOST, value: 0.0.0.0\nkey: SPARK_SUBMIT, value: /usr/lib/spark/bin/spark-submit\nkey: SHELL, value: /bin/bash\nkey: WORKING_DIR, value: /var/lib/zeppelin\nkey: ZEPPELIN_INTP_MEM, value: -Xms1024m -Xmx1024m -XX:MaxPermSize=512m\nkey: SPARK_MASTER_PORT, value: 7077\nkey: HADOOP_YARN_HOME, value: /usr/lib/hadoop-yarn\nkey: UPSTART_INSTANCE, value: \nkey: SPARK_MASTER_IP, value: ip-10-0-0-36.ec2.internal\nkey: DAEMON_FLAGS, value: \nkey: HADOOP_HOME, value: /usr/lib/hadoop\nkey: DAEMON, value: zeppelin\nkey: SHLVL, value: 4\nkey: SPARK_LOG_DIR, value: /var/log/spark\nkey: MASTER, value: yarn-client\nkey: UPSTART_JOB, value: zeppelin\nkey: JAVA_HOME, value: /usr/lib/jvm/java-openjdk\nkey: CONF_DIR, value: /etc/zeppelin/conf\nkey: TERM, value: linux\nkey: XFILESEARCHPATH, value: /usr/dt/app-defaults/%L/Dt\nkey: SPARK_WORKER_DIR, value: /var/run/spark/work\nkey: LANG, value: en_US.UTF-8\nkey: SPARK_SCALA_VERSION, value: 2.10\nkey: HADOOP_LIBEXEC_DIR, value: /usr/lib/hadoop/libexec\nkey: ZEPPELIN_WAR_TEMPDIR, value: 
/var/run/zeppelin/webapps\nkey: SPARK_HOME, value: /usr/lib/spark\nkey: ZEPPELIN_NOTEBOOK_DIR, value: /var/lib/zeppelin/notebook\nkey: HADOOP_HDFS_HOME, value: /usr/lib/hadoop-hdfs\nkey: ZEPPELIN_RUNNER, value: /usr/lib/jvm/java-openjdk/bin/java\nkey: HADOOP_MAPRED_HOME, value: /usr/lib/hadoop-mapreduce\nkey: HADOOP_COMMON_HOME, value: /usr/lib/hadoop\nkey: PYTHONHASHSEED, value: 0\nkey: ZEPPELIN_HOME, value: /usr/lib/zeppelin\nkey: HIVE_CONF_DIR, value: /etc/hive/conf\nkey: USER, value: zeppelin\nkey: CLASSPATH, value: :/usr/lib/hadoop-lzo/lib/*:/usr/lib/hadoop/hadoop-aws.jar:/usr/share/aws/aws-java-sdk/*:/usr/share/aws/emr/emrfs/conf:/usr/share/aws/emr/emrfs/lib/*:/usr/share/aws/emr/emrfs/auxlib/*:/usr/share/aws/hmclient/lib/aws-glue-datacatalog-spark-client.jar\nkey: ZEPPELIN_PID_DIR, value: /var/run/zeppelin\nkey: ZEPPELIN_MEM, value: -Xms1024m -Xmx1024m -XX:MaxPermSize=512m\nkey: SPARK_DAEMON_JAVA_OPTS, value: -XX:OnOutOfMemoryError='kill -9 %p'\nkey: HOSTNAME, value: ip-10-0-0-36\nkey: ZEPPELIN_IDENT_STRING, value: zeppelin\nkey: NLSPATH, value: /usr/dt/lib/nls/msg/%L/%N.cat\nkey: STANDALONE_SPARK_MASTER_HOST, value: ip-10-0-0-36.ec2.internal\nkey: SPARK_PUBLIC_DNS, value: ip-10-0-0-36.ec2.internal\nkey: SVC_USER, value: zeppelin\nkey: SPARK_WORKER_PORT, value: 7078\nkey: ZEPPELIN_INTERPRETER_REMOTE_RUNNER, value: bin/interpreter.sh\nkey: HIVE_SERVER2_THRIFT_PORT, value: 10001\nkey: HOME, value: /var/lib/zeppelin\nkey: SPARK_WORKER_WEBUI_PORT, value: 8081\n"}]},"apps":[],"jobName":"paragraph_1524594363559_-596518550","id":"20171117-145757_486146312","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3510"},{"title":"Configure GeoWave","text":"%sh\n# clear out potential old runs\ngeowave store clear kmeans_hbase\ngeowave store rm kmeans_hbase\ngeowave store clear germany_gpx_accumulo\ngeowave store rm germany_gpx_accumulo\n\n# configure geowave connection params for name stores 
\"germany_gpx_accumulo\" and \"kmeans_hbase\"\ngeowave store add germany_gpx_accumulo --gwNamespace geowave.germany_gpx -t accumulo --zookeeper $HOSTNAME:2181 --instance accumulo --user root --password secret\ngeowave store add kmeans_hbase --gwNamespace geowave.kmeans -t hbase --zookeeper $HOSTNAME:2181\n\n# set up geoserver\ngeowave config geoserver \"$HOSTNAME:8000\"\n\n# add gpx layer\ngeowave gs layer add germany_gpx_accumulo -id gpxpoint\nwget s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/SubsamplePoints.sld\ngeowave gs style add SubsamplePoints -sld SubsamplePoints.sld\ngeowave gs style set gpxpoint --styleName SubsamplePoints\n","dateUpdated":"2018-04-24T18:26:03+0000","config":{"tableHide":false,"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"geoserver.url=ip-10-0-0-36:8000\n\n27 Nov 19:41:55 WARN [client.ClientConfiguration] - Found no client.conf in default paths. 
Using default client configuration values.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\nNov 27, 2017 7:41:56 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nAdd GeoServer layer for 'germany_gpx_accumulo: OK : {\n \"description\": \"Successfully added:\",\n \"layers\": [ {\n \"id\": \"gpxpoint\",\n \"type\": \"vector\"\n }]\n}\n--2017-11-27 19:42:01-- http://s3.amazonaws.com/geowave/latest/scripts/emr/quickstart/SubsamplePoints.sld\nResolving s3.amazonaws.com 
(s3.amazonaws.com)... 54.231.115.50\nConnecting to s3.amazonaws.com (s3.amazonaws.com)|54.231.115.50|:80... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 2237 (2.2K) [binary/octet-stream]\nSaving to: ‘SubsamplePoints.sld’\n\n 0K .. 100% 546M=0s\n\n2017-11-27 19:42:01 (546 MB/s) - ‘SubsamplePoints.sld’ saved [2237/2237]\n\nAdd style for 'SubsamplePoints' on GeoServer: OK\nSet style for GeoServer layer 'gpxpoint: OK\n"}]},"apps":[],"jobName":"paragraph_1524594363560_-598442294","id":"20170809-181755_1512238840","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3511"},{"text":"%spark\n//Import classes from spark\nimport org.apache.spark.api.java.JavaSparkContext\n//DataFrame = type alias Dataset\nimport org.apache.spark.sql.DataFrame\nimport spark.implicits._\n\n//Import classes from geowave\nimport org.locationtech.geowave.datastore.hbase.cli.config.HBaseRequiredOptions\nimport org.locationtech.geowave.datastore.accumulo.cli.config.AccumuloRequiredOptions\nimport org.locationtech.geowave.analytic.spark.RDDOptions\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\nimport org.locationtech.geowave.core.store.query.QueryOptions\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\nimport org.locationtech.geowave.core.index.ByteArrayId","dateUpdated":"2018-04-24T18:26:46+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"import org.apache.spark.api.java.JavaSparkContext\nimport org.apache.spark.sql.DataFrame\nimport spark.implicits._\nimport 
org.locationtech.geowave.datastore.hbase.operations.config.HBaseRequiredOptions\nimport org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions\nimport org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner\nimport org.locationtech.geowave.core.store.query.QueryOptions\nimport org.locationtech.geowave.analytic.spark.GeoWaveRDD\nimport org.locationtech.geowave.analytic.spark.sparksql.SimpleFeatureDataFrame\nimport org.locationtech.geowave.core.index.ByteArrayId\n"}]},"apps":[],"jobName":"paragraph_1524594363560_-598442294","id":"20171117-143415_1121588696","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3512"},{"text":"%spark\n\n//Grab hostname from environment vars\nval hostname = environmentVars.getOrElse(\"HOSTNAME\", \"invalid-host\")\nprintln(s\"hostname= $hostname\")\n\n//Setup datastores\nval input_store = new AccumuloRequiredOptions()\ninput_store.setInstance(\"accumulo\")\ninput_store.setUser(\"root\")\ninput_store.setPassword(\"secret\")\ninput_store.setZookeeper(hostname + \":2181\")\ninput_store.setGeowaveNamespace(\"geowave.germany_gpx\")\n\nval output_store = new HBaseRequiredOptions()\noutput_store.setZookeeper(hostname + \":2181\")\noutput_store.setGeowaveNamespace(\"geowave.kmeans\")\n\n//Create instances of store plugin options, and KMeansRunner\nval input_store_plugin = input_store.createPluginOptions()\nval output_store_plugin = output_store.createPluginOptions()\nval jsc = JavaSparkContext.fromSparkContext(sc)\nval kmeans_runner = new KMeansRunner()","dateUpdated":"2018-04-24T18:26:03+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"hostname: String = ip-10-0-0-36\nhostname= ip-10-0-0-36\ninput_store: 
org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions = org.locationtech.geowave.datastore.accumulo.operations.config.AccumuloRequiredOptions@bc516ff\noutput_store: org.locationtech.geowave.datastore.hbase.operations.config.HBaseRequiredOptions = org.locationtech.geowave.datastore.hbase.operations.config.HBaseRequiredOptions@32b51512\ninput_store_plugin: org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions = org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions@75169b65\noutput_store_plugin: org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions = org.locationtech.geowave.core.store.operations.remote.options.DataStorePluginOptions@20c7db21\njsc: org.apache.spark.api.java.JavaSparkContext = org.apache.spark.api.java.JavaSparkContext@3bf8841e\nkmeans_runner: org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner = org.locationtech.geowave.analytic.spark.kmeans.KMeansRunner@41475ce7\n"}]},"apps":[],"jobName":"paragraph_1524594363560_-598442294","id":"20171117-144307_1205081062","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3513"},{"text":"%sh\n#clear old potential runs\ngeowave store clear kmeans_hbase","dateUpdated":"2018-04-24T18:26:03+0000","config":{"colWidth":12,"editorMode":"ace/mode/sh","results":{},"enabled":true,"editorSetting":{"language":"sh","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"27 Nov 14:56:59 INFO [zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x7d94beb9 connecting to ZooKeeper ensemble=ip-10-0-0-14:2181\n27 Nov 14:57:00 INFO [client.HBaseAdmin] - Started disable of geowave.kmeans_GEOWAVE_METADATA\n27 Nov 14:57:08 INFO [client.HBaseAdmin] - Disabled geowave.kmeans_GEOWAVE_METADATA\n27 Nov 14:57:17 INFO [client.HBaseAdmin] - Deleted 
geowave.kmeans_GEOWAVE_METADATA\n27 Nov 14:57:17 INFO [client.HBaseAdmin] - Started disable of geowave.kmeans_SPATIAL_IDX\n27 Nov 14:57:21 INFO [client.HBaseAdmin] - Disabled geowave.kmeans_SPATIAL_IDX\n27 Nov 14:58:29 INFO [client.HBaseAdmin] - Deleted geowave.kmeans_SPATIAL_IDX\n"}]},"apps":[],"jobName":"paragraph_1524594363560_-598442294","id":"20171122-192044_1893177986","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3514"},{"text":"%spark\n//set the appropriate properties of the runner\nkmeans_runner.setJavaSparkContext(JavaSparkContext.fromSparkContext(sc))\nkmeans_runner.setAdapterId(\"gpxpoint\")\nkmeans_runner.setNumClusters(8)\nkmeans_runner.setInputDataStore(input_store_plugin)\nkmeans_runner.setOutputDataStore(output_store_plugin)\nkmeans_runner.setCqlFilter(\"BBOX(geometry, 13.3, 52.45, 13.5, 52.5)\")\nkmeans_runner.setCentroidTypeName(\"mycentroids\")\nkmeans_runner.setHullTypeName(\"myhulls\")\nkmeans_runner.setGenerateHulls(true)\nkmeans_runner.setComputeHullData(true)\n\n//execute the kmeans runner\nkmeans_runner.run()","dateUpdated":"2018-04-24T18:26:03+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[]},"apps":[],"jobName":"paragraph_1524594363560_-598442294","id":"20171117-150524_1487053014","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3515"},{"title":"Add KMeans Results to GeoServer","text":"%sh\n\n# add the centroids layer\ngeowave gs layer add kmeans_hbase -id mycentroids\ngeowave gs style set mycentroids --styleName point\n\n# add the hulls layer\ngeowave gs layer add kmeans_hbase -id myhulls\ngeowave gs style set myhulls --styleName 
line","dateUpdated":"2018-04-24T18:26:03+0000","config":{"tableHide":false,"editorSetting":{"language":"sh","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/sh","title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"26 Nov 20:35:31 INFO [zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x4e928fbf connecting to ZooKeeper ensemble=ip-10-0-0-106:2181\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\nNov 26, 2017 8:35:33 PM 
org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nAdd GeoServer layer for 'kmeans_hbase: OK : {\n \"description\": \"Successfully added:\",\n \"layers\": [ {\n \"id\": \"mycentroids\",\n \"type\": \"vector\"\n }]\n}\nSet style for GeoServer layer 'mycentroids: OK\n26 Nov 20:35:38 INFO [zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x4e928fbf connecting to ZooKeeper ensemble=ip-10-0-0-106:2181\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'GeoServerResourceLoader', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionProvider', but ApplicationContext is unset.\nNov 26, 2017 8:35:39 
PM org.geoserver.platform.GeoServerExtensions checkContext\nWARNING: Extension lookup 'ExtensionFilter', but ApplicationContext is unset.\nAdd GeoServer layer for 'kmeans_hbase: OK : {\n \"description\": \"Successfully added:\",\n \"layers\": [ {\n \"id\": \"myhulls\",\n \"type\": \"vector\"\n }]\n}\nSet style for GeoServer layer 'myhulls: OK\n"}]},"apps":[],"jobName":"paragraph_1524594363561_-598827043","id":"20170817-030121_1271873891","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3516"},{"text":"%angular\n\n

GeoWave Leaflet Map

\n
\n
\n\n","dateUpdated":"2018-04-24T18:26:03+0000","config":{"tableHide":false,"editorSetting":{"language":"scala","editOnDblClick":true},"colWidth":12,"editorMode":"ace/mode/undefined","editorHide":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"ANGULAR","data":"\n

GeoWave Leaflet Map

\n
\n
\n"}]},"apps":[],"jobName":"paragraph_1524594363561_-598827043","id":"20170817-030613_874309201","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3517"},{"title":"Load KMeans Centroid CSV into DataFrame","text":"%spark\n\ndef create_dataframe(adapter_name : String) : DataFrame = {\n //Create the dataframe and get a rdd for the output of kmeans\n var sf_df = new SimpleFeatureDataFrame(spark)\n val adapter_id = new ByteArrayId(adapter_name)\n \n var queryOptions = null : Option[QueryOptions]\n val adapterIt = output_store_plugin.createAdapterStore().getAdapters()\n while (adapterIt.hasNext()) {\n val adapter = adapterIt.next()\n if (adapter.getAdapterId().equals(adapter_id)) {\n val adapterForQuery = adapter\n queryOptions = Some(new QueryOptions(adapterForQuery))\n }\n }\n val loadOpts = new RDDOptions()\n loadOpts.setQueryOptions(queryOptions.getOrElse(null))\n val output_rdd = GeoWaveRDDLoader.loadRDD(sc, output_store_plugin, loadOpts))\n sf_df.init(output_store_plugin, adapter_id)\n \n return sf_df.getDataFrame(output_rdd)\n}\n\nvar df = create_dataframe(\"mycentroids\")\ndf.show()\n\n// Convert geom string to lat/long\ncase class KMeansRow(lat: Double, lon: Double, ClusterIndex : Int)\nval kmeansData = df.map(row => {\n val geom_index = row.fieldIndex(\"geom\")\n val geom = row.getString(geom_index)\n val cluster_index = row.getInt(row.fieldIndex(\"ClusterIndex\"))\n val lat_start = geom.lastIndexOf(\" \") + 1\n val lat_end = geom.lastIndexOf(\")\")\n val lat = geom.substring(lat_start, lat_end)\n val lonStart = geom.indexOf(\"(\") + 1\n val lonStop = geom.indexOf(\" \", lonStart)\n val lon = geom.substring(lonStart, lonStop)\n KMeansRow(lat=lat.toDouble, lon=lon.toDouble, ClusterIndex=cluster_index)\n })\n// send the results to the front end (Leaflet map)\nz.angularBind(\"pins\", kmeansData.collect())\n// register a view for SQL 
queries\nkmeansData.createOrReplaceTempView(\"kmeans\")\n","dateUpdated":"2018-04-24T18:27:15+0000","config":{"tableHide":false,"editorSetting":{"language":"scala","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/scala","editorHide":false,"title":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TEXT","data":"create_dataframe: (adapter_name: String)org.apache.spark.sql.DataFrame\ndf: org.apache.spark.sql.DataFrame = [geom: string, ClusterIndex: int]\n+--------------------+------------+\n| geom|ClusterIndex|\n+--------------------+------------+\n|POINT (13.3195852...| 0|\n|POINT (13.3139355...| 5|\n|POINT (13.3392341...| 6|\n|POINT (13.4709106...| 3|\n|POINT (13.3619528...| 4|\n|POINT (13.3888137...| 1|\n|POINT (13.4312593...| 2|\n|POINT (13.4756306...| 7|\n+--------------------+------------+\n\ndefined class KMeansRow\nkmeansData: org.apache.spark.sql.Dataset[KMeansRow] = [lat: double, lon: double ... 1 more field]\n"}]},"apps":[],"jobName":"paragraph_1524594363561_-598827043","id":"20170814-174640_830156690","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3518"},{"title":"Display the KMeans Centroids Table","text":"%sql\nselect lat as Latitude, lon as Longitude from 
kmeans","dateUpdated":"2018-04-24T18:26:03+0000","config":{"editorSetting":{"language":"sql","editOnDblClick":false},"colWidth":8,"editorMode":"ace/mode/sql","editorHide":false,"title":true,"results":{"0":{"graph":{"mode":"table","height":300,"optionOpen":false},"helium":{}}},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"TABLE","data":"Latitude\tLongitude\n52.46351825250708\t13.319585220066669\n52.48782846644896\t13.313935518966645\n52.48274354548128\t13.339234131447801\n52.491618128998084\t13.470910672079846\n52.479834322332394\t13.361952882747175\n52.48307815695488\t13.388813779887156\n52.48417492312525\t13.43125930391005\n52.46475242616019\t13.475630651565233\n"}]},"apps":[],"jobName":"paragraph_1524594363561_-598827043","id":"20170809-203309_1972137502","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3519"},{"text":"%angular\r\n\r\n\r\n

Client-side Integration

\r\n
\r\n\r\n","dateUpdated":"2018-04-24T18:26:03+0000","config":{"tableHide":false,"editorSetting":{"language":"text","editOnDblClick":true},"colWidth":8,"editorMode":"ace/mode/undefined","editorHide":true,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"results":{"code":"SUCCESS","msg":[{"type":"ANGULAR","data":"\r\n

Client-side Integration

\r\n
\r\n\r\n"}]},"apps":[],"jobName":"paragraph_1524594363562_-597672796","id":"20170809-021534_2122057818","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3520"},{"text":"%spark\n","dateUpdated":"2018-04-24T18:26:03+0000","config":{"colWidth":12,"editorMode":"ace/mode/scala","results":{},"enabled":true,"editorSetting":{"language":"scala","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1524594363562_-597672796","id":"20171127-001231_707705103","dateCreated":"2018-04-24T18:26:03+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:3521"}],"name":"GeoWave-GPX-Demo","id":"2DBNPY7JC","angularObjects":{"2BRWU4WXC:shared_process":[],"2AM1YV5CU:shared_process":[],"2AJXGMUUJ:shared_process":[],"2ANGGHHMQ:shared_process":[],"2AKK3QQXU:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}} ================================================ FILE: examples/data/notebooks/zeppelin/README.md ================================================ # Zeppelin Notebook Examples This folder contains example notebooks for Zeppelin ## GDELT Quick Start This notebook combines steps from the GeoWave Quick Start Guide to download and ingest GDELT data, and then demonstrates various levels of integration with KMeans analytic, SparkSQL queries and GeoServer + Leaflet map output. ## GeoWave GPX Demo This notebook demonstrates interaction with a large dataset. The GPX data is in exported Accumulo tables on S3 and will be imported (rather than ingested). It is similar to the GDELT notebook in that it has a combination of SparkSQL, KMeans and GeoServer display. ### Getting Started In order to run these notebook examples, you'll need to have a working GeoWave installation. Please see the [GeoWave Quick Start Guide](http://locationtech.github.io/geowave/quickstart.html) for instructions. 
================================================ FILE: examples/data/slds/DistributedRender.sld ================================================ Distributed Render - Blue Line Default Line with GeoWave Distributed Rendering enabled A sample style that draws a line using GeoWave's distributed rendering data rule1 Blue Line A solid blue line with a 1 pixel width #0000FF ================================================ FILE: examples/data/slds/KDEColorMap.sld ================================================ KDE Color Map raster Feature 1 3 ================================================ FILE: examples/data/slds/SubsamplePoints.sld ================================================ Subsample At Requested Map Resolution Subsample An example of how to handle large datasets in a WMS request by subsampling the data within GeoWave based on the pixel resolution. data pixelSize 1.5 outputBBOX wms_bbox outputWidth wms_width outputHeight wms_height Basic Red Square Red Square A 3 pixel square with a red fill and no stroke square #FF0000 3 ================================================ FILE: examples/java-api/pom.xml ================================================ 4.0.0 org.locationtech.geowave geowave-parent 2.0.2-SNAPSHOT ../../ geowave-example GeoWave Examples ${project.artifactId}-${project.version}-examples org.apache.spark spark-core_2.12 org.locationtech.geowave geowave-analytic-spark org.locationtech.geowave geowave-datastore-accumulo ${project.version} org.locationtech.geowave geowave-adapter-vector ${project.version} org.locationtech.geowave geowave-adapter-raster ${project.version} com.github.spotbugs spotbugs-annotations examples-singlejar org.apache.maven.plugins maven-shade-plugin 2.2 package shade junit:junit junit/framework/** org/junit/** org/junit/experimental/** org/junit/runners/** *:* META-INF/*.SF META-INF/*.DSA META-INF/*.RSA false false ${examples.finalName} ================================================ FILE: 
examples/java-api/src/main/java/org/locationtech/geowave/examples/ExamplePersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; import org.locationtech.geowave.examples.adapter.CustomAdapterExample.POIBasicDataAdapter; import org.locationtech.geowave.examples.index.CustomIndexExample.UUIDConstraints; import org.locationtech.geowave.examples.index.CustomIndexExample.UUIDIndexStrategy; import org.locationtech.geowave.examples.ingest.plugin.CustomIngestPlugin; public class ExamplePersistableRegistry implements PersistableRegistrySpi { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return new PersistableIdAndConstructor[] { new PersistableIdAndConstructor((short) 20000, POIBasicDataAdapter::new), new PersistableIdAndConstructor((short) 20001, UUIDIndexStrategy::new), new PersistableIdAndConstructor((short) 20002, UUIDConstraints::new), new PersistableIdAndConstructor((short) 20003, CustomIngestPlugin::new)}; } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/adapter/BasicDataTypeAdapterExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.adapter; import java.io.IOException; import java.util.Date; import org.geotools.filter.text.cql2.CQLException; import org.geotools.geometry.jts.JTSFactoryFinder; import org.geotools.util.factory.FactoryRegistryException; import org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveSpatialField; import org.locationtech.geowave.core.geotime.adapter.annotation.GeoWaveTemporalField; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.store.query.SpatialTemporalConstraintsBuilderImpl; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.BasicDataTypeAdapter; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveDataType; import org.locationtech.geowave.core.store.adapter.annotation.GeoWaveField; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKTReader; /** * This class provides an example of how to create a 
{@link DataTypeAdapter} for a custom data type. * This allows a user to directly write, index, and query their own data types from a GeoWave data * store without having to translate to and from a `SimpleFeature`. It differs from the * {@link CustomAdapterExample} in that it does not require a new adapter to be registered with the * persistable registry and is not suitable for some more complex data types. The basic data type * adapter uses reflection or annotations to infer the fields of a data type. */ public class BasicDataTypeAdapterExample { private DataStore dataStore; private DataTypeAdapter adapter; private DataTypeAdapter annotatedAdapter; private Index spatialIndex; public static void main(final String[] args) throws IOException, CQLException { final BasicDataTypeAdapterExample example = new BasicDataTypeAdapterExample(); example.run(); } public void run() { // Create an in-memory data store to use with this example dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); // Create the adapter for our POI class with the type name `POI` adapter = BasicDataTypeAdapter.newAdapter("POI", POI.class, "name"); // Create the adapter for our Annotated POI class with the type name `AnnotatedPOI` annotatedAdapter = BasicDataTypeAdapter.newAdapter("AnnotatedPOI", AnnotatedPOI.class, "alternateFieldName"); // Create the spatial index spatialIndex = new SpatialIndexBuilder().createIndex(); // Add the types to the data store with the spatial index dataStore.addType(adapter, spatialIndex); dataStore.addType(annotatedAdapter, spatialIndex); // Ingest the data into a spatial index ingestData(); // Perform a spatial query on the data querySpatial(); } private void ingestData() { try (Writer writer = dataStore.createWriter(adapter.getTypeName())) { // We can directly write `POI` instances to the data store writer.write(new POI("Eiffel Tower", 48.858093, 2.294694)); writer.write(new POI("Roman Colosseum", 41.890167, 12.492269)); writer.write(new POI("Great 
Pyramid of Giza", 29.979176, 31.134357)); writer.write(new POI("Mount Everest", 27.986065, 86.922623)); } try (Writer writer = dataStore.createWriter(annotatedAdapter.getTypeName())) { // We can directly write `AnnotatedPOI` instances to the data store writer.write(new AnnotatedPOI("Eiffel Tower", new Date(), 48.858093, 2.294694)); writer.write(new AnnotatedPOI("Roman Colosseum", new Date(), 41.890167, 12.492269)); writer.write(new AnnotatedPOI("Great Pyramid of Giza", new Date(), 29.979176, 31.134357)); writer.write(new AnnotatedPOI("Mount Everest", new Date(), 27.986065, 86.922623)); } } private void querySpatial() { try { // This bounding box represents approximately Europe, so only the European POIs will be // queried final String queryPolygonDefinition = "POLYGON (( " + "-10.55 35.96, " + "-10.55 71.30, " + "56.16 71.30, " + "56.16 35.96, " + "-10.55 35.96" + "))"; final Geometry queryPolygon = new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition); final QueryConstraints queryConstraints = new SpatialTemporalConstraintsBuilderImpl().spatialConstraints(queryPolygon).build(); // Query the POI adapter final Query query = QueryBuilder.newBuilder(POI.class).addTypeName(adapter.getTypeName()).indexName( spatialIndex.getName()).constraints(queryConstraints).build(); System.out.println( "Executing query on POI adapter, expecting to match Roman Colosseum and Eiffel Tower..."); try (final CloseableIterator iterator = dataStore.query(query)) { while (iterator.hasNext()) { System.out.println("Query match: " + iterator.next().getName()); } } // Now query the annotated POI adapter final Query annotatedQuery = QueryBuilder.newBuilder(AnnotatedPOI.class).addTypeName( annotatedAdapter.getTypeName()).indexName(spatialIndex.getName()).constraints( queryConstraints).build(); System.out.println( "Executing query on Annotated POI adapter, expecting to match Roman Colosseum and Eiffel Tower..."); try (final CloseableIterator iterator = 
dataStore.query(annotatedQuery)) { while (iterator.hasNext()) { System.out.println("Query match: " + iterator.next().getName()); } } } catch (FactoryRegistryException | ParseException e) { } } /** * Our custom data type that we want to store inside GeoWave. It contains a name, latitude, * longitude, a public string field, and a private string field. Any field that has both an * accessor and mutator, or is public will be added to the adapter. Private fields without an * accessor and mutator will be ignored. */ public static class POI { private String name; private Double latitude; private Double longitude; public String publicField; private String privateField = "ignored"; /** * A no-args constructor is required for the `BasicDataTypeAdapter` to create new instances. */ protected POI() {} public POI(final String name, final Double latitude, final Double longitude) { this.name = name; this.latitude = latitude; this.longitude = longitude; } public void setName(final String name) { this.name = name; } public String getName() { return name; } public void setLatitude(final Double latitude) { this.latitude = latitude; } public Double getLatitude() { return latitude; } public void setLongitude(final Double longitude) { this.longitude = longitude; } public Double getLongitude() { return longitude; } public String getPrivateField() { return privateField; } } /** * Another way to create a data type for the `BasicDataTypeAdapter` is to annotate it with GeoWave * field annotations. These annotations provide an additional level of control over the way each * field is interpreted. In an annotated class, annotated fields allow the user to specify index * hints, alternate field names, and a coordinate reference system for spatial fields. * Additionally, the annotated field may be private or final. When using an annotated data type, * any non-annotated fields will be ignored. Field annotations will only be used if the class is * annotated with `@GeoWaveDataType`. 
*/ @GeoWaveDataType public static class AnnotatedPOI { @GeoWaveField(name = "alternateFieldName") private final String name; @GeoWaveTemporalField(timeIndexHint = true) private final Date date; @GeoWaveSpatialField(latitudeIndexHint = true, crs = "EPSG:4326") private final Double latitude; @GeoWaveSpatialField(longitudeIndexHint = true, crs = "EPSG:4326") private final Double longitude; protected AnnotatedPOI() { name = null; date = null; latitude = null; longitude = null; } public AnnotatedPOI( final String name, final Date date, final Double latitude, final Double longitude) { this.name = name; this.date = date; this.latitude = latitude; this.longitude = longitude; } public String getName() { return name; } public Double getLatitude() { return latitude; } public Double getLongitude() { return longitude; } } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/adapter/CustomAdapterExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.examples.adapter;

import java.io.IOException;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.geometry.jts.JTSFactoryFinder;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.geotools.util.factory.FactoryRegistryException;
import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder;
import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder;
import org.locationtech.geowave.core.geotime.store.query.SpatialTemporalConstraintsBuilderImpl;
import org.locationtech.geowave.core.geotime.store.query.api.SpatialTemporalConstraintsBuilder;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.AbstractDataTypeAdapter;
import org.locationtech.geowave.core.store.adapter.FieldDescriptor;
import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.DataStoreFactory;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.Query;
import org.locationtech.geowave.core.store.api.QueryBuilder;
import org.locationtech.geowave.core.store.api.Writer;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;

/**
 * This class provides an example of how to create a {@link DataTypeAdapter} for a custom data type.
 * This allows a user to directly write, index, and query their own data types from a GeoWave data
 * store without having to translate to and from a `SimpleFeature`. A custom data adapter
 * implementation may be useful for data types that are too complex for the
 * {@link BasicDataTypeAdapter}, such as when annotations cannot be added or the fields cannot be
 * properly inferred.
 */
public class CustomAdapterExample {

  private DataStore dataStore;
  // NOTE(review): type parameters were stripped from the extracted source; restored from usage
  // (POIBasicDataAdapter extends AbstractDataTypeAdapter of POI) -- confirm against upstream.
  private DataTypeAdapter<POI> adapter;
  private Index spatialIndex;

  public static void main(final String[] args) throws IOException, CQLException {
    final CustomAdapterExample example = new CustomAdapterExample();
    example.run();
  }

  public void run() {
    // Create an in-memory data store to use with this example
    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());

    // Create our custom adapter with the type name `POI`
    adapter = new POIBasicDataAdapter("POI");

    // Create the spatial index
    spatialIndex = new SpatialIndexBuilder().createIndex();

    // Add the type to the data store with the spatial index
    dataStore.addType(adapter, spatialIndex);

    // Ingest the data into a spatial index
    ingestData();

    // Perform a spatial query on the data
    querySpatial();
  }

  /** Writes a handful of POI instances directly to the data store via the custom adapter. */
  private void ingestData() {
    try (Writer<POI> writer = dataStore.createWriter(adapter.getTypeName())) {
      // With our custom adapter, we can directly write `POI` instances to the data store
      writer.write(new POI("Eiffel Tower", 48.858093, 2.294694));
      writer.write(new POI("Roman Colosseum", 41.890167, 12.492269));
      writer.write(new POI("Great Pyramid of Giza", 29.979176, 31.134357));
      writer.write(new POI("Mount Everest", 27.986065, 86.922623));
    }
  }

  /** Queries the POI type with a bounding box covering approximately Europe. */
  private void querySpatial() {
    // Because we have hinted to GeoWave that our type contains spatial data, we can utilize spatial
    // constraints when querying
    try {
      // This bounding box represents approximately Europe, so only the European POIs will be
      // queried
      final String queryPolygonDefinition =
          "POLYGON (( "
              + "-10.55 35.96, "
              + "-10.55 71.30, "
              + "56.16 71.30, "
              + "56.16 35.96, "
              + "-10.55 35.96"
              + "))";

      final Geometry queryPolygon =
          new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition);
      final SpatialTemporalConstraintsBuilder spatialConstraintsBuilder =
          new SpatialTemporalConstraintsBuilderImpl();
      final Query<POI> query =
          QueryBuilder.newBuilder(POI.class).addTypeName(adapter.getTypeName()).indexName(
              spatialIndex.getName()).constraints(
                  spatialConstraintsBuilder.spatialConstraints(queryPolygon).build()).build();
      System.out.println("Executing query, expecting to match Roman Colosseum and Eiffel Tower...");
      try (final CloseableIterator<POI> iterator = dataStore.query(query)) {
        while (iterator.hasNext()) {
          System.out.println("Query match: " + iterator.next().getName());
        }
      }
    } catch (FactoryRegistryException | ParseException e) {
      // Previously this catch block was empty, silently swallowing parse/factory failures so the
      // example appeared to run while matching nothing. Surface the failure with its cause.
      throw new IllegalStateException("Failed to execute spatial query", e);
    }
  }

  /**
   * Our custom data type that we want to store inside GeoWave. It contains a name, latitude, and
   * longitude.
   */
  public static class POI {
    private final String name;
    private final Double latitude;
    private final Double longitude;

    public POI(final String name, final Double latitude, final Double longitude) {
      this.name = name;
      this.latitude = latitude;
      this.longitude = longitude;
    }

    public String getName() {
      return name;
    }

    public Double getLatitude() {
      return latitude;
    }

    public Double getLongitude() {
      return longitude;
    }
  }

  /**
   * The simplest way to implement a data adapter for a custom data type is to extend the
   * {@link AbstractDataTypeAdapter} and implement the methods that read and write the custom type.
   * It's important to note that any adapter that extends the `AbstractDataTypeAdapter` must be
   * added to the persistable registry.
   */
  public static class POIBasicDataAdapter extends AbstractDataTypeAdapter<POI> {
    public static final String NAME_FIELD_NAME = "name";
    public static final String LATITUDE_FIELD_NAME = "lat";
    public static final String LONGITUDE_FIELD_NAME = "lon";

    // We create a field descriptor for each field in our data type to tell GeoWave how to handle
    // the data. For the latitude and longitude fields, we provide index hints that identify those
    // fields as such, as well as a `CoordinateReferenceSystem` so that our type will be properly
    // transformed if the index has a different CRS.
    private static final FieldDescriptor<String> NAME_FIELD =
        new FieldDescriptorBuilder<>(String.class).fieldName(NAME_FIELD_NAME).build();
    private static final FieldDescriptor<Double> LATITUDE_FIELD =
        new SpatialFieldDescriptorBuilder<>(Double.class).fieldName(LATITUDE_FIELD_NAME).crs(
            DefaultGeographicCRS.WGS84).latitudeIndexHint().build();
    private static final FieldDescriptor<Double> LONGITUDE_FIELD =
        new SpatialFieldDescriptorBuilder<>(Double.class).fieldName(LONGITUDE_FIELD_NAME).crs(
            DefaultGeographicCRS.WGS84).longitudeIndexHint().build();
    private static final FieldDescriptor<?>[] FIELDS =
        new FieldDescriptor<?>[] {NAME_FIELD, LATITUDE_FIELD, LONGITUDE_FIELD};

    /** No-args constructor required by the persistable registry. */
    public POIBasicDataAdapter() {}

    public POIBasicDataAdapter(final String typeName) {
      super(typeName, FIELDS, NAME_FIELD);
    }

    @Override
    public Object getFieldValue(final POI entry, final String fieldName) {
      switch (fieldName) {
        case NAME_FIELD_NAME:
          return entry.name;
        case LATITUDE_FIELD_NAME:
          return entry.latitude;
        case LONGITUDE_FIELD_NAME:
          return entry.longitude;
      }
      return null;
    }

    @Override
    public POI buildObject(final Object dataId, final Object[] fieldValues) {
      // Field values arrive in the same order as FIELDS: name, latitude, longitude
      return new POI((String) fieldValues[0], (Double) fieldValues[1], (Double) fieldValues[2]);
    }
  }
}

================================================
FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/aggregation/binning/SpatialBinningAggregationExample.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.examples.aggregation.binning;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.tuple.Pair;
import org.geotools.feature.AttributeTypeBuilder;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.core.geotime.binning.SpatialBinningType;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.geotime.store.query.aggregate.SpatialSimpleFeatureBinningStrategy;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.api.AggregationQuery;
import org.locationtech.geowave.core.store.api.AggregationQueryBuilder;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.DataStoreFactory;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.Writer;
import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions;
import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam;
import org.locationtech.geowave.core.store.query.aggregate.FieldSumAggregation;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;

/**
 * This class provides an example of how to create a binned aggregation for your data. You may want
 * to use a binned aggregation if you need to sort your data into buckets and process the buckets
 * individually. This example sums the population after grouping the data by geohash (the same
 * approach works with Google's S2 or Uber's H3 binning).
 */
// NOTE(review): generic type parameters throughout this file appear to have been stripped by the
// text extraction (e.g. `final List cannedFeatures`, `final Map allResults`,
// `AggregationQuery, SimpleFeature>`). The tokens are preserved exactly as found; confirm the
// intended type arguments against the upstream source before compiling.
public class SpatialBinningAggregationExample {
  public static void main(final String[] args) {
    // this example shows binning using geohashes but it can easily use Google's S2 or Uber's H3 as
    // well for spatial binning
    final SimpleFeatureType featureType = getSimpleFeatureType();

    // Points (to be ingested into GeoWave Data Store)
    final List cannedFeatures = ImmutableList.of(
        buildSimpleFeature(featureType, "Loc1", new Coordinate(-77.0352, 38.8895), 12),
        buildSimpleFeature(featureType, "Loc2", new Coordinate(-77.0366, 38.8977), 13),
        buildSimpleFeature(featureType, "Loc3", new Coordinate(-76.8644, 38.9078), 8),
        buildSimpleFeature(featureType, "Loc4", new Coordinate(-76.350677, 38.9641511), 15),
        buildSimpleFeature(featureType, "Loc5", new Coordinate(-77.3384112, 38.416091), 7),
        buildSimpleFeature(featureType, "Loc6", new Coordinate(-67.0352, 28.8895), 3),
        buildSimpleFeature(featureType, "Loc7", new Coordinate(-67.0366, 28.8977), 99),
        buildSimpleFeature(featureType, "Loc8", new Coordinate(-66.8644, 28.9078), 0),
        buildSimpleFeature(featureType, "Loc9", new Coordinate(-66.350677, 28.9641511), 1),
        buildSimpleFeature(featureType, "Loc10", new Coordinate(-67.3384112, 28.416091), 23));

    final Index index =
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
    final DataStore dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());
    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);

    // Ingest cannedFeatures into the DataStore.
    dataStore.addType(adapter, index);
    try (Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) {
      for (final SimpleFeature sf : cannedFeatures) {
        indexWriter.write(sf);
      }
    }

    // calculate the population count for each precision from 1 to 6.
    // a geohash like g5c is a hash that is contained by the geohash g5,
    // which is contained by the geohash g.
    final Map allResults = Maps.newHashMapWithExpectedSize(100);
    for (int i = 6; i > 0; i--) {
      // execute a binned Aggregation, return the results
      // -1 maxBins means no max.
      allResults.putAll(
          executeBinningAggregation(i, index.getName(), adapter.getTypeName(), dataStore, -1));
    }
    System.out.printf(
        "Results for precision 1-6: %s%n",
        Arrays.toString(
            allResults.entrySet().stream().map(
                e -> Pair.of(
                    SpatialBinningType.GEOHASH.binToString(e.getKey().getBytes()),
                    e.getValue())).map(p -> p.getKey() + "=" + p.getValue()).toArray(
                        String[]::new)));
    System.out.printf(
        "Results just for precision 6: %s%n",
        Arrays.toString(
            allResults.entrySet().stream().filter((e) -> e.getKey().getBytes().length == 6).map(
                e -> Pair.of(
                    SpatialBinningType.GEOHASH.binToString(e.getKey().getBytes()),
                    e.getValue())).map(p -> p.getKey() + "=" + p.getValue()).toArray(
                        String[]::new)));

    // when maxBins is used, it will simply drop any new data that comes in.
    final Map maxed =
        executeBinningAggregation(8, index.getName(), adapter.getTypeName(), dataStore, 5);
    System.out.printf(
        "Results limited to the first 5 bins: %s%n",
        Arrays.toString(
            maxed.entrySet().stream().map(
                e -> Pair.of(
                    SpatialBinningType.GEOHASH.binToString(e.getKey().getBytes()),
                    e.getValue())).map(p -> p.getKey() + "=" + p.getValue()).toArray(
                        String[]::new)));
  }

  /**
   * This method creates a binning aggregation that groups the data in the dataStore by the given
   * precision, and sums all of the entries in the group.
   *
   * @param precision The geohash precision to use during binning.
   * @param indexName The index to query
   * @param typeName The name of the registered type adapter to use for serialization purposes.
   * @param dataStore where we have stored the data that we will aggregate.
   * @param maxBins the maximum number of bins to keep; -1 means unlimited.
   * @return Aggregated and computed data. Each entry has a key that is the geohash, and a value
   *         that is the population in that geohash.
   */
  private static Map executeBinningAggregation(
      final int precision,
      final String indexName,
      final String typeName,
      final DataStore dataStore,
      final int maxBins) {
    final AggregationQueryBuilder queryBuilder = AggregationQueryBuilder.newBuilder();
    queryBuilder.indexName(indexName);
    // Use `.count` instead of `aggregate` if you simply want to count the amount of rows
    // queryBuilder.count("geometry");
    // aggregate uses a provided aggregation to form data.
    queryBuilder.aggregate(typeName, new FieldSumAggregation(new FieldNameParam("population")));

    // `buildWithBinningStrategy` uses the current aggregation (the FieldSumAggregation in this
    // case), but adds a binning strategy on top of it. Each bin uses a fresh aggregation, so there
    // is no contamination between aggregations.
    // NOTE(review): the type arguments of this declaration were lost in extraction; the original
    // presumably carried BinningAggregationOptions and the binned result map type -- confirm
    // against upstream.
    // NOTE: here's where SpatialBinningType could instead be Google's S2 or Uber's H3 if desired
    final AggregationQuery, SimpleFeature> agg =
        queryBuilder.buildWithBinningStrategy(
            new SpatialSimpleFeatureBinningStrategy(SpatialBinningType.GEOHASH, precision, true),
            maxBins);

    // Aggregate the data in the dataStore with the AggregationQuery.
    return dataStore.aggregate(agg);
  }

  /**
   * A helper that constructs the SimpleFeatureType used in this example.
   */
  private static SimpleFeatureType getSimpleFeatureType() {
    final String name = "ExampleSimpleFeatureType";
    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();
    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();
    sftBuilder.setName(name);
    // the location name isn't used in this example, its just here for show!
    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("locationName"));
    // this is used for the grouping (the binning strategy).
    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry"));
    // this is the field that is summed in each group, as defined by the `.aggregate` call.
    sftBuilder.add(atBuilder.binding(Integer.class).nillable(false).buildDescriptor("population"));
    return sftBuilder.buildFeatureType();
  }

  /**
   * Just a helper method to create a SimpleFeature to the specifications used in this example.
   */
  private static SimpleFeature buildSimpleFeature(
      final SimpleFeatureType featureType,
      final String locationName,
      final Coordinate coordinate,
      final int population) {
    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);
    builder.set("locationName", locationName);
    builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));
    builder.set("population", population);
    return builder.buildFeature(locationName);
  }
}

================================================
FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/index/CustomIndexExample.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.index; import java.io.IOException; import java.util.UUID; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.filter.text.cql2.CQLException; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.CustomIndexStrategy; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.index.CustomIndex; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.google.common.collect.Lists; /** * This class is 
intended to provide a self-contained, easy-to-follow example of how a custom index
 * can be created. In this example, we will create a UUID index that can be used alongside a spatial
 * index in order to efficiently query features by a String UUID field that each feature has.
 */
public class CustomIndexExample {
  private DataStore dataStore;
  private SimpleFeatureType simpleFeatureType;
  private FeatureDataAdapter adapter;
  private Index spatialIndex;
  private Index customIndex;
  // Random UUIDs assigned to the test features; uuid2 is intentionally reused by two
  // features to demonstrate that multiple entries can share the same insertion ID.
  private final String uuid1 = UUID.randomUUID().toString();
  private final String uuid2 = UUID.randomUUID().toString();
  private final String uuid3 = UUID.randomUUID().toString();
  private final String uuid4 = UUID.randomUUID().toString();

  public static void main(final String[] args) throws IOException, CQLException {
    final CustomIndexExample example = new CustomIndexExample();
    example.run();
  }

  /**
   * Sets up the data store, the feature type, and both indices, then ingests and queries the data.
   */
  public void run() {
    // Create an in-memory data store to use with this example
    dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions());

    // Create the simple feature type for our data
    simpleFeatureType = getSimpleFeatureType();

    // Create an adapter for our features
    adapter = new FeatureDataAdapter(simpleFeatureType);

    // Create the spatial index
    spatialIndex = new SpatialIndexBuilder().createIndex();

    // Create our custom index using the UUID index strategy, keyed on the "uuid" attribute
    customIndex = new CustomIndex<>(new UUIDIndexStrategy("uuid"), "customIdx");

    // Add the type to the data store with the spatial and custom indices
    dataStore.addType(adapter, spatialIndex, customIndex);

    // Ingest the data into a spatial index and our custom index
    ingestData();

    // Perform a spatial query on the data
    querySpatial();

    // Perform a UUID query on the data
    queryUUID();
  }

  /**
   * Writes five example features; every write is indexed by both the spatial and the custom index.
   */
  public void ingestData() {
    try (Writer<SimpleFeature> writer = dataStore.createWriter(adapter.getTypeName())) {
      writer.write(buildSimpleFeature("feature1", new Coordinate(0, 0), uuid1));
      writer.write(buildSimpleFeature("feature2", new Coordinate(1, 1), uuid2));
      writer.write(buildSimpleFeature("feature3", new Coordinate(2, 2), uuid3));
      writer.write(buildSimpleFeature("feature4", new Coordinate(3, 3), uuid4));
      // Entries with the same UUID will be placed next to each other in the index
      writer.write(buildSimpleFeature("feature5", new Coordinate(4, 4), uuid2));
    }
  }

  /**
   * Queries the spatial index with a CQL bounding box that covers features 2, 3, and 4.
   */
  public void querySpatial() {
    System.out.println("Executing query, expecting to match feature2, feature3, and feature4...");
    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
    try (final CloseableIterator<SimpleFeature> iterator =
        dataStore.query(
            bldr.indexName(spatialIndex.getName()).addTypeName(adapter.getTypeName()).constraints(
                bldr.constraintsFactory().cqlConstraints(
                    "BBOX(geometry,0.5,0.5,3.5,3.5)")).build())) {
      while (iterator.hasNext()) {
        System.out.println("Query match: " + iterator.next().getID());
      }
    }
  }

  /**
   * Queries the custom index by exact UUID, first for uuid1 (one match) and then for uuid2 (two
   * matches, since feature2 and feature5 share it).
   */
  public void queryUUID() {
    System.out.println("Executing query, expecting to match feature1 with UUID [" + uuid1 + "]...");
    VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
    // When querying our custom index, we can provide our custom constraints by using the
    // customConstraints function on the constraints factory.
    try (final CloseableIterator<SimpleFeature> iterator =
        dataStore.query(
            bldr.indexName(customIndex.getName()).addTypeName(adapter.getTypeName()).constraints(
                bldr.constraintsFactory().customConstraints(new UUIDConstraints(uuid1))).build())) {
      while (iterator.hasNext()) {
        System.out.println("Query match: " + iterator.next().getID());
      }
    }
    System.out.println(
        "Executing query, expecting to match feature2 and feature5 with UUID [" + uuid2 + "]...");
    bldr = VectorQueryBuilder.newBuilder();
    try (final CloseableIterator<SimpleFeature> iterator =
        dataStore.query(
            bldr.indexName(customIndex.getName()).addTypeName(adapter.getTypeName()).constraints(
                bldr.constraintsFactory().customConstraints(new UUIDConstraints(uuid2))).build())) {
      while (iterator.hasNext()) {
        System.out.println("Query match: " + iterator.next().getID());
      }
    }
  }

  /**
   * Builds the feature type used by this example: a non-nullable geometry and a non-nullable
   * String "uuid" attribute (the attribute indexed by the custom index).
   */
  private SimpleFeatureType getSimpleFeatureType() {
    final String NAME = "ExampleType";
    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();
    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();
    sftBuilder.setName(NAME);
    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry"));
    sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("uuid"));
    return sftBuilder.buildFeatureType();
  }

  /**
   * Builds a single point feature with the given ID, location, and UUID attribute value.
   */
  private SimpleFeature buildSimpleFeature(
      final String featureId,
      final Coordinate coordinate,
      final String uuid) {
    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(simpleFeatureType);
    builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));
    builder.set("uuid", uuid);
    return builder.buildFeature(featureId);
  }

  /**
   * This index strategy will index data by using an attribute of a simple feature as the sort key
   * in the index. This implementation allows the user to supply the field name for the UUID field
   * to offer some flexibility.
   */
  public static class UUIDIndexStrategy implements
      CustomIndexStrategy<SimpleFeature, UUIDConstraints> {
    private String uuidField;

    public UUIDIndexStrategy() {}

    public UUIDIndexStrategy(final String uuidField) {
      this.uuidField = uuidField;
    }

    /**
     * Store any data needed to persist this index strategy.
     */
    @Override
    public byte[] toBinary() {
      return StringUtils.stringToBinary(uuidField);
    }

    /**
     * Load the index strategy UUID field from binary.
     */
    @Override
    public void fromBinary(final byte[] bytes) {
      uuidField = StringUtils.stringFromBinary(bytes);
    }

    /**
     * The method supplies all of the insertion IDs needed for a given entry. It is possible to
     * insert the same SimpleFeature multiple times in the index under different insertion IDs, but
     * for this case we only need to use the UUID as the lone insertion ID.
     *
     * @param entry the feature to generate sort keys for.
     * @return the insertion IDs for the given feature
     */
    @Override
    public InsertionIds getInsertionIds(final SimpleFeature entry) {
      final String featureUUID = (String) entry.getAttribute(uuidField);
      return new InsertionIds(Lists.newArrayList(StringUtils.stringToBinary(featureUUID)));
    }

    /**
     * This method generates the query ranges to be used by the data store implementation to
     * retrieve features from the database. For this example, we are only interested in querying for
     * an exact UUID, so we can simply use the desired UUID as the query range.
     */
    @Override
    public QueryRanges getQueryRanges(final UUIDConstraints constraints) {
      final byte[] sortKey = StringUtils.stringToBinary(constraints.uuid());
      return new QueryRanges(new ByteArrayRange(sortKey, sortKey));
    }

    @Override
    public Class<UUIDConstraints> getConstraintsClass() {
      return UUIDConstraints.class;
    }
  }

  /**
   * This class serves as constraints for our UUID index strategy. Since we only need to query for
   * exact UUIDs, the constraints class is fairly straightforward. We only need a single UUID String
   * to use as our constraint.
   */
  public static class UUIDConstraints implements Persistable {
    private String uuid;

    public UUIDConstraints() {}

    public UUIDConstraints(final String uuid) {
      this.uuid = uuid;
    }

    public String uuid() {
      return uuid;
    }

    /**
     * Serialize any data needed to persist this constraint.
     */
    @Override
    public byte[] toBinary() {
      return StringUtils.stringToBinary(uuid);
    }

    /**
     * Load the UUID constraint from binary.
     */
    @Override
    public void fromBinary(final byte[] bytes) {
      uuid = StringUtils.stringFromBinary(bytes);
    }
  }
}
================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/SimpleIngest.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.ingest; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.concurrent.TimeUnit; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class SimpleIngest { public static final String FEATURE_NAME = "GridPoint"; public static final String GEOMETRY_FIELD = "geometry"; public static void main(final String[] args) { final SimpleIngest si = new SimpleIngest(); final DataStore geowaveDataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); si.writeExampleData(geowaveDataStore); System.out.println("Finished ingesting data"); } /** * Here we will change the 
ingest mechanism to use a producer/consumer pattern */ protected void writeExampleData(final DataStore geowaveDataStore) { // In order to store data we need to determine the type of data store final SimpleFeatureType point = createPointFeatureType(); // This a factory class that builds simple feature objects based on the // type passed final SimpleFeatureBuilder pointBuilder = new SimpleFeatureBuilder(point); // This is an adapter, that is needed to describe how to persist the // data type passed final GeotoolsFeatureDataAdapter dataTypeAdapter = createDataAdapter(point); // This describes how to index the data final Index index = createSpatialIndex(); geowaveDataStore.addType(dataTypeAdapter, index); // make sure to close the index writer (a try-with-resources block such // as this automatically closes the resource when exiting the block) try (Writer indexWriter = geowaveDataStore.createWriter(dataTypeAdapter.getTypeName())) { // build a grid of points across the globe at each whole // lattitude/longitude intersection for (final SimpleFeature sft : getGriddedFeatures(pointBuilder, 1000)) { indexWriter.write(sft); } } } public static List getGriddedFeatures( final SimpleFeatureBuilder pointBuilder, final int firstFeatureId) { // features require a featureID - this should be uniqiue per data type // adapter ID // (i.e. 
writing a new feature with the same feature id for the same // data type adapter will // overwrite the existing feature) int featureId = firstFeatureId; final List feats = new ArrayList<>(); // January 1 00:00:00, 2021 final long epochTime = 1609459200000L; for (int longitude = -180; longitude <= 180; longitude += 5) { for (int latitude = -90; latitude <= 90; latitude += 5) { pointBuilder.set( GEOMETRY_FIELD, GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude))); pointBuilder.set( "TimeStamp", new Date( epochTime + TimeUnit.DAYS.toMillis(longitude + 180) + TimeUnit.MINUTES.toMillis(latitude + 90))); pointBuilder.set("Latitude", latitude); pointBuilder.set("Longitude", longitude); // Note since trajectoryID and comment are marked as nillable we // don't need to set them (they default to null). final SimpleFeature sft = pointBuilder.buildFeature(String.valueOf(featureId)); feats.add(sft); featureId++; } } return feats; } public static SimpleFeature createRandomFeature( final SimpleFeatureBuilder pointBuilder, final int featureId) { final double latitude = (Math.random() * 340) - 170; final double longitude = (Math.random() * 160) - 80; pointBuilder.set( GEOMETRY_FIELD, GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(latitude, longitude))); pointBuilder.set("TimeStamp", new Date()); pointBuilder.set("Latitude", latitude); pointBuilder.set("Longitude", longitude); return pointBuilder.buildFeature(String.valueOf(featureId)); } /** * * The dataadapter interface describes how to serialize a data type. Here we are using an * implementation that understands how to serialize OGC SimpleFeature types. 
* * @param sft simple feature type you want to generate an adapter from * @return data adapter that handles serialization of the sft simple feature type */ public static GeotoolsFeatureDataAdapter createDataAdapter( final SimpleFeatureType sft) { return new FeatureDataAdapter(sft); } /** * * We need an index model that tells us how to index the data - the index determines -What * fields are indexed -The precision of the index -The range of the index (min/max values) -The * range type (bounded/unbounded) -The number of "levels" (different precisions, needed when the * values indexed has ranges on any dimension) * * @return GeoWave index for a default SPATIAL index */ public static Index createSpatialIndex() { // Reasonable values for spatial and spatial-temporal are provided // through index builders. // They are intended to be a reasonable starting place - though creating // a custom index may provide better // performance as the distribution/characterization of the data is well // known. There are many such customizations available through setters // on the builder. // for example to create a spatial-temporal index with 8 randomized // partitions (pre-splits on accumulo or hbase) and a temporal bias // (giving more precision to time than space) you could do something // like this: // @formatter:off // return new SpatialTemporalIndexBuilder().setBias(Bias.TEMPORAL).setNumPartitions(8); // @formatter:on return new SpatialIndexBuilder().createIndex(); } public static Index createSpatialTemporalIndex() { return new SpatialTemporalIndexBuilder().createIndex(); } /** * * A simple feature is just a mechanism for defining attributes (a feature is just a collection * of attributes + some metadata) We need to describe what our data looks like so the serializer * (FeatureDataAdapter for this case) can know how to store it. Features/Attributes are also a * general convention of GIS systems in general. 
* * @return Simple Feature definition for our demo point feature */ public static SimpleFeatureType createPointFeatureType() { final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); final AttributeTypeBuilder ab = new AttributeTypeBuilder(); // Names should be unique (at least for a given GeoWave namespace) - // think about names in the same sense as a full classname // The value you set here will also persist through discovery - so when // people are looking at a dataset they will see the // type names associated with the data. builder.setName(FEATURE_NAME); // The data is persisted in a sparse format, so if data is nullable it // will not take up any space if no values are persisted. // Data which is included in the primary index (in this example // lattitude/longtiude) can not be null // Calling out latitude an longitude separately is not strictly needed, // as the geometry contains that information. But it's // convienent in many use cases to get a text representation without // having to handle geometries. builder.add(ab.binding(Geometry.class).nillable(false).buildDescriptor(GEOMETRY_FIELD)); builder.add(ab.binding(Date.class).nillable(true).buildDescriptor("TimeStamp")); builder.add(ab.binding(Double.class).nillable(false).buildDescriptor("Latitude")); builder.add(ab.binding(Double.class).nillable(false).buildDescriptor("Longitude")); builder.add(ab.binding(String.class).nillable(true).buildDescriptor("TrajectoryID")); builder.add(ab.binding(String.class).nillable(true).buildDescriptor("Comment")); return builder.buildFeatureType(); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/bulk/GeonamesDataFileInputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.ingest.bulk; import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.LineRecordReader; /** * GeoNames provides exports by country (see http://download.geonames.org/export/dump/). These files contain one tab-delimited entry per * line. */ public class GeonamesDataFileInputFormat extends FileInputFormat { @Override public RecordReader createRecordReader( final InputSplit split, final TaskAttemptContext context) throws IOException, InterruptedException { return new LineRecordReader(); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/bulk/GeonamesSimpleFeatureType.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.ingest.bulk; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeatureType; /** Provides a static method to obtain an instance of a SimpleFeatureType for Geonames data */ public class GeonamesSimpleFeatureType { private static final String FEATURE_NAME = "GeonamesPoint"; private static SimpleFeatureType simpleFeatureType; private GeonamesSimpleFeatureType() { // prevent instantiation } public static SimpleFeatureType getInstance() { if (simpleFeatureType == null) { simpleFeatureType = createGeonamesPointType(); } return simpleFeatureType; } private static SimpleFeatureType createGeonamesPointType() { final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder(); sftBuilder.setName(FEATURE_NAME); sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry")); sftBuilder.add(atBuilder.binding(Double.class).nillable(false).buildDescriptor("Latitude")); sftBuilder.add(atBuilder.binding(Double.class).nillable(false).buildDescriptor("Longitude")); sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("Location")); return sftBuilder.buildFeatureType(); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/bulk/SimpleFeatureToAccumuloKeyValueMapper.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.ingest.bulk; import java.io.IOException; import java.util.List; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.KeyValue; import org.apache.accumulo.core.data.Value; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.data.visibility.UnconstrainedVisibilityHandler; import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl; import org.locationtech.geowave.datastore.accumulo.util.AccumuloKeyValuePairGenerator; import org.locationtech.jts.geom.Coordinate; import org.opengis.feature.simple.SimpleFeature; public class SimpleFeatureToAccumuloKeyValueMapper extends Mapper { private final DataTypeAdapter adapter = new FeatureDataAdapter(GeonamesSimpleFeatureType.getInstance()); // this is not the most robust way 
to assign an internal adapter ID // but is simple and will work in a majority of cases private final InternalDataAdapter internalAdapter = adapter.asInternalAdapter( InternalAdapterStoreImpl.getLazyInitialAdapterId(adapter.getTypeName())); private final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); private final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(internalAdapter, index); private final VisibilityHandler visibilityHandler = new UnconstrainedVisibilityHandler(); private final AccumuloKeyValuePairGenerator generator = new AccumuloKeyValuePairGenerator<>(internalAdapter, index, indexMapping, visibilityHandler); private SimpleFeature simpleFeature; private List keyValuePairs; private final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(GeonamesSimpleFeatureType.getInstance()); private String[] geonamesEntryTokens; private String geonameId; private double longitude; private double latitude; private String location; @Override protected void map(final LongWritable key, final Text value, final Context context) throws IOException, InterruptedException { simpleFeature = parseGeonamesValue(value); // build Geowave-formatted Accumulo [Key,Value] pairs keyValuePairs = generator.constructKeyValuePairs(simpleFeature); // output each [Key,Value] pair to shuffle-and-sort phase where we rely // on MapReduce to sort by Key for (final KeyValue accumuloKeyValuePair : keyValuePairs) { context.write(accumuloKeyValuePair.getKey(), accumuloKeyValuePair.getValue()); } } private SimpleFeature parseGeonamesValue(final Text value) { geonamesEntryTokens = value.toString().split("\\t"); // Exported Geonames entries are // tab-delimited geonameId = geonamesEntryTokens[0]; location = geonamesEntryTokens[1]; latitude = Double.parseDouble(geonamesEntryTokens[4]); longitude = Double.parseDouble(geonamesEntryTokens[5]); return buildSimpleFeature(geonameId, longitude, latitude, location); } private 
SimpleFeature buildSimpleFeature( final String featureId, final double longitude, final double latitude, final String location) { builder.set( "geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude))); builder.set("Latitude", latitude); builder.set("Longitude", longitude); builder.set("Location", location); return builder.buildFeature(featureId); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/plugin/CustomIngestFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.examples.ingest.plugin;

import org.locationtech.geowave.adapter.vector.ingest.MinimalSimpleFeatureIngestFormat;
import org.locationtech.geowave.adapter.vector.ingest.MinimalSimpleFeatureIngestPlugin;
import org.locationtech.geowave.core.store.ingest.IngestFormatOptions;

/**
 * In order for the custom ingest plugin to be usable via the GeoWave CLI, it must be registered as
 * an available format. This can be done by extending the {@link MinimalSimpleFeatureIngestFormat}
 * class and registering the new class.
 *
 * The ingest format can be registered by adding it to
 * `src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi`.
 */
public class CustomIngestFormat extends MinimalSimpleFeatureIngestFormat {

  // Name used to select this format from the CLI.
  @Override
  public String getIngestFormatName() {
    return "geonames";
  }

  // Human-readable description shown in CLI format listings.
  @Override
  public String getIngestFormatDescription() {
    return "Example custom ingest format for geonames text file";
  }

  // Factory hook: the framework calls this to obtain the plugin that parses the data files.
  @Override
  protected MinimalSimpleFeatureIngestPlugin newPluginInstance(final IngestFormatOptions options) {
    return new CustomIngestPlugin();
  }
}
================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/plugin/CustomIngestPlugin.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.ingest.plugin; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.locationtech.geowave.adapter.vector.ingest.MinimalSimpleFeatureIngestPlugin; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.examples.ingest.bulk.GeonamesSimpleFeatureType; import org.locationtech.jts.geom.Coordinate; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; /** * The ingest plugin does the work of translating a URL to a set of SimpleFeatures that can be * ingested into GeoWave. While the ingest system offers options for Avro serialization and ingest * from HDFS, the {@link MinimalSimpleFeatureIngestPlugin} is the simplest way to create a plugin * that ingests data that uses a custom format from the local file system. * * For examples of more complex ingest plugins, including ones that support Avro serialization and * mapreduce ingest, see the formats that are built in to GeoWave. */ public class CustomIngestPlugin extends MinimalSimpleFeatureIngestPlugin { /** * Overriding this method allows the plugin to automatically disregard any file that does not * match the given file extension. This can be useful as an early-out to avoid having to perform * further processing on the file to see if it's supported. 
If this method is not overriden, all * files that match the ingest URL will be checked for support. */ @Override public String[] getFileExtensionFilters() { return new String[] {"txt"}; } /** * In this example, we'll just assume that the provided file is valid for this format if we are * able to parse the first line as a valid entry. All files in the ingest directory that match the * file extension filters will be passed through this function before being processed. */ @Override public boolean supportsFile(final URL file) { try { try (final GeonamesFeatureReader reader = new GeonamesFeatureReader(file)) { reader.hasNext(); } } catch (final IOException | RuntimeException e) { return false; } return true; } /** * Return all feature types that will be used by the plugin. */ @Override protected SimpleFeatureType[] getTypes() { return new SimpleFeatureType[] {GeonamesSimpleFeatureType.getInstance()}; } /** * Return all of the features from the given URL */ @Override protected CloseableIterator getFeatures(final URL input) { try { return new GeonamesFeatureReader(input); } catch (final IOException e) { throw new RuntimeException("Unable to read features from URL " + input.toString() + ".", e); } } /** * This class reads features line by line from a text file and converts them to SimpleFeatures. 
*/ private static class GeonamesFeatureReader implements CloseableIterator { private final BufferedReader reader; private SimpleFeature next = null; private final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(GeonamesSimpleFeatureType.getInstance()); public GeonamesFeatureReader(final URL input) throws IOException { final InputStream inputStream = input.openStream(); final InputStreamReader inputStreamReader = new InputStreamReader(inputStream, StringUtils.UTF8_CHARSET); reader = new BufferedReader(inputStreamReader); } private SimpleFeature parseEntry(final String entry) { final String[] tokens = entry.split("\\t"); // Exported Geonames entries are tab-delimited final String location = tokens[1]; final double latitude = Double.parseDouble(tokens[4]); final double longitude = Double.parseDouble(tokens[5]); builder.set( "geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude))); builder.set("Latitude", latitude); builder.set("Longitude", longitude); builder.set("Location", location); return builder.buildFeature(tokens[0]); } private void computeNext() { try { final String nextLine = reader.readLine(); if (nextLine != null) { next = parseEntry(nextLine); } } catch (final IOException e) { throw new RuntimeException("Encountered an error while reading Geonames.", e); } } @Override public boolean hasNext() { if (next == null) { computeNext(); } return next != null; } @Override public SimpleFeature next() { if (next == null) { computeNext(); } final SimpleFeature retValue = next; next = null; return retValue; } @Override public void close() { try { reader.close(); } catch (final IOException e) { throw new RuntimeException("Encountered an error while closing Geonames file.", e); } } } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/ingest/plugin/CustomIngestPluginExample.java ================================================ /** * Copyright (c) 2013-2022 
Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.ingest.plugin; import java.io.File; import java.net.URISyntaxException; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IngestOptions; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.opengis.feature.simple.SimpleFeature; /** * This class provides an example of how to ingest data that's in a non-standard format using a * custom ingest plugin that transforms the data into SimpleFeatures. 
*/ public class CustomIngestPluginExample { private DataStore dataStore; private Index spatialIndex; public static void main(final String[] args) throws URISyntaxException { final CustomIngestPluginExample example = new CustomIngestPluginExample(); example.run(); } public void run() throws URISyntaxException { // Create an in-memory data store to use with this example dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); // Create the spatial index spatialIndex = new SpatialIndexBuilder().createIndex(); // Configure ingest options to use our custom plugin final IngestOptions.Builder ingestOptions = IngestOptions.newBuilder(); // Set our custom ingest plugin as the format to use for the ingest ingestOptions.format(new CustomIngestPlugin()); // Get the path of the geonames text file from the example resources final File geonamesFile = new File(CustomIngestPlugin.class.getClassLoader().getResource("geonames.txt").toURI()); // Ingest the data dataStore.ingest(geonamesFile.getAbsolutePath(), ingestOptions.build(), spatialIndex); // Perform a query on the data try (final CloseableIterator iterator = dataStore.query(null)) { while (iterator.hasNext()) { System.out.println("Query match: " + iterator.next().getAttribute("Location")); } } } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/query/CQLQueryExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.query; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.filter.text.cql2.CQLException; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; /** * This class is intended to provide a self-contained, easy-to-follow example of a few GeoTools * queries against GeoWave. For simplicity, a MiniAccumuloCluster is spun up and a few points from * the DC area are ingested (Washington Monument, White House, FedEx Field). 
Two queries are * executed against this data set. */ public class CQLQueryExample { private static DataStore dataStore; private static final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); // Points (to be ingested into GeoWave Data Store) private static final Coordinate washingtonMonument = new Coordinate(-77.0352, 38.8895); private static final Coordinate whiteHouse = new Coordinate(-77.0366, 38.8977); private static final Coordinate fedexField = new Coordinate(-76.8644, 38.9078); private static final Coordinate bayBridgeAirport = new Coordinate(-76.350677, 38.9641511); private static final Coordinate wideWater = new Coordinate(-77.3384112, 38.416091); private static final Map cannedData = new HashMap<>(); static { cannedData.put("Washington Monument", washingtonMonument); cannedData.put("White House", whiteHouse); cannedData.put("FedEx Field", fedexField); cannedData.put("Bay Bridge Airport", bayBridgeAirport); cannedData.put("Wide Water Beach", wideWater); } static final FeatureDataAdapter ADAPTER = new FeatureDataAdapter(getPointSimpleFeatureType()); public static void main(final String[] args) throws IOException, CQLException { dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); // ingest 3 points represented as SimpleFeatures: Washington Monument, // White House, FedEx Field ingestCannedData(); // execute a query for a bounding box executeCQLQuery(); } private static void executeCQLQuery() throws IOException, CQLException { System.out.println("Executing query, expecting to match two points..."); final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); try (final CloseableIterator iterator = dataStore.query( bldr.indexName(index.getName()).addTypeName(ADAPTER.getTypeName()).constraints( bldr.constraintsFactory().cqlConstraints( "BBOX(geometry,-77.6167,38.6833,-76.6,38.9200) and locationName like 'W%'")).build())) { while (iterator.hasNext()) { System.out.println("Query match: " + 
iterator.next().getID()); } } } private static void ingestCannedData() throws IOException { final List points = new ArrayList<>(); System.out.println("Building SimpleFeatures from canned data set..."); for (final Entry entry : cannedData.entrySet()) { System.out.println("Added point: " + entry.getKey()); points.add(buildSimpleFeature(entry.getKey(), entry.getValue())); } System.out.println("Ingesting canned data..."); dataStore.addType(ADAPTER, index); try (Writer indexWriter = dataStore.createWriter(ADAPTER.getTypeName())) { for (final SimpleFeature sf : points) { // indexWriter.write(sf); } } System.out.println("Ingest complete."); } private static SimpleFeatureType getPointSimpleFeatureType() { final String NAME = "PointSimpleFeatureType"; final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder(); sftBuilder.setName(NAME); sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("locationName")); sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry")); return sftBuilder.buildFeatureType(); } private static SimpleFeature buildSimpleFeature( final String locationName, final Coordinate coordinate) { final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(getPointSimpleFeatureType()); builder.set("locationName", locationName); builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate)); return builder.buildFeature(locationName); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/query/SpatialQueryExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.query; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.filter.text.cql2.CQLException; import org.geotools.geometry.jts.JTSFactoryFinder; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKTReader; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is intended to provide a few examples on running Geowave queries of 
different types: * 1- Querying by polygon a set of points. 2- Filtering on attributes of features using CQL queries * 3- Ingesting polygons, and running polygon intersect queries. You can check all points, * geometries and query accuracy in a more visual manner @ http://geojson.io/ */ public class SpatialQueryExample { private static Logger log = LoggerFactory.getLogger(SpatialQueryExample.class); private static DataStore dataStore; public static void main(final String[] args) throws AccumuloSecurityException, AccumuloException, ParseException, CQLException, IOException { final SpatialQueryExample example = new SpatialQueryExample(); log.info("Setting up datastores"); dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); log.info("Running point query examples"); example.runPointExamples(); log.info("Running polygon query examples"); example.runPolygonExamples(); } /** * We'll run our point related operations. The data ingested and queried is single point based, * meaning the index constructed will be based on a point. 
*/ private void runPointExamples() throws ParseException, CQLException, IOException { ingestPointData(); pointQuery(); } private void ingestPointData() { log.info("Ingesting point data"); ingestPointBasicFeature(); ingestPointComplexFeature(); log.info("Point data ingested"); } private void ingest( final FeatureDataAdapter adapter, final Index index, final List features) { dataStore.addType(adapter, index); try (Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) { for (final SimpleFeature sf : features) { indexWriter.write(sf); } } } private void ingestPointBasicFeature() { // First, we'll build our first kind of SimpleFeature, which we'll call // "basic-feature" // We need the type builder to build the feature type final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); // AttributeTypeBuilder for the attributes of the SimpleFeature final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder(); // Here we're setting the SimpleFeature name. Later on, we'll be able to // query GW just by this particular feature. sftBuilder.setName("basic-feature"); // Add the attributes to the feature // Add the geometry attribute, which is mandatory for GeoWave to be able // to construct an index out of the SimpleFeature sftBuilder.add(attrBuilder.binding(Point.class).nillable(false).buildDescriptor("geometry")); // Add another attribute just to be able to filter by it in CQL sftBuilder.add(attrBuilder.binding(String.class).nillable(false).buildDescriptor("filter")); // Create the SimpleFeatureType final SimpleFeatureType sfType = sftBuilder.buildFeatureType(); // We need the adapter for all our operations with GeoWave final FeatureDataAdapter sfAdapter = new FeatureDataAdapter(sfType); // Now we build the actual features. We'll create two points. 
// First point final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType); sfBuilder.set( "geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint( new Coordinate(-80.211181640625, 25.848101000701597))); sfBuilder.set("filter", "Basic-Stadium"); // When calling buildFeature, we need to pass an unique id for that // feature, or it will be overwritten. final SimpleFeature basicPoint1 = sfBuilder.buildFeature("1"); // Construct the second feature. sfBuilder.set( "geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(-80.191360, 25.777804))); sfBuilder.set("filter", "Basic-College"); final SimpleFeature basicPoint2 = sfBuilder.buildFeature("2"); final ArrayList features = new ArrayList<>(); features.add(basicPoint1); features.add(basicPoint2); // Ingest the data. For that purpose, we need the feature adapter, // the index type (the default spatial index is used here), // and an iterator of SimpleFeature ingest(sfAdapter, new SpatialIndexBuilder().createIndex(), features); } /** We're going to ingest a more complete simple feature. */ private void ingestPointComplexFeature() { // First, we'll build our second kind of SimpleFeature, which we'll call // "complex-feature" // We need the type builder to build the feature type final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); // AttributeTypeBuilder for the attributes of the SimpleFeature final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder(); // Here we're setting the SimpleFeature name. Later on, we'll be able to // query GW just by this particular feature. 
sftBuilder.setName("complex-feature"); // Add the attributes to the feature // Add the geometry attribute, which is mandatory for GeoWave to be able // to construct an index out of the SimpleFeature sftBuilder.add(attrBuilder.binding(Point.class).nillable(false).buildDescriptor("geometry")); // Add another attribute just to be able to filter by it in CQL sftBuilder.add(attrBuilder.binding(String.class).nillable(false).buildDescriptor("filter")); // Add more attributes to use with CQL filtering later on. sftBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor("latitude")); sftBuilder.add(attrBuilder.binding(Double.class).nillable(false).buildDescriptor("longitude")); // Create the SimpleFeatureType final SimpleFeatureType sfType = sftBuilder.buildFeatureType(); // We need the adapter for all our operations with GeoWave final FeatureDataAdapter sfAdapter = new FeatureDataAdapter(sfType); // Now we build the actual features. We'll create two more points. // First point final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType); sfBuilder.set( "geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(-80.193388, 25.780538))); sfBuilder.set("filter", "Complex-Station"); sfBuilder.set("latitude", 25.780538); sfBuilder.set("longitude", -80.193388); // When calling buildFeature, we need to pass an unique id for that // feature, or it will be overwritten. final SimpleFeature basicPoint1 = sfBuilder.buildFeature("1"); // Construct the second feature. sfBuilder.set( "geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint( new Coordinate(-118.26713562011719, 33.988349152677955))); sfBuilder.set("filter", "Complex-LA"); sfBuilder.set("latitude", 33.988349152677955); sfBuilder.set("longitude", -118.26713562011719); final SimpleFeature basicPoint2 = sfBuilder.buildFeature("2"); final ArrayList features = new ArrayList<>(); features.add(basicPoint1); features.add(basicPoint2); // Ingest the data. 
For that purpose, we need the feature adapter, // the index type (the default spatial index is used here), // and an iterator of SimpleFeature ingest(sfAdapter, new SpatialIndexBuilder().createIndex(), features); /** After ingest, a single point might look like this in Accumulo. */ // \x1F\x11\xCB\xFC\xB6\xEFT\x00\xFFcomplex_feature4\x00\x00\x00\x0E\x00\x00\x00\x01\x00\x00\x00\x00 // complex_feature:filter [] Complex-LA // \x1F\x11\xCB\xFC\xB6\xEFT\x00\xFFcomplex_feature4\x00\x00\x00\x0E\x00\x00\x00\x01\x00\x00\x00\x00 // complex_feature:geom\x00\x00 [] // \x00\x00\x00\x00\x01\xC0]\x91\x18\xC0\x00\x00\x00@@\xFE\x829\x9B\xE3\xFC // \x1F\x11\xCB\xFC\xB6\xEFT\x00\xFFcomplex_feature4\x00\x00\x00\x0E\x00\x00\x00\x01\x00\x00\x00\x00 // complex_feature:latitude [] @@\xFE\x829\x9B\xE3\xFC // \x1F\x11\xCB\xFC\xB6\xEFT\x00\xFFcomplex_feature\x00\x00\x00\x0E\x00\x00\x00\x01\x00\x00\x00\x00 // complex_feature:longitude [] \xC0]\x91\x18\xC0\x00\x00\x } /** This query will use a specific Bounding Box, and will find only 1 point. */ private void pointQuery() throws ParseException, IOException { log.info("Running Point Query Case 2"); // First, we need to obtain the adapter for the SimpleFeature we want to // query. // We'll query complex-feature in this example. // Obtain adapter for our "complex-feature" type final String typeName = "complex-feature"; // Define the geometry to query. We'll find all points that fall inside // that geometry. final String queryPolygonDefinition = "POLYGON (( " + "-118.50059509277344 33.75688594085081, " + "-118.50059509277344 34.1521587488017, " + "-117.80502319335938 34.1521587488017, " + "-117.80502319335938 33.75688594085081, " + "-118.50059509277344 33.75688594085081" + "))"; final Geometry queryPolygon = new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition); // Perform the query.Parameters are /** * 1- Adapter previously obtained from the feature name. 2- Default spatial index. 
3- A * SpatialQuery, which takes the query geometry - aka Bounding box 4- Filters. For this example, * no filter is used. 5- Limit. Same as standard SQL limit. 0 is no limits. 6- authorizations. * For our example, "root" works. In a real , whatever authorization is associated to the user * in question. */ int count = 0; final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); try (final CloseableIterator iterator = dataStore.query( bldr.addTypeName(typeName).indexName("SPATIAL_IDX").addAuthorization( "root").constraints( bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints( queryPolygon).build()).build())) { while (iterator.hasNext()) { final SimpleFeature sf = iterator.next(); log.info( "Obtained SimpleFeature " + sf.getName().toString() + " - " + sf.getAttribute("filter")); count++; System.out.println("Query match: " + sf.getID()); } log.info("Should have obtained 1 feature. -> " + (count == 1)); } } /** * We'll run our polygon related operations. The data ingested and queried is single polygon * based, meaning the index constructed will be based on a Geometry. */ private void runPolygonExamples() throws ParseException, IOException { ingestPolygonFeature(); polygonQuery(); } private void ingestPolygonFeature() throws ParseException { log.info("Ingesting polygon data"); // First, we'll build our third kind of SimpleFeature, which we'll call // "polygon-feature" // We need the type builder to build the feature type final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); // AttributeTypeBuilder for the attributes of the SimpleFeature final AttributeTypeBuilder attrBuilder = new AttributeTypeBuilder(); // Here we're setting the SimpleFeature name. Later on, we'll be able to // query GW just by this particular feature. 
sftBuilder.setName("polygon-feature"); // Add the attributes to the feature // Add the geometry attribute, which is mandatory for GeoWave to be able // to construct an index out of the SimpleFeature // Will be any arbitrary geometry; in this case, a polygon. sftBuilder.add(attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry")); // Add another attribute just to be able to filter by it in CQL sftBuilder.add(attrBuilder.binding(String.class).nillable(false).buildDescriptor("filter")); // Create the SimpleFeatureType final SimpleFeatureType sfType = sftBuilder.buildFeatureType(); // We need the adapter for all our operations with GeoWave final FeatureDataAdapter sfAdapter = new FeatureDataAdapter(sfType); // Now we build the actual features. We'll create one polygon. // First point final SimpleFeatureBuilder sfBuilder = new SimpleFeatureBuilder(sfType); // For ease of use, we'll create the polygon geometry with WKT format. final String polygonDefinition = "POLYGON (( " + "-80.3045654296875 25.852426562716428, " + "-80.123291015625 25.808545671771615, " + "-80.19195556640625 25.7244467526159, " + "-80.34233093261719 25.772068899816585, " + "-80.3045654296875 25.852426562716428" + "))"; final Geometry geom = new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(polygonDefinition); sfBuilder.set("geometry", geom); sfBuilder.set("filter", "Polygon"); // When calling buildFeature, we need to pass an unique id for that // feature, or it will be overwritten. final SimpleFeature polygon = sfBuilder.buildFeature("1"); final ArrayList features = new ArrayList<>(); features.add(polygon); // Ingest the data. For that purpose, we need the feature adapter, // the index type (the default spatial index is used here), // and an iterator of SimpleFeature ingest(sfAdapter, new SpatialIndexBuilder().createIndex(), features); log.info("Polygon data ingested"); } /** This query will find a polygon/polygon intersection, returning one match. 
*/ private void polygonQuery() throws ParseException, IOException { log.info("Running Point Query Case 4"); // First, we need to obtain the adapter for the SimpleFeature we want to // query. // We'll query polygon-feature in this example. // Obtain adapter for our "polygon-feature" type final String typeName = "polygon-feature"; // Define the geometry to query. We'll find all polygons that intersect // with this geometry. final String queryPolygonDefinition = "POLYGON (( " + "-80.4037857055664 25.81596330265488, " + "-80.27915954589844 25.788144792391982, " + "-80.34370422363281 25.8814655232439, " + "-80.44567108154297 25.896291175546626, " + "-80.4037857055664 25.81596330265488" + "))"; final Geometry queryPolygon = new WKTReader(JTSFactoryFinder.getGeometryFactory()).read(queryPolygonDefinition); // Perform the query.Parameters are /** * 1- Adapter previously obtained from the feature name. 2- Default spatial index. 3- A * SpatialQuery, which takes the query geometry - aka Bounding box 4- Filters. For this example, * no filter is used. 5- Limit. Same as standard SQL limit. 0 is no limits. 6- authorizations. * For our example, "root" works. In a real , whatever authorization is associated to the user * in question. */ int count = 0; final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); try (final CloseableIterator iterator = dataStore.query( bldr.addTypeName(typeName).indexName("SPATIAL_IDX").addAuthorization( "root").constraints( bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints( queryPolygon).build()).build())) { while (iterator.hasNext()) { final SimpleFeature sf = iterator.next(); log.info( "Obtained SimpleFeature " + sf.getName().toString() + " - " + sf.getAttribute("filter")); count++; System.out.println("Query match: " + sf.getID()); } log.info("Should have obtained 1 feature. 
-> " + (count == 1)); } } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/query/SpatialTemporalQueryExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.query; import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; import java.util.Date; import java.util.List; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.referencing.operation.TransformException; 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is intended to provide a self-contained, easy-to-follow example of a few GeoTools * queries against GeoWave using Spatial Temporal Data. * *

For simplicity, a MiniAccumuloCluster is spun up and a few points from the DC area are * ingested (Washington Monument, White House, FedEx Field). Two queries are executed against this * data set. */ public class SpatialTemporalQueryExample { private static final Logger LOGGER = LoggerFactory.getLogger(SpatialTemporalQueryExample.class); private DataStore dataStore; private static final Index index = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); private static final FeatureDataAdapter adapter = new FeatureDataAdapter(getPointSimpleFeatureType()); // Points (to be ingested into GeoWave Data Store) private static final Coordinate washingtonMonument = new Coordinate(-77.0352, 38.8895); private static final Coordinate whiteHouse = new Coordinate(-77.0366, 38.8977); private static final Coordinate fedexField = new Coordinate(-76.8644, 38.9078); public SpatialTemporalQueryExample() {} public static void main(final String[] args) throws AccumuloException, AccumuloSecurityException, InterruptedException, IOException, ParseException, TransformException { new SpatialTemporalQueryExample().run(); } public void run() throws AccumuloException, AccumuloSecurityException, InterruptedException, IOException, ParseException, TransformException { dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); // ingest 3 points represented as SimpleFeatures: Washington Monument, // White House, FedEx Field ingestCannedData(); // execute a query for a large polygon executePolygonAndTimeRangeQuery(); } private void ingestCannedData() throws IOException { final List points = new ArrayList<>(); System.out.println("Building SimpleFeatures from canned data set..."); try { points.add( buildSimpleFeature( "Washington Monument 1", washingtonMonument, DateUtilities.parseISO("2005-05-15T20:32:56Z"), DateUtilities.parseISO("2005-05-15T21:32:56Z"))); points.add( buildSimpleFeature( "Washington Monument 2", washingtonMonument, 
DateUtilities.parseISO("2005-05-17T20:32:56Z"), DateUtilities.parseISO("2005-05-17T21:32:56Z"))); points.add( buildSimpleFeature( "White House 1", whiteHouse, DateUtilities.parseISO("2005-05-17T20:32:56Z"), DateUtilities.parseISO("2005-05-17T21:32:56Z"))); points.add( buildSimpleFeature( "White House 2", whiteHouse, DateUtilities.parseISO("2005-05-17T19:32:56Z"), DateUtilities.parseISO("2005-05-17T20:45:56Z"))); points.add( buildSimpleFeature( "Fedex 1", fedexField, DateUtilities.parseISO("2005-05-17T20:32:56Z"), DateUtilities.parseISO("2005-05-17T21:32:56Z"))); points.add( buildSimpleFeature( "Fedex 2", fedexField, DateUtilities.parseISO("2005-05-18T19:32:56Z"), DateUtilities.parseISO("2005-05-18T20:45:56Z"))); points.add( buildSimpleFeature( "White House 3", whiteHouse, DateUtilities.parseISO("2005-05-19T19:32:56Z"), DateUtilities.parseISO("2005-05-19T20:45:56Z"))); } catch (final Exception ex) { LOGGER.warn("Could not add points", ex); } System.out.println("Ingesting canned data..."); dataStore.addType(adapter, index); try (Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) { for (final SimpleFeature sf : points) { // indexWriter.write(sf); } } System.out.println("Ingest complete."); } private void executePolygonAndTimeRangeQuery() throws IOException, ParseException, TransformException { System.out.println("Executing query, expecting to match three points..."); VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); // Query equivalent to ECQL: // DWITHIN(geometry, POINT(-77.03521 38.8895), 13.7, kilometers) and // startTime after 2005-05-17T19:32:56Z and endTime before // 2005-05-17T22:32:56Z // // Notice the use of CompareOperations.CONTAINS. // By default, SpatialTemporalQuery and SpatialTemporalQuery use // CompareOperations.OVERLAPS // // To compose the polygon, this query creates a characteristic 'circle' // around center given a distance. 
// The method Geometry.buffer() works in degrees; a helper // method is available that uses metric units. The helper method // looses accuracy as the distance from the centroid grows and // the centroid moves closer the poles. final CloseableIterator iterator = dataStore.query( bldr.constraints( bldr.constraintsFactory().spatialTemporalConstraints().addTimeRange( DateUtilities.parseISO("2005-05-17T19:32:56Z"), DateUtilities.parseISO("2005-05-17T22:32:56Z")).spatialConstraints( GeometryUtils.buffer( GeometryUtils.getDefaultCRS(), GeometryUtils.GEOMETRY_FACTORY.createPoint( new Coordinate(-77.03521, 38.8895)), "meter", 13700).getKey()).spatialConstraintsCompareOperation( CompareOperation.CONTAINS).build()).build()); while (iterator.hasNext()) { System.out.println("Query match: " + iterator.next().getID()); } iterator.close(); System.out.println( "Executing query # 2 with multiple time ranges, expecting to match four points..."); bldr = VectorQueryBuilder.newBuilder(); final CloseableIterator iterator2 = dataStore.query( bldr.addTypeName(adapter.getTypeName()).indexName(index.getName()).constraints( bldr.constraintsFactory().spatialTemporalConstraints().addTimeRange( DateUtilities.parseISO("2005-05-17T19:32:56Z"), DateUtilities.parseISO("2005-05-17T22:32:56Z")).addTimeRange( DateUtilities.parseISO("2005-05-19T19:32:56Z"), DateUtilities.parseISO("2005-05-19T22:32:56Z")).spatialConstraints( GeometryUtils.buffer( GeometryUtils.getDefaultCRS(), GeometryUtils.GEOMETRY_FACTORY.createPoint( new Coordinate(-77.03521, 38.8895)), "meter", 13700).getKey()).spatialConstraintsCompareOperation( CompareOperation.CONTAINS).build()).build()); while (iterator2.hasNext()) { System.out.println("Query match: " + iterator2.next().getID()); } iterator2.close(); } private static SimpleFeatureType getPointSimpleFeatureType() { final String NAME = "PointSimpleFeatureType"; final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); final AttributeTypeBuilder atBuilder = new 
AttributeTypeBuilder(); sftBuilder.setName(NAME); sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("locationName")); sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry")); sftBuilder.add(atBuilder.binding(Date.class).nillable(false).buildDescriptor("startTime")); sftBuilder.add(atBuilder.binding(Date.class).nillable(false).buildDescriptor("endTime")); return sftBuilder.buildFeatureType(); } private static SimpleFeature buildSimpleFeature( final String locationName, final Coordinate coordinate, final Date startTime, final Date endTime) { final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(getPointSimpleFeatureType()); builder.set("locationName", locationName); builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate)); builder.set("startTime", startTime); builder.set("endTime", endTime); return builder.buildFeature(locationName); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/spark/GeoWaveRDDExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.spark; import java.io.IOException; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaSparkContext; import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader; import org.locationtech.geowave.analytic.spark.RDDOptions; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.cli.store.StoreLoader; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; public class GeoWaveRDDExample { public GeoWaveRDDExample() {} public boolean loadRddFromStore(final String[] args) { if (args.length < 1) { System.err.println("Missing required arg 'storename'"); return false; } final String storeName = args[0]; int minSplits = -1; int maxSplits = -1; QueryConstraints query = null; if (args.length > 1) { if (args[1].equals("--splits")) { if (args.length < 4) { System.err.println("USAGE: storename --splits min max"); return false; } minSplits = Integer.parseInt(args[2]); maxSplits = Integer.parseInt(args[3]); if (args.length > 4) { if (args[4].equals("--bbox")) { if (args.length 
< 9) { System.err.println("USAGE: storename --splits min max --bbox west south east north"); return false; } final double west = Double.parseDouble(args[5]); final double south = Double.parseDouble(args[6]); final double east = Double.parseDouble(args[7]); final double north = Double.parseDouble(args[8]); final Geometry bbox = new GeometryFactory().toGeometry(new Envelope(west, south, east, north)); query = new ExplicitSpatialQuery(bbox); } } } else if (args[1].equals("--bbox")) { if (args.length < 6) { System.err.println("USAGE: storename --bbox west south east north"); return false; } final double west = Double.parseDouble(args[2]); final double south = Double.parseDouble(args[3]); final double east = Double.parseDouble(args[4]); final double north = Double.parseDouble(args[5]); final Geometry bbox = new GeometryFactory().toGeometry(new Envelope(west, south, east, north)); query = new ExplicitSpatialQuery(bbox); } else { System.err.println("USAGE: storename --splits min max --bbox west south east north"); return false; } } try { DataStorePluginOptions inputStoreOptions = null; final StoreLoader inputStoreLoader = new StoreLoader(storeName); if (!inputStoreLoader.loadFromConfig(ConfigOptions.getDefaultPropertyFile())) { throw new IOException("Cannot find store name: " + inputStoreLoader.getStoreName()); } inputStoreOptions = inputStoreLoader.getDataStorePlugin(); final SparkConf sparkConf = new SparkConf(); sparkConf.setAppName("GeoWaveRDD"); sparkConf.setMaster("local"); final JavaSparkContext context = new JavaSparkContext(sparkConf); final RDDOptions rddOpts = new RDDOptions(); rddOpts.setQuery(QueryBuilder.newBuilder().constraints(query).build()); rddOpts.setMinSplits(minSplits); rddOpts.setMaxSplits(maxSplits); final JavaPairRDD javaRdd = GeoWaveRDDLoader.loadRDD(context.sc(), inputStoreOptions, rddOpts).getRawRDD(); System.out.println( "DataStore " + storeName + " loaded into RDD with " + javaRdd.count() + " features."); context.close(); } catch (final 
IOException e) { System.err.println(e.getMessage()); } return true; } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/CustomStatisticExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.stats; import java.io.IOException; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.filter.text.cql2.CQLException; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; /** * This class is intended to provide a self-contained, easy-to-follow example of how a custom * statistic can be created an used. The example statistic is a word count statistic that can count * the number of words in a string field across an entire data set. 
*/ public class CustomStatisticExample { private DataStore dataStore; private SimpleFeatureType simpleFeatureType; private FeatureDataAdapter adapter; private Index spatialIndex; public static void main(final String[] args) throws IOException, CQLException { final CustomStatisticExample example = new CustomStatisticExample(); example.run(); } public void run() { // Create an in-memory data store to use with this example dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); // Create the simple feature type for our data simpleFeatureType = getSimpleFeatureType(); // Create an adapter for our features adapter = new FeatureDataAdapter(simpleFeatureType); // Create the spatial index spatialIndex = new SpatialIndexBuilder().createIndex(); // Add the type to the data store with the spatial and custom indices dataStore.addType(adapter, spatialIndex); // Create a word count statistic on the `str` field of our type for all words final WordCountStatistic allWords = new WordCountStatistic(); allWords.setTypeName(adapter.getTypeName()); allWords.setFieldName("str"); allWords.setMinWordLength(0); allWords.setTag("ALL_WORDS"); // Create a word count statistic on the `str` field of our type for long words final WordCountStatistic longWords = new WordCountStatistic(); longWords.setTypeName(adapter.getTypeName()); longWords.setFieldName("str"); longWords.setMinWordLength(5); longWords.setTag("LONG_WORDS"); // Add the statistics dataStore.addStatistic(allWords, longWords); // Ingest the data into a spatial index ingestData(); // Get the statistics System.out.println("Total number of words: " + dataStore.getStatisticValue(allWords)); System.out.println("Total number of long words: " + dataStore.getStatisticValue(longWords)); // You can also get the actual statistics from the data store at a later time final WordCountStatistic stat = (WordCountStatistic) dataStore.getFieldStatistic( WordCountStatistic.STATS_TYPE, adapter.getTypeName(), "str", "ALL_WORDS"); 
System.out.println("ALL_WORDS Statistic: " + stat.toString()); } public void ingestData() { // Create features with string fields of various word lengths try (Writer writer = dataStore.createWriter(adapter.getTypeName())) { writer.write(buildSimpleFeature("feature1", new Coordinate(0, 0), "a set of words")); writer.write(buildSimpleFeature("feature2", new Coordinate(1, 1), "another set of words")); writer.write(buildSimpleFeature("feature3", new Coordinate(2, 2), "two words")); writer.write(buildSimpleFeature("feature4", new Coordinate(3, 3), "word")); writer.write( buildSimpleFeature( "feature5", new Coordinate(4, 4), "a long string with quite a few words to count")); } } private SimpleFeatureType getSimpleFeatureType() { final String NAME = "ExampleType"; final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder(); sftBuilder.setName(NAME); sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry")); sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("str")); return sftBuilder.buildFeatureType(); } private SimpleFeature buildSimpleFeature( final String featureId, final Coordinate coordinate, final String str) { final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(simpleFeatureType); builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate)); builder.set("str", str); return builder.buildFeature(featureId); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/ExampleRegisteredStatistics.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.stats; import org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI; import org.locationtech.geowave.examples.stats.WordCountStatistic.WordCountValue; /** * This class allows GeoWave to discover new statistics and binning strategies on the classpath. * This allows developers to create statistics that fit their use cases in the simplest way possible * without having to worry about the inner workings of the statistics system. * * When adding new statistics via a statistics registry, the registry class needs to be added to * `src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI`. */ public class ExampleRegisteredStatistics implements StatisticsRegistrySPI { @Override public RegisteredStatistic[] getRegisteredStatistics() { // Register the example word count statistic with some persistable IDs that aren't being used by // GeoWave. return new RegisteredStatistic[] { new RegisteredStatistic( WordCountStatistic.STATS_TYPE, WordCountStatistic::new, WordCountValue::new, (short) 20100, (short) 20101),}; } @Override public RegisteredBinningStrategy[] getRegisteredBinningStrategies() { // New binning strategies can also be registered using this interface return new RegisteredBinningStrategy[] {}; } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/SpatialBinningStatisticExample.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.examples.stats; import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.binning.SpatialBinningType; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.statistics.binning.SpatialFieldValueBinningStrategy; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.BinConstraints; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataStoreFactory; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryRequiredOptions; import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic; import org.locationtech.geowave.core.store.statistics.field.NumericStatsStatistic; import org.locationtech.geowave.core.store.statistics.field.Stats; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import 
org.opengis.feature.simple.SimpleFeatureType; import com.google.common.collect.ImmutableList; public class SpatialBinningStatisticExample { public static void main(final String[] args) { final SimpleFeatureType featureType = getSimpleFeatureType(); // Points (to be ingested into GeoWave Data Store) final List cannedFeatures = ImmutableList.of( buildSimpleFeature(featureType, "Loc1", new Coordinate(-77.0352, 38.8895), 12), buildSimpleFeature(featureType, "Loc2", new Coordinate(-77.0366, 38.8977), 13), buildSimpleFeature(featureType, "Loc3", new Coordinate(-76.8644, 38.9078), 8), buildSimpleFeature(featureType, "Loc4", new Coordinate(-76.350677, 38.9641511), 15), buildSimpleFeature(featureType, "Loc5", new Coordinate(-77.3384112, 38.416091), 7), buildSimpleFeature(featureType, "Loc6", new Coordinate(-67.0352, 28.8895), 3), buildSimpleFeature(featureType, "Loc7", new Coordinate(-67.0366, 28.8977), 99), buildSimpleFeature(featureType, "Loc8", new Coordinate(-66.8644, 28.9078), 0), buildSimpleFeature(featureType, "Loc9", new Coordinate(-66.350677, 28.9641511), 1), buildSimpleFeature(featureType, "Loc10", new Coordinate(-67.3384112, 28.416091), 23)); final Index index = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final DataStore dataStore = DataStoreFactory.createDataStore(new MemoryRequiredOptions()); final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType); final Envelope bbox1 = new Envelope(-77.5, -76, 38.4, 39); final Envelope bbox2 = new Envelope(-67.5, -66, 28.4, 29); dataStore.addType(adapter, index); final CountStatistic s2Count = new CountStatistic(featureType.getTypeName()); s2Count.setTag("S2-Example"); final SpatialFieldValueBinningStrategy s2SpatialBinning = new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName()); // type could be Google's S2, Uber's H3, or simple GeoHash s2SpatialBinning.setType(SpatialBinningType.S2); // precision is the character length for H3 and GeoHash 
which is over twice as coarse as S2 // which uses powers of two for precision (so a precision of 8 in S2 is actually a coarser // granularity than a precision of 4 in GeoHash or H3) s2SpatialBinning.setPrecision(7); s2Count.setBinningStrategy(s2SpatialBinning); final CountStatistic h3Count = new CountStatistic(featureType.getTypeName()); // stats for the same feature type should have different tags h3Count.setTag("H3-Example"); final SpatialFieldValueBinningStrategy h3SpatialBinning = new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName()); // type could be Google's S2, Uber's H3, or simple GeoHash h3SpatialBinning.setType(SpatialBinningType.H3); h3SpatialBinning.setPrecision(3); h3Count.setBinningStrategy(h3SpatialBinning); final CountStatistic geohashCount = new CountStatistic(featureType.getTypeName()); geohashCount.setTag("Geohash-Example"); final SpatialFieldValueBinningStrategy geohashSpatialBinning = new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName()); // type could be Google's S2, Uber's H3, or simple GeoHash geohashSpatialBinning.setType(SpatialBinningType.GEOHASH); geohashSpatialBinning.setPrecision(3); geohashCount.setBinningStrategy(geohashSpatialBinning); // you can add "empty" statistic before you've written any data, the stats will then be updated // as you write data // alternatively if you don't use the "empty" variant it will automatically calculate and update // these stats for pre-existing data before returning from the method dataStore.addEmptyStatistic(s2Count, h3Count, geohashCount); // Ingest cannedFeatures into the DataStore. 
try (Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) { for (final SimpleFeature sf : cannedFeatures) { indexWriter.write(sf); } } System.out.println("***** S2 Binning *****"); System.out.println("** All Bins **"); try ( CloseableIterator> it = dataStore.getBinnedStatisticValues(s2Count)) { // you can get all bins while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Count: %d, Bin: %s, Bin Geometry: %s", pair.getRight(), s2SpatialBinning.binToString(pair.getLeft()), s2SpatialBinning.getType().getBinGeometry(pair.getLeft(), 7))); } } System.out.println(String.format("** Bins Within Envelope %s **", bbox1)); try (CloseableIterator> it = dataStore.getBinnedStatisticValues(s2Count, BinConstraints.ofObject(bbox1))) { // or you can get only bins within an envelope while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Count: %d, Bin: %s, Bin Geometry: %s", pair.getRight(), s2SpatialBinning.binToString(pair.getLeft()), s2SpatialBinning.getType().getBinGeometry(pair.getLeft(), 7))); } } // or you could just get the aggregated statistic value for an envelope (keep in mind this is // using the statistic bins that intersect the envelope so may be an over-estimate for bins that // only partially intersect) System.out.println( String.format( "** %d in bbox %s **", dataStore.getStatisticValue(s2Count, BinConstraints.ofObject(bbox2)), bbox2)); System.out.println("\n***** H3 Binning *****"); System.out.println("** All Bins **"); try ( CloseableIterator> it = dataStore.getBinnedStatisticValues(h3Count)) { // you can get all bins while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Count: %d, Bin: %s, Bin Geometry: %s", pair.getRight(), h3SpatialBinning.binToString(pair.getLeft()), h3SpatialBinning.getType().getBinGeometry(pair.getLeft(), 3))); } } System.out.println(String.format("** Bins Within Envelope %s **", bbox1)); try (CloseableIterator> it = 
dataStore.getBinnedStatisticValues(h3Count, BinConstraints.ofObject(bbox1))) { // or you can get only bins within an envelope while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Count: %d, Bin: %s, Bin Geometry: %s", pair.getRight(), h3SpatialBinning.binToString(pair.getLeft()), h3SpatialBinning.getType().getBinGeometry(pair.getLeft(), 3))); } } // or you could just get the aggregated statistic value for an envelope (keep in mind this is // using the statistic bins that intersect the envelope so may be an over-estimate for bins that // only partially intersect) System.out.println( String.format( "** %d in bbox %s **", dataStore.getStatisticValue(h3Count, BinConstraints.ofObject(bbox2)), bbox2)); System.out.println("\n***** Geohash Binning *****"); System.out.println("** All Bins **"); try (CloseableIterator> it = dataStore.getBinnedStatisticValues(geohashCount)) { // you can get all bins while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Count: %d, Bin: %s, Bin Geometry: %s", pair.getRight(), geohashSpatialBinning.binToString(pair.getLeft()), geohashSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3))); } } System.out.println(String.format("** Bins Within Envelope %s **", bbox1)); try (CloseableIterator> it = dataStore.getBinnedStatisticValues(geohashCount, BinConstraints.ofObject(bbox1))) { // or you can get only bins within an envelope while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Count: %d, Bin: %s, Bin Geometry: %s", pair.getRight(), geohashSpatialBinning.binToString(pair.getLeft()), geohashSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3))); } } // or you could just get the aggregated statistic value for an envelope (keep in mind this is // using the statistic bins that intersect the envelope so may be an over-estimate for bins that // only partially intersect) System.out.println( String.format( "** %d in bbox %s **", 
dataStore.getStatisticValue(geohashCount, BinConstraints.ofObject(bbox2)), bbox2)); // and finally just to make it clear, you can apply spatial binning to *any* statistic not just // counts // so here's an example binning numeric stats of the population (sum, avg, std dev, etc.) by an // S2 level 7 grid final NumericStatsStatistic s2PopulationStats = new NumericStatsStatistic(featureType.getTypeName(), "population"); s2PopulationStats.setTag("S2-Population-Stats"); final SpatialFieldValueBinningStrategy s2PopulationSpatialBinning = new SpatialFieldValueBinningStrategy(featureType.getGeometryDescriptor().getLocalName()); s2PopulationSpatialBinning.setType(SpatialBinningType.S2); s2PopulationSpatialBinning.setPrecision(7); s2PopulationStats.setBinningStrategy(s2PopulationSpatialBinning); // here we'll calculate the stat on add based on the already written data (rather than adding // the "empty" statistic) dataStore.addStatistic(s2PopulationStats); // and we'll run through the same set of examples of getting all the bins and then filtering by // an envelope System.out.println("\n***** S2 Population Stats Binning *****"); System.out.println("** All Bins **"); try (CloseableIterator> it = dataStore.getBinnedStatisticValues(s2PopulationStats)) { // you can get all bins while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Population: %s, Bin: %s, Bin Geometry: %s", pair.getRight(), s2PopulationSpatialBinning.binToString(pair.getLeft()), s2PopulationSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3))); } } System.out.println(String.format("** Bins Within Envelope %s **", bbox1)); try (CloseableIterator> it = dataStore.getBinnedStatisticValues(s2PopulationStats, BinConstraints.ofObject(bbox1))) { // or you can get only bins within an envelope while (it.hasNext()) { final Pair pair = it.next(); System.out.println( String.format( "Population: %s, Bin: %s, Bin Geometry: %s", pair.getRight(), 
s2PopulationSpatialBinning.binToString(pair.getLeft()), s2PopulationSpatialBinning.getType().getBinGeometry(pair.getLeft(), 3))); } } // or you could just get the aggregated statistic value for an envelope (keep in mind this is // using the statistic bins that intersect the envelope so may be an over-estimate for bins that // only partially intersect) System.out.println( String.format( "** Population Stats '%s' in bbox %s **", dataStore.getStatisticValue(s2PopulationStats, BinConstraints.ofObject(bbox2)), bbox2)); } /** * A helper that constructs the SimpleFeatureType used in this example. */ private static SimpleFeatureType getSimpleFeatureType() { final String name = "ExampleSimpleFeatureType"; final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder(); sftBuilder.setName(name); // the location name isn't used in this example, its just here for show! sftBuilder.add(atBuilder.binding(String.class).nillable(false).buildDescriptor("locationName")); // this is used for the grouping (the `.bin` call). sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geometry")); // this is the field that is summed in each group, as defined by the `.aggregate` call. sftBuilder.add(atBuilder.binding(Integer.class).nillable(false).buildDescriptor("population")); return sftBuilder.buildFeatureType(); } /** * Just a helper method to create a SimpleFeature to the specifications used in this example. 
*/ private static SimpleFeature buildSimpleFeature( final SimpleFeatureType featureType, final String locationName, final Coordinate coordinate, final int population) { final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType); builder.set("locationName", locationName); builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate)); builder.set("population", population); return builder.buildFeature(locationName); } } ================================================ FILE: examples/java-api/src/main/java/org/locationtech/geowave/examples/stats/WordCountStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.examples.stats;

import java.nio.ByteBuffer;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.FieldStatistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsDeleteCallback;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;
import org.locationtech.geowave.core.store.statistics.field.FieldStatisticType;
import com.beust.jcommander.Parameter;

/**
 * Example custom field statistic that counts the words (of a configurable minimum length) in a
 * string field across an entire data set.
 */
public class WordCountStatistic extends FieldStatistic<WordCountStatistic.WordCountValue> {
  public static final FieldStatisticType<WordCountValue> STATS_TYPE =
      new FieldStatisticType<>("WORD_COUNT");

  private static final String WHITESPACE_REGEX = "\\s+";

  /**
   * Statistics support JCommander parameters so that they can be configured when adding the
   * statistic via the CLI. In this case, the minimum word length for the statistic would be
   * configurable via the `--minWordLength` option when adding this statistic.
   */
  @Parameter(
      names = "--minWordLength",
      required = true,
      description = "The minimum word length to count.")
  private int minWordLength = 0;

  public WordCountStatistic() {
    super(STATS_TYPE);
  }

  public WordCountStatistic(final String typeName, final String fieldName) {
    super(STATS_TYPE, typeName, fieldName);
  }

  /**
   * Add a programmatic setter for min word length.
   */
  public void setMinWordLength(final int length) {
    this.minWordLength = length;
  }

  /**
   * Provides a description of the statistic that will be displayed in the CLI when describing
   * available statistics.
   */
  @Override
  public String getDescription() {
    return "Provides a count of all words of a string field.";
  }

  /**
   * Returns `true` for every class this statistic is compatible with. In our case, only `String`
   * types will be supported since we are doing a word count.
   */
  @Override
  public boolean isCompatibleWith(final Class<?> fieldClass) {
    return String.class.isAssignableFrom(fieldClass);
  }

  /**
   * Constructs an empty statistic value for this statistic. The state of the value should be as if
   * no entries have been ingested.
   */
  @Override
  public WordCountValue createEmpty() {
    return new WordCountValue(this);
  }

  /**
   * The `byteLength`, `writeBytes`, and `readBytes` functions only need to be overriden if you are
   * adding additional configuration parameters or need to store additional information needed for
   * the statistic to function properly. In this example, we have added a minimum word length
   * parameter, so we need to store that when the statistic is serialized and deserialized.
   */
  @Override
  protected int byteLength() {
    return super.byteLength() + Integer.BYTES;
  }

  @Override
  protected void writeBytes(final ByteBuffer buffer) {
    super.writeBytes(buffer);
    buffer.putInt(minWordLength);
  }

  @Override
  protected void readBytes(final ByteBuffer buffer) {
    super.readBytes(buffer);
    minWordLength = buffer.getInt();
  }

  /**
   * Every statistic has a corresponding statistic value. This class is responsible for determining
   * what happens when entries are ingested or deleted, as well as when two values need to be
   * merged. If a value can be updated on ingest, `StatisticsIngestCallback` should be implemented.
   * If the value can be updated on delete, `StatisticsDeleteCallback` should be implemented. Some
   * statistics, such as bounding box statistics cannot be updated on delete because there isn't
   * enough information to know if the bounding box should shrink when an entry is deleted. In that
   * case, only the ingest callback would be implemented.
   */
  public static class WordCountValue extends StatisticValue<Long>
      implements
      StatisticsIngestCallback,
      StatisticsDeleteCallback {
    private long count = 0;

    public WordCountValue() {
      this(null);
    }

    private WordCountValue(final WordCountStatistic statistic) {
      super(statistic);
    }

    public long getCount() {
      return count;
    }

    /**
     * Merge this value with another.
     */
    @Override
    public void merge(final Mergeable merge) {
      if ((merge != null) && (merge instanceof WordCountValue)) {
        final WordCountValue other = (WordCountValue) merge;
        count += other.count;
      }
    }

    /**
     * Counts the words in the entry's field value that meet the configured minimum length.
     * Returns 0 when the field value is null. Shared by the ingest and delete callbacks.
     */
    private <T> long countQualifyingWords(final DataTypeAdapter<T> adapter, final T entry) {
      final WordCountStatistic stat = ((WordCountStatistic) getStatistic());
      final Object o = adapter.getFieldValue(entry, stat.getFieldName());
      if (o == null) {
        return 0;
      }
      final String str = (String) o;
      long words = 0;
      for (final String word : str.split(WHITESPACE_REGEX)) {
        if (word.length() >= stat.minWordLength) {
          words++;
        }
      }
      return words;
    }

    /**
     * Get the field value from the adapter, and if it's not null, count the number of words that
     * exceed the minimum length and add it to the total.
     */
    @Override
    public <T> void entryIngested(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      count += countQualifyingWords(adapter, entry);
    }

    /**
     * Get the field value from the adapter, and if it's not null, count the number of words that
     * exceed the minimum length and subtract it from the total.
     */
    @Override
    public <T> void entryDeleted(
        final DataTypeAdapter<T> adapter,
        final T entry,
        final GeoWaveRow... rows) {
      // Bug fix: this previously incremented the count on delete, so deleting entries
      // inflated the statistic instead of reducing it.
      count -= countQualifyingWords(adapter, entry);
    }

    /**
     * Return the actual value of the statistic.
     */
    @Override
    public Long getValue() {
      return getCount();
    }

    /**
     * Serialize the statistic value to binary.
     */
    @Override
    public byte[] toBinary() {
      final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(count));
      VarintUtils.writeUnsignedLong(count, buffer);
      return buffer.array();
    }

    /**
     * Deserialize the statistic value from binary.
     */
    @Override
    public void fromBinary(final byte[] bytes) {
      final ByteBuffer buffer = ByteBuffer.wrap(bytes);
      count = VarintUtils.readUnsignedLong(buffer);
    }
  }
}

================================================
FILE: examples/java-api/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi
================================================
org.locationtech.geowave.examples.ExamplePersistableRegistry

================================================
FILE: examples/java-api/src/main/resources/META-INF/services/org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi
================================================
org.locationtech.geowave.examples.ingest.plugin.CustomIngestFormat

================================================
FILE: examples/java-api/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI
================================================
org.locationtech.geowave.examples.stats.ExampleRegisteredStatistics

================================================
FILE: examples/java-api/src/main/resources/geonames.txt
================================================
3373406 Yorkshire Yorkshire 13.1 -59.5 P PPL BB 01 0 46
America/Barbados 1993-12-22 3373407 Wotton Wotton 13.06667 -59.53333 P PPL BB 01 0 48 America/Barbados 1993-12-22 3373408 Worthing Worthing 13.07496 -59.58358 P PPL BB 01 0 10 America/Barbados 2011-03-17 3373409 Workhall Workhall Workhall 13.11667 -59.45 P PPL BB 10 0 22 America/Barbados 2012-01-18 3373410 Woodbourne Woodbourne Woodbourne 13.08333 -59.48333 P PPL BB 10 0 60 America/Barbados 2012-01-18 3373411 Woman’s Bay Woman's Bay 13.03333 -59.5 H BAY BB 01 0 -9999 America/Barbados 1993-12-22 3373412 Windy Ridge Windy Ridge 13.16667 -59.46667 P PPLL BB 05 0 91 America/Barbados 1993-12-22 3373413 Windy Hill Windy Hill 13.23333 -59.55 P PPL BB 02 0 64 America/Barbados 1993-12-22 3373414 Windsor Station Windsor Station 13.11667 -59.51667 S RSTN BB 00 0 52 America/Barbados 1993-12-22 3373415 Windsor Windsor 13.11667 -59.51667 P PPL BB 00 0 52 America/Barbados 1993-12-22 3373416 Wilson Hill Wilson Hill 13.16667 -59.53333 P PPL BB 05 0 249 America/Barbados 1993-12-22 3373417 Wildey Wildey Wildey 13.1 -59.56667 P PPL BB 08 0 108 America/Barbados 2012-01-18 3373418 Wilcox Wilcox 13.05 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373419 White Hill White Hill White Hill 13.2 -59.56667 P PPL BB 02 0 296 America/Barbados 2012-01-18 3373420 Whitehaven Whitehaven 13.16667 -59.45 P PPL BB 10 0 36 America/Barbados 1993-12-22 3373421 White Hall White Hall 13.25 -59.61667 P PPLL BB 09 0 158 America/Barbados 1993-12-22 3373422 Weymouth Weymouth 13.08333 -59.6 P PPLX BB 08 0 23 America/Barbados 1993-12-22 3373423 Weston Weston 13.21667 -59.63333 P PPL BB 04 0 63 America/Barbados 1993-12-22 3373424 Westmoreland Westmoreland 13.21667 -59.61667 P PPL BB 04 0 167 America/Barbados 1993-12-22 3373425 Well Road Well Road 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3373426 Wellhouse Wellhouse 13.13333 -59.43333 P PPL BB 10 0 27 America/Barbados 1993-12-22 3373427 Welchtown Welchtown 13.26667 -59.58333 P PPL BB 09 0 139 America/Barbados 1993-12-22 3373428 Welch Town 
Welch Town 13.26667 -59.58333 P PPL BB 02 0 139 America/Barbados 1993-12-22 3373429 Welch Town Welch Town 13.16667 -59.48333 P PPL BB 05 0 193 America/Barbados 1993-12-22 3373430 Welchman Hall Welchman Hall 13.18333 -59.56667 P PPLA BB 11 0 267 America/Barbados 2012-01-16 3373431 Welches Welches 13.05 -59.55 P PPL BB 01 0 1 America/Barbados 1993-12-22 3373432 Waverley Cot Waverley Cot 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3373433 Watts Village Watts Village 13.1 -59.53333 P PPL BB 03 0 105 America/Barbados 1993-12-22 3373434 Waterford Waterford 13.11667 -59.58333 P PPL BB 08 0 43 America/Barbados 1993-12-22 3373435 Warrens Warrens Warrens 13.15 -59.6 P PPL BB 08 0 107 America/Barbados 2012-01-18 3373436 Warners Warners 13.06667 -59.56667 P PPL BB 01 0 8 America/Barbados 1993-12-22 3373437 Warleigh Warleigh 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373438 Wanstead Wanstead 13.13333 -59.61667 P PPL BB 08 0 84 America/Barbados 1993-12-22 3373439 Walronds Walronds 13.08333 -59.48333 P PPL BB 01 0 60 America/Barbados 1993-12-22 3373440 Walkes Spring Walkes Spring Walkes Spring,francia 13.16667 -59.56667 P PPL BB 11 0 193 America/Barbados 2012-01-18 3373441 Walkers Terrace Walkers Terrace 13.13333 -59.55 P PPL BB 03 0 117 America/Barbados 1993-12-22 3373442 Walker’s Savannah Walker's Savannah 13.25 -59.55 L LCTY BB 02 0 1 America/Barbados 1993-12-22 3373443 Walkers Beach Walkers Beach 13.25 -59.55 T BCH BB 02 0 1 America/Barbados 1993-12-22 3373444 Wakefield Tenantry Wakefield Tenantry 13.16667 -59.51667 P PPL BB 05 0 203 America/Barbados 1993-12-22 3373445 Wakefield Wakefield Haynes Field,Wakefield 13.18333 -59.51667 P PPL BB BB 05 0 233 America/Barbados 2012-01-18 3373446 Vineyard Vineyard 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 1993-12-22 3373447 Verdun Verdun Cheshire,Verdun 13.18333 -59.5 P PPL BB BB 05 0 251 America/Barbados 2012-01-18 3373448 Venture Venture 13.18333 -59.51667 P PPL BB 05 0 233 America/Barbados 
1993-12-22 3373449 Vauxhall Vauxhall 13.08333 -59.56667 P PPL BB 01 0 68 America/Barbados 1993-12-22 3373450 Vaucluse Factory Vaucluse Factory 13.16667 -59.58333 P PPL BB 11 0 159 America/Barbados 1993-12-22 3373451 Valley Valley 13.11667 -59.56667 P PPL BB 03 0 47 America/Barbados 1993-12-22 3373452 Upper Salmonds Upper Salmonds 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3373453 Upper Parks Upper Parks 13.2 -59.55 P PPL BB 06 0 215 America/Barbados 1993-12-22 3373454 Upper Carlton Upper Carlton 13.21667 -59.61667 P PPL BB 04 0 167 America/Barbados 1993-12-22 3373455 Union Hall Union Hall 13.11667 -59.45 P PPL BB 10 0 22 America/Barbados 1993-12-22 3373456 Union Union 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3373457 Two Mile Hill Two Mile Hill Two Mile Hill 13.08333 -59.58333 P PPL BB 08 0 38 America/Barbados 2012-01-18 3373458 Turnpike Turnpike 13.11667 -59.55 P PPL BB 03 0 53 America/Barbados 1993-12-22 3373459 Turners Hall Turners Hall 13.23333 -59.58333 P PPL BB 02 0 85 America/Barbados 1993-12-22 3373460 Trents Trents 13.3 -59.61667 P PPL BB 07 0 84 America/Barbados 1993-12-22 3373461 Trents Trents 13.2 -59.63333 P PPL BB 04 0 41 America/Barbados 1993-12-22 3373462 Trader Bank Trader Bank 13.05 -59.65 H BNK BB 00 0 -9999 America/Barbados 1993-12-22 3373463 Touce’s Point Touce's Point 13.31667 -59.61667 T PT BB 07 0 38 America/Barbados 1993-12-22 3373464 Top Rock Top Rock 13.06667 -59.56667 P PPL BB 01 0 8 America/Barbados 1993-12-22 3373465 Todds Todds 13.16667 -59.51667 P PPL BB 05 0 203 America/Barbados 1993-12-22 3373466 Three Houses Station Three Houses Station 13.15 -59.45 S RSTN BB 00 0 46 America/Barbados 1993-12-22 3373467 Three Houses Three Houses 13.15 -59.46667 S EST BB 10 0 88 America/Barbados 1993-12-22 3373468 Three Boys’ Rock Three Boys' Rock 13.2 -59.5 T RK BB 05 0 27 America/Barbados 1993-12-22 3373469 Thornbury Hill Thornbury Hill 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373470 
Thicket Thicket 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 1993-12-22 3373471 The Whim The Whim 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373472 The Savannah The Savannah 13.25 -59.56667 L LCTY BB 02 0 20 America/Barbados 1993-12-22 3373473 The Risk The Risk 13.28333 -59.56667 P PPL BB 09 0 1 America/Barbados 1993-12-22 3373474 The Potteries The Potteries 13.21667 -59.55 L LCTY BB 02 0 269 America/Barbados 1993-12-22 3373475 The Glebe The Glebe 13.11667 -59.55 P PPL BB 03 0 53 America/Barbados 1993-12-22 3373476 The Garden The Garden 13.2 -59.63333 P PPL BB 04 0 41 America/Barbados 1993-12-22 3373477 Crane Crane The Crane 13.1 -59.45 P PPLA BB 10 935 -9999 America/Barbados 2013-06-26 3373478 The Baltic The Baltic 13.28333 -59.58333 P PPL BB 09 0 199 America/Barbados 1993-12-22 3373479 Tent Bay Tent Bay 13.2 -59.5 P PPL BB 06 0 27 America/Barbados 1993-12-22 3373480 Pico Teneriffe Pico Teneriffe 13.28333 -59.56667 T HLL BB 09 0 1 America/Barbados 1993-12-22 3373481 Taylor Bay Taylor Bay 13.31667 -59.63333 H COVE BB 07 0 41 America/Barbados 1993-12-22 3373482 Sweet Bottom Sweet Bottom Sweet Bottom,Sweet Vale 13.16667 -59.55 P PPL BB BB 03 0 216 America/Barbados 2012-01-18 3373483 Swanns Swanns 13.23333 -59.58333 P PPL BB 02 0 85 America/Barbados 1993-12-22 3373484 Sutherland Road Sutherland Road 13.26667 -59.63333 P PPL BB 07 0 51 America/Barbados 1993-12-22 3373485 Surinam Surinam 13.18333 -59.53333 P PPL BB 06 0 265 America/Barbados 1993-12-22 3373486 Supers Supers 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 1993-12-22 ================================================ FILE: examples/java-api/src/main/resources/stateCapitals.csv ================================================ Alabama,Montgomery,-86.2460375,32.343799,1846,155.4,205764,scala Alaska,Juneau,-134.1765792,58.3844634,1906,2716.7,31275,scala Arizona,Phoenix,-112.125051,33.6054149,1889,474.9,1445632,scala Arkansas,Little Rock,-92.3379275,34.7240049,1821,116.2,193524,java 
California,Sacramento,-121.4429125,38.5615405,1854,97.2,466488,java Colorado,Denver,-104.8551114,39.7643389,1867,153.4,600158,java Connecticut,Hartford,-72.680087,41.7656874,1875,17.3,124512,scala Delaware,Dover,-75.5134199,39.1564159,1777,22.4,36047,scala Florida,Tallahassee,-84.2568559,30.4671395,1824,95.7,181412,scala Georgia,Atlanta,-84.420604,33.7677129,1868,131.7,420003,scala Hawaii,Honolulu,-157.7989705,21.3280681,1845,85.7,337256,java Idaho,Boise,-116.2338979,43.6008061,1865,63.8,205671,java Illinois,Springfield,-89.6708313,39.7638375,1837,54,116250,scala Indiana,Indianapolis,-86.13275,39.7797845,1825,361.5,829718,java Iowa,Des Moines,-93.606516,41.5666699,1857,75.8,203433,java Kansas,Topeka,-95.708031,39.0130545,1856,56,127473,scala Kentucky,Frankfort,-84.8666254,38.1944455,1792,14.7,25527,java Louisiana,Baton Rouge,-91.1114186,30.441474,1880,76.8,229553,java Maine,Augusta,-69.730692,44.3334319,1832,55.4,19136,java Maryland,Annapolis,-76.5046945,38.9724689,1694,6.73,38394,scala Massachusetts,Boston,-71.0571571,42.3133735,1630,48.4,617594,scala Michigan,Lansing,-84.559032,42.7086815,1847,35,114297,java Minnesota,Saint Paul,-93.1060534,44.9397075,1849,52.8,285068,scala Mississippi,Jackson,-90.1888874,32.3103284,1821,104.9,173514,scala Missouri,Jefferson City,-92.1624049,38.5711659,1826,27.3,43079,java Montana,Helena,-112.0156939,46.5933579,1875,14,28190,java Nebraska,Lincoln,-96.6907283,40.800609,1867,74.6,258379,scala Nevada,Carson City,-119.7526546,39.1678334,1861,143.4,55274,scala New Hampshire,Concord,-71.5626055,43.2308015,1808,64.3,42695,scala New Jersey,Trenton,-74.7741221,40.2162772,1784,7.66,84913,java New Mexico,Santa Fe,-105.983036,35.6824934,1610,37.3,75764,java New York,Albany,-73.8113997,42.6681399,1797,21.4,97856,java North Carolina,Raleigh,-78.6450559,35.843768,1792,114.6,403892,scala North Dakota,Bismarck,-100.7670546,46.809076,1883,26.9,61272,scala Ohio,Columbus,-82.990829,39.9829515,1816,210.3,822553,java Oklahoma,Oklahoma 
City,-97.4791974,35.4826479,1910,607,580000,java Oregon,Salem,-123.0282074,44.9329915,1855,45.7,154637,java Pennsylvania,Harrisburg,-76.8804255,40.2821445,1812,8.11,49528,scala Rhode Island,Providence,-71.4211805,41.8169925,1900,18.5,178042,scala South Carolina,Columbia,-80.9375649,34.0375089,1786,125.2,131686,scala South Dakota,Pierre,-100.3205385,44.3708241,1889,13,13646,java Tennessee,Nashville,-86.7852455,36.1866405,1826,473.3,635710,scala Texas,Austin,-97.7534014,30.3077609,1839,251.5,790390,java Utah,Salt Lake City,-111.920485,40.7766079,1858,109.1,186440,java Vermont,Montpelier,-72.5687199,44.2739708,1805,10.2,7855,java Virginia,Richmond,-77.4932614,37.524661,1780,60.1,204214,scala Washington,Olympia,-122.8938687,47.0393335,1853,16.7,46478,scala West Virginia,Charleston,-81.6405384,38.3560436,1885,31.6,51400,scala Wisconsin,Madison,-89.4064204,43.0849935,1838,68.7,233209,scala Wyoming,Cheyenne,-104.7674045,41.1475325,1869,21.1,59466,java ================================================ FILE: examples/java-api/src/test/java/org/locationtech/geowave/examples/ingest/BulkIngestInputGenerationTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.examples.ingest;

import java.io.IOException;
import java.util.Locale;
import org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.locationtech.geowave.examples.ingest.bulk.GeonamesDataFileInputFormat;
import org.locationtech.geowave.examples.ingest.bulk.SimpleFeatureToAccumuloKeyValueMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Runs the bulk-ingest input generation MapReduce job against a local Geonames test fixture and
 * verifies the record counts and the job's output files.
 */
public class BulkIngestInputGenerationTest {

  private static final Logger LOGGER =
      LoggerFactory.getLogger(BulkIngestInputGenerationTest.class);
  private static final String TEST_DATA_LOCATION =
      "src/test/resources/org/locationtech/geowave/examples/ingest/geonames/barbados";
  private static final long NUM_GEONAMES_RECORDS = 834; // (see BB.txt)
  private static final String OUTPUT_PATH = "target/tmp_bulkIngestTest";

  // Populated from the job counters after the MapReduce job completes.
  private static long mapInputRecords;
  private static long mapOutputRecords;
  private static long startMillis;

  @BeforeClass
  public static void startTimer() {
    startMillis = System.currentTimeMillis();
    LOGGER.warn("-----------------------------------------");
    LOGGER.warn("*                                       *");
    LOGGER.warn("* RUNNING BulkIngestInputGenerationIT   *");
    LOGGER.warn("*                                       *");
    LOGGER.warn("-----------------------------------------");
  }

  @AfterClass
  public static void reportTest() {
    LOGGER.warn("-----------------------------------------");
    LOGGER.warn("*                                       *");
    LOGGER.warn("* FINISHED BulkIngestInputGenerationIT  *");
    LOGGER.warn(
        "*        " + ((System.currentTimeMillis() - startMillis) / 1000) + "s elapsed.          *");
    LOGGER.warn("*                                       *");
    LOGGER.warn("-----------------------------------------");
  }

  @Test
  public void testMapReduceJobSuccess() throws Exception {
    // There is a linker error on windows when running this test
    Assume.assumeFalse(isWindows());
    LOGGER.info("Running Bulk Ingest Input Generation MapReduce job...");

    final int exitCode = ToolRunner.run(new BulkIngestInputGenerationJobRunner(), null);
    LOGGER.info("Job completed with exit code: " + exitCode);

    // verify exitCode = 0 (JUnit's assertEquals takes the expected value first; the original
    // passed the arguments reversed, which inverts the failure message)
    Assert.assertEquals(0, exitCode);

    verifyNumInputRecords();
    verifyNumAccumuloKeyValuePairs();
    verifyJobOutput();
  }

  private static boolean isWindows() {
    final String OS = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
    return (OS.indexOf("win") > -1);
  }

  private void verifyNumInputRecords() {
    Assert.assertEquals(NUM_GEONAMES_RECORDS, mapInputRecords);
  }

  private void verifyNumAccumuloKeyValuePairs() {
    Assert.assertEquals(NUM_GEONAMES_RECORDS, mapOutputRecords);
  }

  /**
   * Scans the job's output directory for the Hadoop {@code _SUCCESS} marker and at least one
   * reducer output file.
   */
  private void verifyJobOutput() throws IOException {
    final String _SUCCESS = "_SUCCESS";
    final String REDUCER_OUTPUT = "part-r-";
    boolean wasSuccessful = false;
    boolean reducerOutputExists = false;

    final FileSystem fs = FileSystem.getLocal(new Configuration());
    final RemoteIterator<LocatedFileStatus> iterator =
        fs.listFiles(new Path(OUTPUT_PATH), false);
    LocatedFileStatus fileStatus = null;
    String fileName = null;

    while (iterator.hasNext()) {
      fileStatus = iterator.next();
      fileName = fileStatus.getPath().getName();
      if (fileName.contains(_SUCCESS)) {
        wasSuccessful = true;
      }
      if (fileName.contains(REDUCER_OUTPUT)) {
        reducerOutputExists = true;
      }
    }

    // verify presence of _SUCCESS file
    Assert.assertTrue(wasSuccessful);

    // verify presence of Reducer output
    Assert.assertTrue(reducerOutputExists);
  }

  /** Configures and runs the bulk-ingest MapReduce job with an identity reducer. */
  private static class BulkIngestInputGenerationJobRunner extends Configured implements Tool {

    private static final String JOB_NAME = "BulkIngestInputGenerationITJob";
    private static final String TASK_COUNTER_GROUP_NAME =
        "org.apache.hadoop.mapreduce.TaskCounter";
    private static final String MAP_INPUT_RECORDS = "MAP_INPUT_RECORDS";
    private static final String MAP_OUTPUT_RECORDS = "MAP_OUTPUT_RECORDS";

    @Override
    public int run(final String[] args) throws Exception {
      final Configuration conf = getConf();
      conf.set("fs.defaultFS", "file:///");

      final Job job = Job.getInstance(conf, JOB_NAME);
      job.setJarByClass(getClass());

      FileInputFormat.setInputPaths(job, new Path(TEST_DATA_LOCATION));
      FileOutputFormat.setOutputPath(job, cleanPathForReuse(conf, OUTPUT_PATH));

      job.setMapperClass(SimpleFeatureToAccumuloKeyValueMapper.class);
      job.setReducerClass(Reducer.class); // (Identity Reducer)

      job.setInputFormatClass(GeonamesDataFileInputFormat.class);
      job.setOutputFormatClass(AccumuloFileOutputFormat.class);

      job.setMapOutputKeyClass(Key.class);
      job.setMapOutputValueClass(Value.class);
      job.setOutputKeyClass(Key.class);
      job.setOutputValueClass(Value.class);

      job.setNumReduceTasks(1);
      job.setSpeculativeExecution(false);

      final boolean result = job.waitForCompletion(true);

      // Capture the counters so the test methods can verify record totals.
      mapInputRecords =
          job.getCounters().findCounter(TASK_COUNTER_GROUP_NAME, MAP_INPUT_RECORDS).getValue();
      mapOutputRecords =
          job.getCounters().findCounter(TASK_COUNTER_GROUP_NAME, MAP_OUTPUT_RECORDS).getValue();

      return result ? 0 : 1;
    }

    /** Deletes the output path if it exists so the job can be re-run against the same directory. */
    private Path cleanPathForReuse(final Configuration conf, final String pathString)
        throws IOException {
      final FileSystem fs = FileSystem.get(conf);
      final Path path = new Path(pathString);
      if (fs.exists(path)) {
        LOGGER.info("Deleting '" + pathString + "' for reuse.");
        fs.delete(path, true);
      }
      return path;
    }
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.examples.ingest;

import java.util.Set;
import java.util.TreeSet;
import org.junit.Assert;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.QueryBuilder;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base support for simple-ingest tests: builds the expected grid of points on a 5-degree
 * lat/lon lattice and compares it with what a {@link DataStore} returns from an unconstrained
 * query.
 */
public class SimpleIngestTest {
  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleIngestTest.class);
  final GeometryFactory factory = new GeometryFactory();
  IndexStore indexStore;
  PersistentAdapterStore adapterStore;
  DataStatisticsStore statsStore;

  /**
   * Builds the expected point set: one point every 5 degrees over the full lat/lon range
   * (inclusive of both boundaries).
   */
  protected static Set<Point> getCalcedPointSet() {
    final Set<Point> calcPoints = new TreeSet<>();
    for (int longitude = -180; longitude <= 180; longitude += 5) {
      for (int latitude = -90; latitude <= 90; latitude += 5) {
        final Point p =
            GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(longitude, latitude));
        calcPoints.add(p);
      }
    }
    return calcPoints;
  }

  /**
   * Queries the data store with no constraints and collects the default geometry of every
   * {@link SimpleFeature} result.
   */
  protected static Set<Point> getStoredPointSet(final DataStore ds) {
    final CloseableIterator<?> itr =
        ds.query(
            QueryBuilder.newBuilder().constraints(
                new BasicQueryByClass(new BasicQueryByClass.ConstraintsByClass())).build());
    final Set<Point> readPoints = new TreeSet<>();
    while (itr.hasNext()) {
      final Object n = itr.next();
      if (n instanceof SimpleFeature) {
        final SimpleFeature gridCell = (SimpleFeature) n;
        final Point p = (Point) gridCell.getDefaultGeometry();
        readPoints.add(p);
      }
    }
    return readPoints;
  }

  /** Asserts that the stored points exactly match the calculated lattice. */
  protected static void validate(final DataStore ds) {
    final Set<Point> readPoints = getStoredPointSet(ds);
    final Set<Point> calcPoints = getCalcedPointSet();
    // assertEquals (expected, actual) reports the set difference on failure, unlike
    // assertTrue(readPoints.equals(calcPoints)) which only says "false".
    Assert.assertEquals(calcPoints, readPoints);
  }
}
America/Barbados 1993-12-22 3373414 Windsor Station Windsor Station 13.11667 -59.51667 S RSTN BB 00 0 52 America/Barbados 1993-12-22 3373415 Windsor Windsor 13.11667 -59.51667 P PPL BB 00 0 52 America/Barbados 1993-12-22 3373416 Wilson Hill Wilson Hill 13.16667 -59.53333 P PPL BB 05 0 249 America/Barbados 1993-12-22 3373417 Wildey Wildey Wildey 13.1 -59.56667 P PPL BB 08 0 108 America/Barbados 2012-01-18 3373418 Wilcox Wilcox 13.05 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373419 White Hill White Hill White Hill 13.2 -59.56667 P PPL BB 02 0 296 America/Barbados 2012-01-18 3373420 Whitehaven Whitehaven 13.16667 -59.45 P PPL BB 10 0 36 America/Barbados 1993-12-22 3373421 White Hall White Hall 13.25 -59.61667 P PPLL BB 09 0 158 America/Barbados 1993-12-22 3373422 Weymouth Weymouth 13.08333 -59.6 P PPLX BB 08 0 23 America/Barbados 1993-12-22 3373423 Weston Weston 13.21667 -59.63333 P PPL BB 04 0 63 America/Barbados 1993-12-22 3373424 Westmoreland Westmoreland 13.21667 -59.61667 P PPL BB 04 0 167 America/Barbados 1993-12-22 3373425 Well Road Well Road 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3373426 Wellhouse Wellhouse 13.13333 -59.43333 P PPL BB 10 0 27 America/Barbados 1993-12-22 3373427 Welchtown Welchtown 13.26667 -59.58333 P PPL BB 09 0 139 America/Barbados 1993-12-22 3373428 Welch Town Welch Town 13.26667 -59.58333 P PPL BB 02 0 139 America/Barbados 1993-12-22 3373429 Welch Town Welch Town 13.16667 -59.48333 P PPL BB 05 0 193 America/Barbados 1993-12-22 3373430 Welchman Hall Welchman Hall 13.18333 -59.56667 P PPLA BB 11 0 267 America/Barbados 2012-01-16 3373431 Welches Welches 13.05 -59.55 P PPL BB 01 0 1 America/Barbados 1993-12-22 3373432 Waverley Cot Waverley Cot 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3373433 Watts Village Watts Village 13.1 -59.53333 P PPL BB 03 0 105 America/Barbados 1993-12-22 3373434 Waterford Waterford 13.11667 -59.58333 P PPL BB 08 0 43 America/Barbados 1993-12-22 3373435 Warrens 
Warrens Warrens 13.15 -59.6 P PPL BB 08 0 107 America/Barbados 2012-01-18 3373436 Warners Warners 13.06667 -59.56667 P PPL BB 01 0 8 America/Barbados 1993-12-22 3373437 Warleigh Warleigh 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373438 Wanstead Wanstead 13.13333 -59.61667 P PPL BB 08 0 84 America/Barbados 1993-12-22 3373439 Walronds Walronds 13.08333 -59.48333 P PPL BB 01 0 60 America/Barbados 1993-12-22 3373440 Walkes Spring Walkes Spring Walkes Spring,francia 13.16667 -59.56667 P PPL BB 11 0 193 America/Barbados 2012-01-18 3373441 Walkers Terrace Walkers Terrace 13.13333 -59.55 P PPL BB 03 0 117 America/Barbados 1993-12-22 3373442 Walker’s Savannah Walker's Savannah 13.25 -59.55 L LCTY BB 02 0 1 America/Barbados 1993-12-22 3373443 Walkers Beach Walkers Beach 13.25 -59.55 T BCH BB 02 0 1 America/Barbados 1993-12-22 3373444 Wakefield Tenantry Wakefield Tenantry 13.16667 -59.51667 P PPL BB 05 0 203 America/Barbados 1993-12-22 3373445 Wakefield Wakefield Haynes Field,Wakefield 13.18333 -59.51667 P PPL BB BB 05 0 233 America/Barbados 2012-01-18 3373446 Vineyard Vineyard 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 1993-12-22 3373447 Verdun Verdun Cheshire,Verdun 13.18333 -59.5 P PPL BB BB 05 0 251 America/Barbados 2012-01-18 3373448 Venture Venture 13.18333 -59.51667 P PPL BB 05 0 233 America/Barbados 1993-12-22 3373449 Vauxhall Vauxhall 13.08333 -59.56667 P PPL BB 01 0 68 America/Barbados 1993-12-22 3373450 Vaucluse Factory Vaucluse Factory 13.16667 -59.58333 P PPL BB 11 0 159 America/Barbados 1993-12-22 3373451 Valley Valley 13.11667 -59.56667 P PPL BB 03 0 47 America/Barbados 1993-12-22 3373452 Upper Salmonds Upper Salmonds 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3373453 Upper Parks Upper Parks 13.2 -59.55 P PPL BB 06 0 215 America/Barbados 1993-12-22 3373454 Upper Carlton Upper Carlton 13.21667 -59.61667 P PPL BB 04 0 167 America/Barbados 1993-12-22 3373455 Union Hall Union Hall 13.11667 -59.45 P PPL BB 10 0 22 
America/Barbados 1993-12-22 3373456 Union Union 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3373457 Two Mile Hill Two Mile Hill Two Mile Hill 13.08333 -59.58333 P PPL BB 08 0 38 America/Barbados 2012-01-18 3373458 Turnpike Turnpike 13.11667 -59.55 P PPL BB 03 0 53 America/Barbados 1993-12-22 3373459 Turners Hall Turners Hall 13.23333 -59.58333 P PPL BB 02 0 85 America/Barbados 1993-12-22 3373460 Trents Trents 13.3 -59.61667 P PPL BB 07 0 84 America/Barbados 1993-12-22 3373461 Trents Trents 13.2 -59.63333 P PPL BB 04 0 41 America/Barbados 1993-12-22 3373462 Trader Bank Trader Bank 13.05 -59.65 H BNK BB 00 0 -9999 America/Barbados 1993-12-22 3373463 Touce’s Point Touce's Point 13.31667 -59.61667 T PT BB 07 0 38 America/Barbados 1993-12-22 3373464 Top Rock Top Rock 13.06667 -59.56667 P PPL BB 01 0 8 America/Barbados 1993-12-22 3373465 Todds Todds 13.16667 -59.51667 P PPL BB 05 0 203 America/Barbados 1993-12-22 3373466 Three Houses Station Three Houses Station 13.15 -59.45 S RSTN BB 00 0 46 America/Barbados 1993-12-22 3373467 Three Houses Three Houses 13.15 -59.46667 S EST BB 10 0 88 America/Barbados 1993-12-22 3373468 Three Boys’ Rock Three Boys' Rock 13.2 -59.5 T RK BB 05 0 27 America/Barbados 1993-12-22 3373469 Thornbury Hill Thornbury Hill 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373470 Thicket Thicket 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 1993-12-22 3373471 The Whim The Whim 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373472 The Savannah The Savannah 13.25 -59.56667 L LCTY BB 02 0 20 America/Barbados 1993-12-22 3373473 The Risk The Risk 13.28333 -59.56667 P PPL BB 09 0 1 America/Barbados 1993-12-22 3373474 The Potteries The Potteries 13.21667 -59.55 L LCTY BB 02 0 269 America/Barbados 1993-12-22 3373475 The Glebe The Glebe 13.11667 -59.55 P PPL BB 03 0 53 America/Barbados 1993-12-22 3373476 The Garden The Garden 13.2 -59.63333 P PPL BB 04 0 41 America/Barbados 1993-12-22 3373477 Crane Crane The 
Crane 13.1 -59.45 P PPLA BB 10 935 -9999 America/Barbados 2013-06-26 3373478 The Baltic The Baltic 13.28333 -59.58333 P PPL BB 09 0 199 America/Barbados 1993-12-22 3373479 Tent Bay Tent Bay 13.2 -59.5 P PPL BB 06 0 27 America/Barbados 1993-12-22 3373480 Pico Teneriffe Pico Teneriffe 13.28333 -59.56667 T HLL BB 09 0 1 America/Barbados 1993-12-22 3373481 Taylor Bay Taylor Bay 13.31667 -59.63333 H COVE BB 07 0 41 America/Barbados 1993-12-22 3373482 Sweet Bottom Sweet Bottom Sweet Bottom,Sweet Vale 13.16667 -59.55 P PPL BB BB 03 0 216 America/Barbados 2012-01-18 3373483 Swanns Swanns 13.23333 -59.58333 P PPL BB 02 0 85 America/Barbados 1993-12-22 3373484 Sutherland Road Sutherland Road 13.26667 -59.63333 P PPL BB 07 0 51 America/Barbados 1993-12-22 3373485 Surinam Surinam 13.18333 -59.53333 P PPL BB 06 0 265 America/Barbados 1993-12-22 3373486 Supers Supers 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 1993-12-22 3373487 Sunset Crest Sunset Crest 13.16667 -59.63333 P PPL BB 04 0 25 America/Barbados 1993-12-22 3373488 Sunbury Station Sunbury Station 13.11667 -59.48333 S RSTN BB 10 0 36 America/Barbados 1993-12-22 3373489 Sunbury Sunbury Sunbury 13.11667 -59.48333 P PPL BB 10 0 36 America/Barbados 2012-01-18 3373490 Summervale Summervale 13.13333 -59.46667 P PPL BB 10 0 54 America/Barbados 1993-12-22 3373491 Sugar Hill Sugar Hill 13.18333 -59.55 P PPL BB 06 0 275 America/Barbados 1993-12-22 3373492 Sturges Sturges 13.2 -59.56667 P PPL BB 06 0 296 America/Barbados 1993-12-22 3373493 Stroud Point Stroud Point Bargie Point,Stroud Point 13.31667 -59.63333 T PT BB BB 07 0 41 America/Barbados 2012-01-18 3373494 Stroude Land Stroude Land 13.11667 -59.45 P PPL BB 10 0 22 America/Barbados 1993-12-22 3373495 Stroud Bay Stroud Bay 13.31667 -59.65 H BGHT BB 07 0 1 America/Barbados 1993-12-22 3373496 St. Patricks St. 
Patricks 13.1 -59.5 P PPL BB 01 0 46 America/Barbados 1993-12-22 3373497 Stewart Hill Stewart Hill 13.15 -59.46667 P PPL BB 05 0 88 America/Barbados 1993-12-22 3373498 Stepney Stepney 13.11667 -59.55 P PPL BB 03 0 53 America/Barbados 1993-12-22 3373499 Station Hill Station Hill 13.1 -59.6 P PPLX BB 08 0 30 America/Barbados 1993-12-22 3373500 Spring Head Spring Head 13.23333 -59.6 P PPL BB 07 0 258 America/Barbados 1993-12-22 3373501 Spring Hall Spring Hall 13.31667 -59.6 P PPL BB 07 0 27 America/Barbados 1993-12-22 3373502 Springfield Springfield 13.21667 -59.53333 P PPL BB 06 0 64 America/Barbados 1993-12-22 3373503 The Spout The Spout 13.31667 -59.6 T PT BB 07 0 27 America/Barbados 1993-12-22 3373504 Spencers Spencers 13.08333 -59.46667 P PPL BB 01 0 26 America/Barbados 1993-12-22 3373505 Speightstown Speightstown Speightstown,Spreightstown 13.25 -59.65 P PPLA BB 09 3634 1 America/Barbados 2013-05-05 3373506 South Point Lighthouse South Point Lighthouse South Point Lighthouse 13.03333 -59.51667 S LTHSE BB 01 0 -9999 America/Barbados 2013-04-04 3373507 South Point South Point 13.03333 -59.51667 T PT BB 01 0 -9999 America/Barbados 1993-12-22 3373508 South District South District 13.1 -59.55 P PPL BB 03 0 111 America/Barbados 1993-12-22 3373509 Small Town Small Town 13.16667 -59.5 P PPL BB 05 0 188 America/Barbados 1993-12-22 3373510 Small Hope Small Hope 13.16667 -59.5 P PPL BB 05 0 188 America/Barbados 1993-12-22 3373511 Skeete's Bay Skeete's Bay 13.16878 -59.4481 H BAY BB 05 0 5 America/Barbados 2010-04-16 3373512 Skeenes Hill Skeenes Hill 13.1 -59.51667 P PPL BB 01 0 89 America/Barbados 1993-12-22 3373513 Six Men’s Bay Six Men's Bay Six Men's Bay,Six Men’s Bay 13.26667 -59.63333 H BAY BB 09 0 51 America/Barbados 2012-01-18 3373514 Six Mens Six Mens 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373515 Six Cross Roads Six Cross Roads 13.11667 -59.48333 P PPL BB 01 0 36 America/Barbados 1993-12-22 3373516 Sion Hill Sion Hill 13.23333 -59.61667 P PPL 
BB 09 0 179 America/Barbados 1993-12-22 3373517 Sion Hill Sion Hill 13.08333 -59.5 P PPL BB 01 0 70 America/Barbados 1993-12-22 3373518 Silver Sands Silver Sands 13.05 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373519 Silver Hill Silver Hill 13.06667 -59.55 P PPL BB 01 0 10 America/Barbados 1993-12-22 3373520 Shrewsbury Chapel Shrewsbury Chapel 13.11667 -59.43333 S CH BB 10 0 -9999 America/Barbados 1993-12-22 3373521 Shorey Shorey 13.25 -59.56667 P PPL BB 02 0 20 America/Barbados 1993-12-22 3373522 Shop Hill Shop Hill 13.15 -59.6 P PPL BB 11 0 107 America/Barbados 1993-12-22 3373523 Sherbourne Sherbourne 13.16667 -59.51667 P PPL BB 05 0 203 America/Barbados 1993-12-22 3373524 Sheraton Park Sheraton Park 13.06667 -59.55 P PPL BB 01 0 10 America/Barbados 1993-12-22 3373525 Shark’s Hole Shark's Hole 13.11667 -59.43333 T PT BB 10 0 -9999 America/Barbados 1993-12-22 3373526 The Shallows The Shallows 12.96667 -59.46667 H BNK BB 00 0 -9999 America/Barbados 1993-12-22 3373527 Selah School Selah School 13.3 -59.63333 S SCH BB 07 0 52 America/Barbados 1993-12-22 3373528 Sedge Pond Sedge Pond 13.25 -59.58333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373529 Seaview Seaview 13.31667 -59.6 P PPL BB 07 0 27 America/Barbados 1993-12-22 3373530 Seaview Seaview 13.16667 -59.61667 P PPL BB 04 0 120 America/Barbados 1993-12-22 3373531 Seaview Seaview 13.05 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373532 Searles Searles 13.1 -59.5 P PPL BB 01 0 46 America/Barbados 1993-12-22 3373533 Searles Searles Seales,Searles 13.08333 -59.5 P PPL BB BB 01 0 70 America/Barbados 2012-01-18 3373534 Sealy Hill Sealy Hill 13.15 -59.43333 P PPL BB 10 0 32 America/Barbados 1993-12-22 3373535 Sealy Hall Sealy Hall 13.16667 -59.46667 P PPL BB 05 0 91 America/Barbados 1993-12-22 3373536 Scotland District Scotland District 13.21667 -59.63333 L RGN BB 01 0 63 America/Barbados 1993-12-22 3373537 Scarborough Scarborough Scarboro,Scarborough 13.05 -59.53333 P PPL BB BB 01 0 13 
America/Barbados 2012-01-18 3373538 Sayes Court Sayes Court 13.05 -59.51667 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373539 Satellite Earth Station Satellite Earth Station 13.18333 -59.48333 S STNS BB 05 0 104 America/Barbados 1993-12-22 3373540 Sargeant Sargeant 13.08333 -59.56667 P PPL BB 01 0 68 America/Barbados 1993-12-22 3373541 Sandy Lane Bay Sandy Lane Bay 13.16667 -59.63333 H BAY BB 04 0 25 America/Barbados 1993-12-22 3373542 Sandy Lane Sandy Lane 13.16667 -59.63333 P PPL BB 04 0 25 America/Barbados 1993-12-22 3373543 Sandy Hill Point Sandy Hill Point 13.31667 -59.6 T PT BB 07 0 27 America/Barbados 1993-12-22 3373544 Sandford Sandford 13.13333 -59.45 P PPL BB 10 0 39 America/Barbados 1993-12-22 3373545 Sam Lords Castle Sam Lords Castle Lords Castle,Sam Lords Castle 13.11667 -59.43333 P PPL BB BB 10 0 -9999 America/Barbados 2012-01-18 3373546 The Salt Lakes The Salt Lakes 13.31667 -59.6 H LKN BB 07 0 27 America/Barbados 2014-10-01 3373547 Salters Salters 13.11667 -59.56667 P PPL BB 03 0 47 America/Barbados 1993-12-22 3373548 Salt Cave Point Salt Cave Point 13.08333 -59.46667 T PT BB 10 0 26 America/Barbados 1993-12-22 3373549 Salt Cave Salt Cave 13.06667 -59.45 H COVE BB 10 0 -9999 America/Barbados 1993-12-22 3373550 Salmond Salmond 13.31667 -59.63333 P PPL BB 07 0 41 America/Barbados 1993-12-22 3373551 Saint Thomas Saint Thomas Agios Thomas,Parroquia de Saint Thomas,Saint Thomas,Saint Thomas prestegjeld,Sankta Tomaso,Sent Tomas,sheng tuo ma si qu,Άγιος Θωμάς,Сент Томас,聖托馬斯區 13.18333 -59.58333 A ADM1 BB 11 11850 262 America/Barbados 2012-01-16 3373552 Saint Swithins Church Saint Swithins Church 13.3 -59.61667 S CH BB 07 0 84 America/Barbados 1993-12-22 3373553 Saint Philip Saint Philip Agios Filippos,Parroquia de Saint Philip,Saint Philip,Saint Philip prestegjeld,Sankta Filipo,Sent-Filip,sheng fei li pu qu,Άγιος Φίλιππος,Сент-Філіп,聖菲利普區 13.11667 -59.46667 A ADM1 BB 10 20944 29 America/Barbados 2012-01-16 3373554 Saint Peter Saint Peter Agios 
Petros,Parroquia de Saint Peter,Saint Peter,Saint Peter prestegjeld,Saint Peters,Sankta Petro,Sent-Piter,sheng bi de jiao qu,Άγιος Πέτρος,Сент-Пітер,聖彼得教區 13.25 -59.61667 A ADM1 BB 09 11544 158 America/Barbados 2012-01-16 3373555 Saint Nicholas Abbey Saint Nicholas Abbey 13.26667 -59.58333 S HSE BB 09 0 139 America/Barbados 1993-12-22 3373556 Saint Nicholas Saint Nicholas Nicholas Abbey,Saint Nicholas 13.28333 -59.58333 P PPL BB BB 09 0 199 America/Barbados 2012-01-18 3373557 Saint Michael Saint Michael Agios Michail,Parroquia de Saint Michael,Saint Michael,Saint Michael prestegjeld,Sankta Mikaelo,sant maykl,sheng mai ke er qu,Άγιος Μιχαήλ,سانت مايكل,聖邁克爾區 13.11667 -59.6 A ADM1 BB 08 99609 53 America/Barbados 2012-01-16 3373558 Saint Mathias Saint Mathias 13.06667 -59.6 P PPL BB 01 0 1 America/Barbados 1993-12-22 3373559 Saint Martins Saint Martins 13.08333 -59.46667 P PPL BB 10 0 26 America/Barbados 1993-12-22 3373560 Saint Marks Saint Marks 13.16667 -59.45 P PPL BB 05 0 36 America/Barbados 1993-12-22 3373561 Saint Margaret’s Church Saint Margaret's Church 13.18333 -59.5 S CH BB 05 0 251 America/Barbados 1993-12-22 3373562 Saint Margarets Saint Margarets 13.18333 -59.5 P PPL BB 05 0 251 America/Barbados 1993-12-22 3373563 Saint Lucy’s School Saint Lucy's School 13.28333 -59.61667 S SCH BB 07 0 118 America/Barbados 1993-12-22 3373564 Saint Lucy District Hospital Saint Lucy District Hospital 13.31667 -59.6 S HSP BB 07 0 27 America/Barbados 1993-12-22 3373565 Saint Lucy Saint Lucy Agia Loukia,Parroquia de Saint Lucy,Saint Lucy,Saint Lucy prestegjeld,Sankta Lucio,Sent-Ljusi,sheng lu xi jiao qu,Αγία Λουκία,Сент-Люсі,聖露西教區 13.3 -59.61667 A ADM1 BB 07 9706 84 America/Barbados 2012-01-16 3373566 Saint Lawrence Saint Lawrence 13.06667 -59.58333 P PPL BB 01 0 5 America/Barbados 1993-12-22 3373567 Saint Judes Saint Judes 13.15 -59.53333 P PPL BB 03 0 197 America/Barbados 1993-12-22 3373568 Saint Joseph Saint Joseph Agios Iosif,Parroquia de Saint Joseph,Saint Joseph,Saint 
Joseph prestegjeld,Sankta Jozefo,Sent DZozef,sheng yue se fu qu,Άγιος Ιωσήφ,Сент Џозеф,聖約瑟夫區 13.2 -59.53333 A ADM1 BB 06 7764 324 America/Barbados 2012-01-16 3373569 Saint John Saint John Agios Ioannis,Parroquia de Saint John,Saint John,Saint John prestegjeld,Saint-John,Sankta Johano,Sent DZon,sheng yue han jiao qu,Άγιος Ιωάννης,Сент Џон,聖約翰教區 13.16667 -59.48333 A ADM1 BB 05 10421 193 America/Barbados 2012-01-16 3373570 Saint James Saint James Agios Iakovos,Parroquia de Saint James,Saint James,Saint James prestegjeld,Sankta Jakobo,sheng zhan mu si jiao qu,Άγιος Ιάκωβος,聖詹姆斯教區 13.21667 -59.61667 A ADM1 BB 04 21454 167 America/Barbados 2012-01-16 3373571 Saint Georges Valley Saint Georges Valley 13.11667 -59.53333 T VAL BB 03 0 48 America/Barbados 1993-12-22 3373572 Saint George Saint George Agios Georgios,Parroquia de Saint George,Saint George,Saint George prestegjeld,Sankta Georgo,Sent DZordz,sheng qiao zhi jiao qu,Άγιος Γεώργιος,Сент Џорџ,聖喬治教區 13.13333 -59.53333 A ADM1 BB 03 19530 138 America/Barbados 2013-06-30 3373573 Saint Elizabeths Saint Elizabeths 13.2 -59.53333 P PPL BB 06 0 324 America/Barbados 1993-12-22 3373574 Saint Davids Saint Davids 13.08333 -59.55 P PPL BB 01 0 79 America/Barbados 1993-12-22 3373575 Saint Clement Vicarage Saint Clement Vicarage 13.3 -59.58333 S HSE BB 07 0 63 America/Barbados 1993-12-22 3373576 Saint Clements Schools Saint Clements Schools 13.3 -59.58333 S SCH BB 07 0 63 America/Barbados 1993-12-22 3373577 Saint Clements Church Saint Clements Church 13.3 -59.58333 S CH BB 07 0 63 America/Barbados 1993-12-22 3373578 Saint Andrews Station Saint Andrews Station Saint Andrew,Saint Andrews Station 13.25 -59.55 S RSTN BB BB 02 0 1 America/Barbados 2012-01-18 3373579 Saint Andrews Saint Andrews 13.25 -59.55 P PPL BB 02 0 1 America/Barbados 1993-12-22 3373580 Saint Andrew Saint Andrew Agios Andreas,Saint Andrew,Saint Andrew prestegjeld,Saint Andrews,Sankta Andreo,sheng an de lu qu,Άγιος Ανδρέας,聖安德魯區 13.23333 -59.56667 A ADM1 BB 02 6436 80 
America/Barbados 2012-01-16 3373581 Ruby Ruby 13.13333 -59.45 P PPL BB 10 0 39 America/Barbados 1993-12-22 3373582 Rowans Rowans 13.13333 -59.56667 P PPL BB 03 0 86 America/Barbados 1993-12-22 3373583 Round Rock Round Rock 13.26667 -59.56667 T RK BB 02 0 10 America/Barbados 1993-12-22 3373584 Round Rock Round Rock 13.03333 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373585 Rouen Station Rouen Station 13.11667 -59.56667 S RSTN BB 08 0 47 America/Barbados 1993-12-22 3373586 Rouen Rouen 13.1 -59.58333 P PPL BB 08 0 66 America/Barbados 1993-12-22 3373587 Rose Hill Rose Hill 13.26667 -59.61667 P PPL BB 09 0 128 America/Barbados 1993-12-22 3373588 Rocky Bay Rocky Bay 13.31667 -59.6 H COVE BB 07 0 27 America/Barbados 1993-12-22 3373589 Rockley Beach Rockley Beach 13.06667 -59.58333 T BCH BB 01 0 5 America/Barbados 1993-12-22 3373590 Rockley Rockley 13.07471 -59.58869 P PPL BB 01 0 8 America/Barbados 2011-03-17 3373591 Rock Hall Rock Hall 13.28333 -59.6 P PPL BB 07 0 153 America/Barbados 1993-12-22 3373592 Rock Hall Rock Hall 13.25 -59.6 P PPL BB 09 0 246 America/Barbados 1993-12-22 3373593 Rock Hall Rock Hall 13.18333 -59.6 P PPL BB 11 0 210 America/Barbados 1993-12-22 3373594 Rock Hall Rock Hall 13.08333 -59.46667 L LCTY BB 10 0 26 America/Barbados 1993-12-22 3373595 Rockfield Rockfield 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3373596 Rock Dundo Rock Dundo 13.21667 -59.61667 P PPL BB 04 0 167 America/Barbados 1993-12-22 3373597 Rock Dundo Rock Dundo 13.11667 -59.6 P PPL BB 08 0 53 America/Barbados 1993-12-22 3373598 Robinsons Robinsons 13.11667 -59.43333 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373599 Roaches Roaches 13.31667 -59.61667 L LCTY BB 07 0 38 America/Barbados 1993-12-22 3373600 Roach Roach 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373601 River Bay River Bay 13.31667 -59.58333 H BAY BB 07 0 -9999 America/Barbados 1993-12-22 3373602 River River 13.13333 -59.43333 P PPL BB 10 0 27 America/Barbados 1993-12-22 
3373603 Rices Rices 13.1 -59.45 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373604 Retreat Retreat 13.31667 -59.61667 P PPL BB 07 0 38 America/Barbados 1993-12-22 3373605 Retreat Retreat 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373606 Retreat Retreat 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373607 Rendezvous Rendezvous 13.06667 -59.58333 P PPL BB 01 0 5 America/Barbados 1993-12-22 3373608 Regency Park Regency Park 13.08333 -59.56667 P PPL BB 01 0 68 America/Barbados 1993-12-22 3373609 Reeds Hill Reeds Hill 13.15 -59.61667 P PPL BB 04 0 117 America/Barbados 1993-12-22 3373610 Redmans Redmans 13.15 -59.6 P PPL BB 11 0 107 America/Barbados 1993-12-22 3373611 Redland Redland 13.18333 -59.53333 P PPL BB 06 0 265 America/Barbados 1993-12-22 3373612 Read’s Bay Read's Bay 13.2 -59.63333 H BAY BB 04 0 41 America/Barbados 1993-12-22 3373613 Ragged Point Ragged Point 13.16667 -59.43333 T PT BB 10 0 -9999 America/Barbados 1993-12-22 3373614 Providence Providence 13.06667 -59.51667 P PPL BB 01 0 23 America/Barbados 1993-12-22 3373615 Proutes Proutes 13.15 -59.56667 P PPL BB 11 0 166 America/Barbados 1993-12-22 3373616 Prospect Prospect 13.25 -59.58333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373617 Prospect Prospect 13.13333 -59.63333 P PPL BB 08 0 27 America/Barbados 1993-12-22 3373618 Prior Park Prior Park 13.13333 -59.61667 P PPL BB 04 0 84 America/Barbados 1993-12-22 3373619 Prerogative Prerogative 13.15 -59.53333 P PPL BB 03 0 197 America/Barbados 1993-12-22 3373620 Portland Portland 13.26667 -59.6 P PPL BB 09 0 198 America/Barbados 1993-12-22 3373621 Porters Porters 13.2 -59.63333 P PPL BB 04 0 41 America/Barbados 1993-12-22 3373622 Poreys Spring Poreys Spring 13.18333 -59.6 P PPL BB 11 0 210 America/Barbados 1993-12-22 3373623 Pool Pool 13.18333 -59.5 P PPL BB 05 0 251 America/Barbados 1993-12-22 3373624 Plumtree Plumtree 13.2 -59.6 P PPL BB 10 0 218 America/Barbados 1993-12-22 3373625 Pinelands Pinelands 13.08333 
-59.58333 P PPL BB 08 0 38 America/Barbados 1993-12-22 3373626 Pine Housing Estate Pine Housing Estate Pine,Pine Housing Estate 13.1 -59.6 P PPL BB BB 08 0 30 America/Barbados 2012-01-18 3373627 Pilgrim Road Pilgrim Road 13.06667 -59.5 P PPL BB 01 0 19 America/Barbados 1993-12-22 3373628 Pilgrim Place Pilgrim Place 13.06667 -59.51667 P PPL BB 01 0 23 America/Barbados 1993-12-22 3373629 Pie Corner Pie Corner 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3373630 Pickerings Pickerings 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373631 Pennyhole Rock Pennyhole Rock 13.08333 -59.46667 T RK BB 10 0 26 America/Barbados 1993-12-22 3373632 Pelican Island Pelican Island Pelican Island,Pelican Islet 13.1 -59.63333 T ISL BB BB 08 0 1 America/Barbados 2012-01-18 3373633 Pegwell Pegwell 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373634 Paynes Bay Paynes Bay Paynes Bay 13.16667 -59.63333 H BAY BB 08 0 25 America/Barbados 2012-01-18 3373635 Paul’s Point Paul's Point 13.3 -59.56667 T PT BB 07 0 -9999 America/Barbados 1993-12-22 3373636 Parish Land Parish Land 13.06667 -59.51667 P PPL BB 01 0 23 America/Barbados 1993-12-22 3373637 Paragon Paragon 13.06667 -59.48333 P PPL BB 01 0 1 America/Barbados 1993-12-22 3373638 Palmetto Bay Palmetto Bay 13.13333 -59.41667 H COVE BB 10 0 -9999 America/Barbados 1993-12-22 3373639 Palmers Palmers 13.15 -59.46667 P PPL BB 05 0 88 America/Barbados 1993-12-22 3373640 Padmore Padmore 13.11667 -59.46667 P PPL BB 10 0 29 America/Barbados 1993-12-22 3373641 Packers Packers 13.08333 -59.5 P PPL BB 01 0 70 America/Barbados 1993-12-22 3373642 Oxnards Oxnards 13.13333 -59.61667 P PPL BB 08 0 84 America/Barbados 1993-12-22 3373643 Oxford Oxford 13.28333 -59.6 P PPL BB 09 0 153 America/Barbados 1993-12-22 3373644 Oxford Oxford 13.26667 -59.6 L LCTY BB 00 0 198 America/Barbados 1993-12-22 3373645 Oughtersons Oughtersons 13.13333 -59.46667 P PPL BB 10 0 54 America/Barbados 1993-12-22 3373646 Orange Hill Orange Hill 
13.25 -59.6 P PPLL BB 09 0 246 America/Barbados 1993-12-22 3373647 Orange Hill Orange Hill 13.2 -59.6 P PPL BB 04 0 218 America/Barbados 1993-12-22 3373648 Oliver’s Cave Oliver's Cave 13.08333 -59.45 H COVE BB 10 0 -9999 America/Barbados 1993-12-22 3373649 Old Post Office Old Post Office 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373650 Oldbury Oldbury 13.08333 -59.46667 P PPL BB 10 0 26 America/Barbados 1993-12-22 3373651 Oistins Bay Oistins Bay Oistin Bay,Oistins Bay 13.05 -59.55 H BAY BB BB 01 0 1 America/Barbados 2012-01-18 3373652 Oistins Oistins Oistin's Town,Oistins,Oistin’s Town 13.06667 -59.53333 P PPLA BB 01 2285 48 America/Barbados 2013-06-26 3373653 Ocean City Ocean City 13.08333 -59.38333 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373654 North Point North Point 13.33333 -59.6 T PT BB 07 0 -9999 America/Barbados 1993-12-22 3373655 Norse’s Bay Norse's Bay 13.3 -59.63333 H BAY BB 07 0 52 America/Barbados 1993-12-22 3373656 Newton Terrace Newton Terrace 13.06667 -59.53333 P PPL BB 01 0 48 America/Barbados 1993-12-22 3373657 New Orleans New Orleans 13.1 -59.61667 P PPLX BB 08 0 10 America/Barbados 1993-12-22 3373658 New Fall Cliff New Fall Cliff 13.08333 -59.45 T CLF BB 10 0 -9999 America/Barbados 1993-12-22 3373659 Newcastle Newcastle 13.2 -59.48333 P PPL BB 05 0 -9999 America/Barbados 1993-12-22 3373660 Newcastle Newcastle 13.18333 -59.48333 S HSE BB 05 0 104 America/Barbados 1993-12-22 3373661 Newbury Newbury 13.13333 -59.55 P PPL BB 03 0 117 America/Barbados 1993-12-22 3373662 Nesfield Nesfield 13.28333 -59.61667 P PPL BB 07 0 118 America/Barbados 1993-12-22 3373663 Neils Neils 13.11667 -59.56667 P PPL BB 08 0 47 America/Barbados 1993-12-22 3373664 Needham's Point Needham's Point Needham Point 13.07935 -59.61229 T PT BB BB 08 0 6 America/Barbados 2010-02-01 3373665 Navy Gardens Navy Gardens 13.06667 -59.58333 P PPL BB 01 0 5 America/Barbados 1993-12-22 3373666 Nan’s Bay Nan's Bay 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 
1993-12-22 3373667 Mullins Bay Mullins Bay 13.21667 -59.63333 H BAY BB 09 0 63 America/Barbados 1993-12-22 3373668 Mullins Mullins 13.21667 -59.63333 P PPL BB 09 0 63 America/Barbados 1993-12-22 3373669 Mount Wilton Mount Wilton 13.18333 -59.56667 P PPL BB 11 0 267 America/Barbados 1993-12-22 3373670 Mount View Mount View 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373671 Mount Stepney Mount Stepney 13.26667 -59.58333 P PPL BB 09 0 139 America/Barbados 1993-12-22 3373672 Mount Standfast Mount Standfast 13.2 -59.63333 P PPL BB 04 0 41 America/Barbados 1993-12-22 3373673 Mount Royer Mount Royer 13.3 -59.61667 P PPLL BB 07 0 84 America/Barbados 1993-12-22 3373674 Mount Pleasant Mount Pleasant 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373675 Mount Pleasant Mount Pleasant 13.15 -59.46667 P PPL BB 10 0 88 America/Barbados 1993-12-22 3373676 Mount Gay Mount Gay 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373677 Mount Friendship Mount Friendship 13.1 -59.58333 P PPL BB 08 0 66 America/Barbados 1993-12-22 3373678 Mount Brevitor Mount Brevitor 13.26667 -59.6 P PPL BB 09 0 198 America/Barbados 1993-12-22 3373679 Mount Mount 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3373680 Mother’s Day Bay Mother's Day Bay 13.28333 -59.65 H COVE BB 07 0 9 America/Barbados 1993-12-22 3373681 Morgan Lewis Beach Morgan Lewis Beach 13.26667 -59.56667 T BCH BB 02 0 10 America/Barbados 1993-12-22 3373682 Morgan Lewis Morgan Lewis 13.26667 -59.56667 P PPL BB 09 0 10 America/Barbados 1993-12-22 3373683 Moores Moores 13.16667 -59.5 P PPL BB 05 0 188 America/Barbados 1993-12-22 3373684 Moore Hill Moore Hill 13.26667 -59.58333 P PPL BB 09 0 139 America/Barbados 1993-12-22 3373685 Moonshine Hall Moonshine Hall 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373686 Montrose Montrose 13.06667 -59.53333 P PPL BB 01 0 48 America/Barbados 1993-12-22 3373687 Molyneux Molyneux 13.18333 -59.61667 P PPL BB 04 0 116 America/Barbados 
1993-12-22 3373688 Mount Misery Mount Misery 13.2 -59.58333 T MT BB 11 0 259 America/Barbados 1993-12-22 3373689 Mile and a Quarter Mile and a Quarter 13.25 -59.61667 P PPL BB 09 0 158 America/Barbados 1993-12-22 3373690 Middle Bay Middle Bay 13.31667 -59.6 H COVE BB 07 0 27 America/Barbados 1993-12-22 3373691 Merricks Merricks 13.13333 -59.41667 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373692 Melvin Hill Melvin Hill 13.2 -59.55 P PPL BB 06 0 215 America/Barbados 1993-12-22 3373693 Melverton Melverton 13.13333 -59.51667 P PPL BB 03 0 123 America/Barbados 1993-12-22 3373694 Maynards Maynards 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373695 Maycock’s Bay Maycock's Bay Maycock's Bay,Maycock’s Bay 13.3 -59.65 H BAY BB 07 0 0 America/Barbados 2012-01-18 3373696 Maycock Maycock 13.28333 -59.63333 P PPL BB 07 0 56 America/Barbados 1993-12-22 3373697 Maxwell Hill Maxwell Hill Maxwell,Maxwell Hill 13.06667 -59.56667 P PPL BB BB 01 0 8 America/Barbados 2012-01-18 3373698 Maxwell Coast Maxwell Coast 13.06667 -59.55 P PPL BB 01 0 10 America/Barbados 1993-12-22 3373699 Maxwell Coast Maxwell Coast 13.06667 -59.55 T BCH BB 01 0 10 America/Barbados 1993-12-22 3373700 Maxwell Maxwell 13.06667 -59.55 P PPL BB 01 0 10 America/Barbados 1993-12-22 3373701 Massiah Street Massiah Street Massiah Street,Rosegate 13.16667 -59.48333 P PPL BB BB 05 0 193 America/Barbados 2012-01-18 3373702 Martins Bay Martins Bay 13.18333 -59.48333 P PPL BB 05 0 104 America/Barbados 1993-12-22 3373703 Marley Vale Marley Vale 13.15 -59.43333 P PPL BB 10 0 32 America/Barbados 1993-12-22 3373704 Market Hill Market Hill 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373705 Marine Gardens Marine Gardens 13.06667 -59.58333 P PPL BB 01 0 5 America/Barbados 1993-12-22 3373706 Marchfield Marchfield Marchfield 13.11667 -59.46667 P PPL BB 10 0 29 America/Barbados 2012-01-18 3373707 Mapp Hill Mapp Hill 13.1 -59.56667 P PPL BB 08 0 108 America/Barbados 1993-12-22 3373708 Mangrove 
Mangrove 13.23333 -59.6 P PPL BB 02 0 258 America/Barbados 1993-12-22 3373709 Mangrove Mangrove 13.08333 -59.46667 P PPL BB 10 0 26 America/Barbados 1993-12-22 3373710 Malvern Malvern 13.1942 -59.52066 P PPL BB 06 0 266 America/Barbados 2014-07-18 3373711 Lynches Lynches 13.31667 -59.6 T PT BB 07 0 27 America/Barbados 1993-12-22 3373712 Luke Hill Luke Hill 13.26667 -59.61667 P PPLL BB 07 0 128 America/Barbados 1993-12-22 3373713 Lucas Street Lucas Street 13.11667 -59.45 P PPL BB 10 0 22 America/Barbados 1993-12-22 3373714 Lowthers Lowthers 13.08333 -59.48333 P PPL BB 01 0 60 America/Barbados 1993-12-22 3373715 Lowland Lowland 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373716 Lowland Lowland Lowland,Lowlands 13.08333 -59.51667 P PPL BB BB 01 0 87 America/Barbados 2012-01-18 3373717 Lower Greys Lower Greys 13.1 -59.51667 P PPL BB 01 0 89 America/Barbados 1993-12-22 3373718 Lower Estate Lower Estate 13.13333 -59.56667 P PPL BB 03 0 86 America/Barbados 1993-12-22 3373719 Lower Carlton Lower Carlton 13.21667 -59.65 P PPL BB 04 0 1 America/Barbados 1993-12-22 3373720 Lower Birneys Lower Birneys 13.1 -59.56667 P PPL BB 08 0 108 America/Barbados 1993-12-22 3373721 Long Pond Long Pond 13.25 -59.55 H INLT BB 02 0 1 America/Barbados 1993-12-22 3373722 Long Bay Long Bay 13.13333 -59.43333 H BGHT BB 10 0 27 America/Barbados 1993-12-22 3373723 Long Bay Long Bay 13.06667 -59.48333 H BAY BB 01 0 1 America/Barbados 1993-12-22 3373724 Lodge Road Lodge Road 13.06667 -59.53333 P PPL BB 01 0 48 America/Barbados 1993-12-22 3373725 Locust Hall Locust Hall 13.15 -59.56667 P PPL BB 03 0 166 America/Barbados 1993-12-22 3373726 Littlegood Harbour Littlegood Harbour 13.26667 -59.63333 H HBR BB 09 0 51 America/Barbados 1993-12-22 3373727 Little Bay Little Bay 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 1993-12-22 3373728 Little Bay Little Bay 13.03333 -59.51667 H COVE BB 01 0 -9999 America/Barbados 1993-12-22 3373729 Little Battaleys Little Battaleys 13.23333 -59.63333 P 
PPL BB 09 0 56 America/Barbados 1993-12-22 3373730 Litchfield Litchfield 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3373731 Lion Castle Tenantry Lion Castle Tenantry 13.18333 -59.58333 P PPL BB 11 0 262 America/Barbados 1993-12-22 3373732 Lion Lion 13.13333 -59.55 P PPL BB 03 0 117 America/Barbados 1993-12-22 3373733 Less Beholden Less Beholden 13.21667 -59.55 P PPLL BB 02 0 269 America/Barbados 1993-12-22 3373734 Lemon Arbour Lemon Arbour 13.16667 -59.53333 P PPL BB 05 0 249 America/Barbados 1993-12-22 3373735 Lears Lears 13.15 -59.58333 P PPL BB 11 0 105 America/Barbados 1993-12-22 3373736 Lead Vale Lead Vale 13.08333 -59.5 P PPL BB 01 0 70 America/Barbados 1993-12-22 3373737 Lazaretto Lazaretto 13.13333 -59.61667 P PPL BB 04 0 84 America/Barbados 1993-12-22 3373738 Laycock Bay Laycock Bay 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 1993-12-22 3373739 Lascelles Lascelles 13.18333 -59.63333 P PPL BB 04 0 18 America/Barbados 1993-12-22 3373740 The Landlock The Landlock 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 1993-12-22 3373741 Lancaster Lancaster 13.2 -59.61667 P PPL BB 04 0 126 America/Barbados 1993-12-22 3373742 Lamberts Lamberts Lamberts 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 2012-01-18 3373743 Lambert Point Lambert Point 13.31667 -59.63333 T PT BB 07 0 41 America/Barbados 1993-12-22 3373744 Lakes Beach Lakes Beach 13.23333 -59.55 T BCH BB 02 0 64 America/Barbados 1993-12-22 3373745 Lakes Lakes 13.23333 -59.55 P PPL BB 02 0 64 America/Barbados 1993-12-22 3373746 Ladder Bay Ladder Bay 13.31667 -59.6 H BAY BB 07 0 27 America/Barbados 1993-12-22 3373747 Kitridge Point Kitridge Point Kitridge Point,Kittridge Point 13.15 -59.41667 T PT BB BB 10 0 1 America/Barbados 2012-01-18 3373748 Kitridge Bay Kitridge Bay 13.15 -59.41667 H BAY BB 10 0 1 America/Barbados 1993-12-22 3373749 Kirtons Kirtons 13.1 -59.45 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373750 Kingsland Kingsland 13.08333 -59.53333 P PPL BB 01 0 88 
America/Barbados 1993-12-22 3373751 King’s Bay King's Bay 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 1993-12-22 3373752 Kent Kent 13.08333 -59.55 P PPL BB 01 0 79 America/Barbados 1993-12-22 3373753 Kendal Point Kendal Point 13.05 -59.53333 T PT BB 01 0 13 America/Barbados 1993-12-22 3373754 Kendal Hill Kendal Hill 13.06667 -59.55 P PPL BB 01 0 10 America/Barbados 1993-12-22 3373755 Kendal Factory Kendal Factory 13.15 -59.5 P PPL BB 05 0 151 America/Barbados 1993-12-22 3373756 Kendal Kendal 13.15 -59.5 P PPL BB 05 0 151 America/Barbados 1993-12-22 3373757 Kelzer Hill Kelzer Hill 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373758 Josey Hill Josey Hill 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373759 Jordans Cowpen Jordans Cowpen 13.31667 -59.61667 H COVE BB 07 0 38 America/Barbados 1993-12-22 3373760 Jordans Jordans Jordan,Jordans 13.13333 -59.55 P PPL BB BB 03 0 117 America/Barbados 2012-01-18 3373761 Jones Bay Jones Bay 13.31667 -59.58333 H COVE BB 07 0 -9999 America/Barbados 1993-12-22 3373762 Joes River Joes River 13.21667 -59.53333 P PPL BB 06 0 64 America/Barbados 1993-12-22 3373763 Jezreel Jezreel 13.11667 -59.45 P PPL BB 10 0 22 America/Barbados 1993-12-22 3373764 Jerusalem Agricultural Station Jerusalem Agricultural Station 13.25 -59.61667 S AGRF BB 09 0 158 America/Barbados 1993-12-22 3373765 Jericho Jericho 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373766 Jemmotts Jemmotts 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373767 Jamestown Park Jamestown Park 13.18333 -59.63333 P PPL BB 04 0 18 America/Barbados 1993-12-22 3373768 Jackson Jackson 13.15 -59.6 P PPL BB 08 0 107 America/Barbados 1993-12-22 3373769 Jackmans Jackmans 13.13333 -59.58333 P PPL BB 08 0 69 America/Barbados 1993-12-22 3373770 Industry Hall Industry Hall 13.15 -59.43333 P PPL BB 10 0 32 America/Barbados 1993-12-22 3373771 Indian River Indian River Indian River 13.1 -59.61667 H STM BB 08 0 10 America/Barbados 2012-01-18 
3373772 Indian Ground Indian Ground 13.25 -59.6 P PPL BB 02 0 246 America/Barbados 1993-12-22 3373773 Inch Marlowe Swamp Inch Marlowe Swamp 13.05 -59.5 H SWMP BB 01 0 -9999 America/Barbados 1993-12-22 3373774 Inch Marlowe Point Inch Marlowe Point 13.05 -59.5 T PT BB 01 0 -9999 America/Barbados 1993-12-22 3373775 Inch Marlowe Inch Marlowe 13.06667 -59.5 P PPL BB 01 0 19 America/Barbados 1993-12-22 3373776 Husbands Husbands 13.28333 -59.65 P PPL BB 07 0 9 America/Barbados 1993-12-22 3373777 Husbands Husbands 13.15 -59.61667 P PPL BB 04 0 117 America/Barbados 1993-12-22 3373778 Hoytes Hoytes 13.21667 -59.56667 P PPL BB 02 0 78 America/Barbados 1993-12-22 3373779 Hoytes Hoytes 13.15 -59.61667 P PPL BB 04 0 117 America/Barbados 1993-12-22 3373780 Howells Howells 13.1 -59.58333 P PPL BB 08 0 66 America/Barbados 1993-12-22 3373781 Hothersal Turning Hothersal Turning 13.11667 -59.58333 P PPL BB 01 0 43 America/Barbados 1993-12-22 3373782 Hothersal Hothersal 13.18333 -59.5 P PPL BB 05 0 251 America/Barbados 1993-12-22 3373783 Horse Shoe Bay Horse Shoe Bay 13.31667 -59.61667 H COVE BB 07 0 38 America/Barbados 1993-12-22 3373784 Horse Hill Horse Hill 13.2 -59.53333 T HLL BB 06 0 324 America/Barbados 1993-12-22 3373785 The Horse The Horse 13.1 -59.43333 T PT BB 10 0 -9999 America/Barbados 1993-12-22 3373786 Hopewell Hopewell Hopewell 13.16667 -59.58333 P PPL BB 11 0 159 America/Barbados 2013-04-04 3373787 Hopewell Hopewell Hopewell 13.05 -59.51667 P PPL BB 01 0 13 America/Barbados 2013-04-04 3373788 Hopeland Hopeland 13.1 -59.45 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373789 Hope Hope 13.31667 -59.6 P PPL BB 07 0 27 America/Barbados 1993-12-22 3373790 Holetown Holetown Holetown,The Hole 13.18672 -59.63808 P PPLA BB 04 1350 -1 America/Barbados 2012-01-16 3373791 Holders Holders 13.16667 -59.61667 P PPL BB 04 0 120 America/Barbados 1993-12-22 3373792 Hillcrest Hillcrest 13.21028 -59.52307 P PPL BB 06 0 13 America/Barbados 2014-07-18 3373793 Mount Hillaby Mount Hillaby 
13.2 -59.58 T MT BB 02 0 340 220 America/Barbados 2006-01-17 3373794 Hillaby Hillaby Hillaby,Mount Hillaby 13.21667 -59.58333 P PPL BB 00 519 196 America/Barbados 2012-01-18 3373795 The Hill The Hill 13.23333 -59.6 T HLL BB 02 0 258 America/Barbados 1993-12-22 3373796 Highland Highland 13.1 -59.48333 P PPL BB 10 0 42 America/Barbados 1993-12-22 3373797 Highgate Highgate Highgate,Highgate House 13.08333 -59.58333 P PPL BB BB 08 0 38 America/Barbados 2012-01-18 3373798 Heywoods Beach Heywoods Beach 13.25 -59.63333 T BCH BB 09 0 50 America/Barbados 1993-12-22 3373799 Heywoods Heywoods Heywoods,Heywoods Village 13.25 -59.65 P PPL BB BB 09 0 1 America/Barbados 2012-01-18 3373800 Henrys Henrys 13.08333 -59.6 P PPLX BB 08 0 23 America/Barbados 1993-12-22 3373801 Henley Henley 13.15 -59.51667 P PPL BB 05 0 156 America/Barbados 1993-12-22 3373802 Heddings Heddings 13.1 -59.45 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373803 Haynesville Haynesville 13.15 -59.61667 P PPL BB 04 0 117 America/Barbados 1993-12-22 3373804 Haymans Factory Haymans Factory 13.25 -59.61667 P PPL BB 09 0 158 America/Barbados 1993-12-22 3373805 Hastings Hastings 13.07513 -59.59688 P PPL BB 01 0 9 America/Barbados 2008-01-10 3373806 Harrow Harrow 13.13333 -59.46667 P PPL BB 10 0 54 America/Barbados 1993-12-22 3373807 Harrisons Harrisons 13.3 -59.63333 P PPL BB 07 0 52 America/Barbados 1993-12-22 3373808 Harrison Reefs Harrison Reefs 13.31667 -59.66667 H RF BB 00 0 -9999 America/Barbados 1993-12-22 3373809 Harrison Point Harrison Point 13.3 -59.65 T PT BB 07 0 0 America/Barbados 1993-12-22 3373810 Harrismith Harrismith 13.11667 -59.41667 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373811 Harris Harris 13.3 -59.61667 P PPL BB 07 0 84 America/Barbados 1993-12-22 3373812 Hanson Hanson 13.1 -59.56667 L LCTY BB 03 0 108 America/Barbados 1993-12-22 3373813 Hannays Tenantry Hannays Tenantry 13.1 -59.51667 P PPL BB 01 0 89 America/Barbados 1993-12-22 3373814 Hannays Hannays 13.28333 -59.63333 P PPL 
BB 07 0 56 America/Barbados 1993-12-22 3373815 Hannays Hannays 13.1 -59.51667 P PPL BB 01 0 89 America/Barbados 1993-12-22 3373816 Hangman’s Bay Hangman's Bay 13.28333 -59.65 H BAY BB 07 0 9 America/Barbados 1993-12-22 3373817 Halton Halton 13.13333 -59.48333 P PPL BB 10 0 78 America/Barbados 1993-12-22 3373818 Half Acre Half Acre 13.28333 -59.61667 P PPLL BB 07 0 118 America/Barbados 1993-12-22 3373819 Haggatt Hall Haggatt Hall 13.1 -59.56667 P PPL BB 08 0 108 America/Barbados 1993-12-22 3373820 Hackletons Cliff Hackletons Cliff Hacklestons Cliff,Hackletons Cliff 13.20164 -59.52521 T CLF BB BB 06 0 208 America/Barbados 2014-07-18 3373821 Gun Hill Gun Hill 13.13333 -59.55 T HLL BB 03 0 117 America/Barbados 1993-12-22 3373822 Guinea Guinea 13.15 -59.48333 P PPL BB 10 0 160 America/Barbados 1993-12-22 3373823 Grove’s Agricultural Station Grove's Agricultural Station Grove's Agricultural Station,Groves,Grove’s Agricultural Station 13.15 -59.55 S AGRF BB 03 0 212 America/Barbados 2012-01-18 3373824 Greshie Bay Greshie Bay 13.3 -59.65 H COVE BB 07 0 0 America/Barbados 1993-12-22 3373825 Gregg Farm Gregg Farm 13.21667 -59.58333 P PPL BB 02 0 196 America/Barbados 1993-12-22 3373826 Greenwich Greenwich 13.18333 -59.61667 P PPL BB 04 0 116 America/Barbados 1993-12-22 3373827 Greens Greens 13.15 -59.51667 P PPL BB 05 0 156 America/Barbados 1993-12-22 3373828 Greenpond Greenpond 13.25 -59.55 H COVE BB 02 0 1 America/Barbados 1993-12-22 3373829 Green Point Green Point 13.31667 -59.63333 T PT BB 07 0 41 America/Barbados 1993-12-22 3373830 Green Point Green Point 13.08333 -59.45 T PT BB 10 0 -9999 America/Barbados 1993-12-22 3373831 Greenland Greenland Greenland 13.25 -59.56667 P PPLA BB 02 623 20 America/Barbados 2013-06-26 3373832 Greenidge Greenidge 13.31667 -59.61667 P PPL BB 07 0 38 America/Barbados 1993-12-22 3373833 Green Hill Green Hill 13.13333 -59.6 P PPL BB 08 0 74 America/Barbados 1993-12-22 3373834 Green Garden Green Garden 13.03333 -59.51667 P PPL BB 01 0 -9999 
America/Barbados 1993-12-22 3373835 Great Head Great Head 13.3 -59.65 T PT BB 07 0 0 America/Barbados 1993-12-22 3373836 Grazettes Grazettes 13.13333 -59.6 P PPL BB 08 0 74 America/Barbados 1993-12-22 3373837 Graveyard Graveyard 13.28333 -59.58333 P PPL BB 07 0 199 America/Barbados 1993-12-22 3373838 Grape Hall Grape Hall 13.31667 -59.63333 P PPL BB 07 0 41 America/Barbados 1993-12-22 3373839 Grantley Adams International Airport Grantley Adams International Airport Aeroport international Grantley-Adams,Aeroporto Internacional Grantley Adams,Aeroporto di Bridgetown - Grantley Adams,Aeropuerto Internacional Grantley Adams,Aéroport international Grantley-Adams,BGI,Bandar Udara Internasional Grantley Adams,Flughafen Bridgetown Grantley Adams,Grantley Adams Airport,Grantley Adams nemzetkoezi repueloter,Grantley Adams nemzetközi repülőtér,Grantley Adams tarptautinis oro uostas,Internacia Flughaveno Grantley Adams,Port lotniczy Grantley Adams,San bay quoc te Grantley Adams,Seawell Airport,Seawell International Airport,Sân bay quốc tế Grantley Adams,TBPB,bu li qi dui guo ji ji chang,Фурудгоҳи бин‌алмилалӣ гронтли одмз,فرودگاه بین‌المللی گرانتلی ادمز,グラントレー・アダムス国際空港,布里奇敦國際機場 13.0746 -59.49246 S AIRP BB BB 01 0 51 55 America/Barbados 2007-01-03 3373840 Granny’s Bay Granny's Bay 13.31667 -59.63333 H COVE BB 07 0 41 America/Barbados 1993-12-22 3373841 Grand View Grand View 13.16667 -59.6 L LCTY BB 11 0 214 America/Barbados 1993-12-22 3373842 Graeme Hall Swamp Graeme Hall Swamp 13.06667 -59.56667 H SWMP BB 01 0 8 America/Barbados 1993-12-22 3373843 Graeme Hall Graeme Hall Graeme Hall,Groeme Hall 13.08333 -59.56667 P PPL BB BB 01 0 68 America/Barbados 2012-01-18 3373844 Gouldings Green Gouldings Green 13.31667 -59.61667 T PT BB 07 0 38 America/Barbados 1993-12-22 3373845 Goodland Goodland 13.05 -59.51667 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373846 Good Intene Good Intene 13.11667 -59.53333 P PPL BB 03 0 48 America/Barbados 1993-12-22 3373847 Golden Ridge Golden Ridge 
13.16667 -59.55 P PPL BB 03 0 216 America/Barbados 1993-12-22 3373848 Golden Grove Golden Grove Golden Grove,Lewis Vale 13.15 -59.45 P PPL BB BB 10 0 46 America/Barbados 2012-01-18 3373849 Godings Bay Godings Bay 13.23333 -59.63333 H BAY BB 09 0 56 America/Barbados 1993-12-22 3373850 Goat House Bay Goat House Bay 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 1993-12-22 3373851 Glebe Land Glebe Land 13.16667 -59.48333 P PPL BB 05 0 193 America/Barbados 1993-12-22 3373852 Mount Gilboa Mount Gilboa 13.28333 -59.61667 T HLL BB 07 0 118 America/Barbados 1993-12-22 3373853 Gibbons Boggs Gibbons Boggs 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373854 Gibbons Gibbons 13.05 -59.51667 P PPLL BB 01 0 13 America/Barbados 1993-12-22 3373855 Gibbons Gibbons 13.06667 -59.53333 S EST BB 01 0 48 America/Barbados 1993-12-22 3373856 Gibbes Bay Gibbes Bay 13.21667 -59.63333 H BAY BB 09 0 63 America/Barbados 1993-12-22 3373857 Gibbs Gibbs Gibbes,Gibbs 13.22963 -59.63782 P PPL BB BB 09 0 29 America/Barbados 2012-07-25 3373858 Gent’s Bay Gent's Bay 13.31667 -59.61667 H COVE BB 07 0 38 America/Barbados 1993-12-22 3373859 Gemswick Gemswick 13.06667 -59.46667 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373860 Gay’s Cove Gay's Cove 13.3 -59.56667 H COVE BB 07 0 -9999 America/Barbados 1993-12-22 3373861 Gays Gays 13.28333 -59.58333 P PPL BB 09 0 199 America/Barbados 1993-12-22 3373862 Garrison Garrison 13.06667 -59.6 P PPL BB 08 0 1 America/Barbados 1993-12-22 3373863 Gall Hill Gall Hill 13.06667 -59.53333 P PPL BB 01 0 48 America/Barbados 1993-12-22 3373864 Fustic Fustic 13.26667 -59.63333 P PPL BB 07 0 51 America/Barbados 1993-12-22 3373865 Fryer’s Well Point Fryer's Well Point 13.26667 -59.65 T PT BB 07 0 1 America/Barbados 1993-12-22 3373866 Fruitful Hill Fruitful Hill 13.2 -59.56667 P PPL BB 06 0 296 America/Barbados 1993-12-22 3373867 Frizers Frizers Frazers,Frizers 13.21667 -59.53333 P PPL BB BB 06 0 64 America/Barbados 2012-01-18 3373868 Friendship Terrace 
Friendship Terrace 13.13333 -59.6 P PPL BB 08 0 74 America/Barbados 1993-12-22 3373869 Friendship Friendship 13.3 -59.63333 P PPL BB 07 0 52 America/Barbados 1993-12-22 3373870 Freshwater Bay Freshwater Bay Freshwater Bay 13.13333 -59.61667 H BAY BB 08 0 84 America/Barbados 2012-01-18 3373871 Frere Pilgrim Frere Pilgrim 13.1 -59.53333 P PPL BB 01 0 105 America/Barbados 1993-12-22 3373872 French French 13.25 -59.6 P PPL BB 09 0 246 America/Barbados 1993-12-22 3373873 Free Hill Free Hill 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373874 Free Hill Free Hill 13.13333 -59.55 P PPL BB 03 0 117 America/Barbados 1993-12-22 3373875 Four Winds Four Winds 13.21667 -59.63333 P PPL BB 09 0 63 America/Barbados 1993-12-22 3373876 Four Roads Four Roads 13.1 -59.46667 P PPL BB 10 0 45 America/Barbados 1993-12-22 3373877 Four Cross Roads Four Cross Roads 13.16667 -59.51667 P PPLA BB 05 0 203 America/Barbados 2013-05-05 3373878 Foul Bay Foul Bay 13.08333 -59.45 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373879 Foul Bay Foul Bay Foul Bay 13.1 -59.45 H BAY BB 10 0 -9999 America/Barbados 2012-01-18 3373880 Fosters Fosters 13.28333 -59.63333 P PPL BB 07 0 56 America/Barbados 1993-12-22 3373881 Foster Hall Foster Hall 13.2 -59.5 P PPL BB 06 0 27 America/Barbados 1993-12-22 3373882 Foster Hall Foster Hall 13.11667 -59.56667 P PPL BB 03 0 47 America/Barbados 1993-12-22 3373883 Fortescue Fortescue 13.16667 -59.45 P PPLL BB 10 0 36 America/Barbados 1993-12-22 3373884 Folkestone Park Folkestone Park 13.18333 -59.63333 P PPL BB 04 0 18 America/Barbados 1993-12-22 3373885 Flat Rock Flat Rock 13.15 -59.56667 P PPL BB 03 0 166 America/Barbados 1993-12-22 3373886 Flatfield Flatfield 13.31667 -59.6 P PPL BB 07 0 27 America/Barbados 1993-12-22 3373887 Fitts Fitts 13.13333 -59.63333 P PPL BB 04 0 27 America/Barbados 1993-12-22 3373888 Fisher Pond Fisher Pond 13.16667 -59.55 P PPL BB 11 0 216 America/Barbados 1993-12-22 3373889 Farm Road Farm Road 13.23333 -59.63333 P PPL BB 09 0 
56 America/Barbados 1993-12-22 3373890 Farmers Farmers 13.2 -59.58333 P PPL BB 11 0 259 America/Barbados 1993-12-22 3373891 Farley Hill Farley Hill 13.26667 -59.58333 T HLL BB 02 0 139 America/Barbados 1993-12-22 3373892 Fairy Valley Rock Fairy Valley Rock 13.05 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373893 Fairy Valley Fairy Valley 13.06667 -59.5 P PPL BB 01 0 19 America/Barbados 1993-12-22 3373894 Fair View Fair View 13.15 -59.53333 P PPL BB 03 0 197 America/Barbados 1993-12-22 3373895 Fairview Fairview 13.08333 -59.5 P PPL BB 01 0 70 America/Barbados 1993-12-22 3373896 Fairfield Fairfield 13.11667 -59.6 P PPLX BB 08 0 53 America/Barbados 1993-12-22 3373897 Fairfield Fairfield 13.3 -59.61667 P PPL BB 07 0 84 America/Barbados 1993-12-22 3373898 Exchange Exchange 13.15 -59.58333 P PPL BB 04 0 105 America/Barbados 1993-12-22 3373899 Enterprise Enterprise 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3373900 Endeavour Endeavour 13.2 -59.6 P PPL BB 04 0 218 America/Barbados 1993-12-22 3373901 Endeavour Endeavour 13.16667 -59.58333 P PPL BB 11 0 159 America/Barbados 1993-12-22 3373902 Ellis Castle Ellis Castle 13.28333 -59.58333 P PPL BB 09 0 199 America/Barbados 1993-12-22 3373903 Ellesmere Ellesmere 13.15 -59.53333 P PPL BB 03 0 197 America/Barbados 1993-12-22 3373904 Ellerton Ellerton 13.13333 -59.55 P PPL BB 03 0 117 America/Barbados 1993-12-22 3373905 Elizabeth Park Elizabeth Park 13.08333 -59.56667 P PPL BB 01 0 68 America/Barbados 1993-12-22 3373906 Edge Hill Edge Hill 13.15 -59.6 P PPL BB 04 0 107 America/Barbados 1993-12-22 3373907 Edgecumbe Edgecumbe 13.11667 -59.5 P PPL BB 00 0 39 America/Barbados 1993-12-22 3373908 Edey Edey 13.08333 -59.53333 P PPL BB 01 0 88 America/Barbados 1993-12-22 3373909 Eden Lodge Eden Lodge 13.13333 -59.6 P PPL BB 08 0 74 America/Barbados 1993-12-22 3373910 Ebworth Ebworth 13.26667 -59.61667 P PPLL BB 09 0 128 America/Barbados 1993-12-22 3373911 Ebenezer Ebenezer 13.11667 -59.5 P PPL BB 10 0 39 
America/Barbados 1993-12-22 3373912 Easy Hall Easy Hall 13.2 -59.53333 P PPL BB 06 0 324 America/Barbados 1993-12-22 3373913 East Point Lighthouse East Point Lighthouse 13.15 -59.41667 S LTHSE BB 10 0 1 America/Barbados 1993-12-22 3373914 East Lynne East Lynne 13.11667 -59.53333 P PPL BB 01 0 48 America/Barbados 1993-12-22 3373915 Eastbourne Eastbourne 13.11667 -59.43333 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373916 Ealing Park Ealing Park 13.03333 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373917 Ealing Grove Ealing Grove 13.05 -59.5 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373918 Durham Durham 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3373919 Durants Durants 13.15 -59.63333 P PPL BB 04 0 56 America/Barbados 1993-12-22 3373920 Durants Durants Durant,Durants 13.08333 -59.53333 P PPL BB BB 01 0 88 America/Barbados 2012-01-18 3373921 Dunscombe Dunscombe 13.2 -59.58333 P PPL BB 11 0 259 America/Barbados 1993-12-22 3373922 Dukes Dukes 13.18333 -59.58333 P PPL BB 11 0 262 America/Barbados 1993-12-22 3373923 Drax Hill Green Drax Hill Green 13.15 -59.53333 P PPL BB 03 0 197 America/Barbados 1993-12-22 3373924 Draxhall Woods Draxhall Woods 13.13333 -59.51667 P PPL BB 03 0 123 America/Barbados 1993-12-22 3373925 Drax Hall Jump Drax Hall Jump 13.13333 -59.51667 P PPL BB 03 0 123 America/Barbados 1993-12-22 3373926 Drax Hall Hope Drax Hall Hope 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3373927 Drax Hall Drax Hall 13.13333 -59.51667 P PPL BB 03 0 123 America/Barbados 1993-12-22 3373928 Dover Dover 13.05 -59.56667 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3373929 Diamond Valley Diamond Valley 13.1 -59.43333 P PPL BB 10 0 -9999 America/Barbados 1993-12-22 3373930 Diamond Corner Diamond Corner 13.26667 -59.6 P PPL BB 09 0 198 America/Barbados 1993-12-22 3373931 Deebles Point Deebles Point 13.15 -59.41667 T PT BB 10 0 1 America/Barbados 1993-12-22 3373932 Deacons Deacons 13.1 -59.61667 P PPLX BB 08 0 10 
America/Barbados 1993-12-22 3373933 Date Tree Hill Date Tree Hill 13.28333 -59.58333 P PPL BB 07 0 199 America/Barbados 1993-12-22 3373934 Dash Valley Dash Valley 13.1 -59.56667 P PPL BB 03 0 108 America/Barbados 1993-12-22 3373935 Cummings Cummings 13.31667 -59.6 H COVE BB 07 0 27 America/Barbados 1993-12-22 3373936 Culpepper Island Culpepper Island Culpepper Island 13.16667 -59.45 T ISL BB 10 0 36 America/Barbados 2012-01-18 3373937 Cuckold Point Cuckold Point Cuckold Point,Cuckolds Point 13.31667 -59.56667 T PT BB BB 07 0 -9999 America/Barbados 2012-01-18 3373938 Creek Bay Creek Bay 13.31667 -59.6 H COVE BB 07 0 27 America/Barbados 1993-12-22 3373939 Crane Hotel Crane Hotel Crane Hotel,Crane View,The Crane 13.1 -59.43333 S RSRT BB BB 10 0 -9999 America/Barbados 2012-01-18 3373940 Crane Beach Crane Beach 13.1 -59.43333 T BCH BB 10 0 -9999 America/Barbados 1993-12-22 3373941 Crane Bay Crane Bay 13.1 -59.45 H COVE BB 10 0 -9999 America/Barbados 1993-12-22 3373942 Crab Hill Crab Hill 13.31667 -59.63333 P PPL BB 07 727 41 America/Barbados 2006-01-17 3373943 Cowpen Rock Cowpen Rock 13.31667 -59.63333 T RK BB 07 0 41 America/Barbados 1993-12-22 3373944 Coverly Coverly 13.08333 -59.48333 P PPL BB 01 0 60 America/Barbados 1993-12-22 3373945 Cove Cove 13.3 -59.56667 P PPLL BB 07 0 -9999 America/Barbados 1993-12-22 3373946 Cotton House Bay Cotton House Bay 13.05 -59.53333 H BAY BB 01 0 13 America/Barbados 1993-12-22 3373947 Cottage Vale Cottage Vale 13.13333 -59.48333 P PPL BB 10 0 78 America/Barbados 1993-12-22 3373948 Cottage Cottage 13.28333 -59.6 P PPLL BB 07 0 153 America/Barbados 1993-12-22 3373949 Cottage Cottage 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3373950 Corben’s Bay Corben's Bay 13.28333 -59.56667 H COVE BB 07 0 1 America/Barbados 1993-12-22 3373951 Cookram Rock Cookram Rock 13.28333 -59.65 T RK BB 07 0 9 America/Barbados 1993-12-22 3373952 Content Content 13.3 -59.63333 P PPL BB 07 0 52 America/Barbados 1993-12-22 3373953 Constitution 
River Constitution River Constitution River 13.1 -59.61667 H STM BB 08 0 10 America/Barbados 2012-01-18 3373954 Constant Constant 13.11667 -59.55 P PPL BB 03 0 53 America/Barbados 1993-12-22 3373955 Conset Point Conset Point 13.18333 -59.46667 T PT BB 05 0 29 America/Barbados 1993-12-22 3373956 Conset Bay Conset Bay Conset Bay,Consets Bay 13.18333 -59.46667 H BAY BB BB 05 0 29 America/Barbados 2012-01-18 3373957 Connell Town Connell Town 13.31667 -59.61667 P PPL BB 07 0 38 America/Barbados 1993-12-22 3373958 Congor Rocks Congor Rocks 13.18333 -59.48333 T RKS BB 05 0 104 America/Barbados 1993-12-22 3373959 Congo Road Congo Road 13.11667 -59.45 P PPL BB 10 0 22 America/Barbados 1993-12-22 3373960 Congor Bay Congor Bay 13.18333 -59.48333 H BAY BB 05 0 104 America/Barbados 1993-12-22 3373961 Collins Collins 13.28333 -59.58333 P PPL BB 09 0 199 America/Barbados 1993-12-22 3373962 Colleton Colleton 13.26667 -59.63333 P PPL BB 09 0 51 America/Barbados 1993-12-22 3373963 Colleton Colleton 13.18333 -59.48333 P PPL BB 05 0 104 America/Barbados 1993-12-22 3373964 College Savannah College Savannah 13.16667 -59.45 P PPL BB 05 0 36 America/Barbados 1993-12-22 3373965 Coles Pasture Coles Pasture 13.15 -59.41667 P PPLL BB 10 0 1 America/Barbados 1993-12-22 3373966 Coles Cave Coles Cave 13.18333 -59.56667 P PPL BB 05 0 267 America/Barbados 1993-12-22 3373967 Coffee Gully Coffee Gully 13.18333 -59.55 P PPL BB 06 0 275 America/Barbados 1993-12-22 3373968 Codrington College Codrington College Codrington,Codrington College 13.18333 -59.46667 P PPL BB BB 05 0 29 America/Barbados 2012-01-18 3373969 Codrington Codrington 13.11667 -59.6 P PPL BB 08 0 53 America/Barbados 1993-12-22 3373970 Coconut Hall Coconut Hall 13.31667 -59.6 P PPL BB 07 0 27 America/Barbados 1993-12-22 3373971 Cobbler’s Rock Cobbler's Rock 13.08333 -59.43333 T RK BB 10 0 -9999 America/Barbados 1993-12-22 3373972 Cobblers Reef Cobblers Reef 13.13333 -59.41667 H RF BB 10 0 -9999 America/Barbados 1993-12-22 3373973 Coach 
Hill Coach Hill 13.16667 -59.48333 P PPL BB 05 0 193 America/Barbados 1993-12-22 3373974 Cluff’s Bay Cluff's Bay 13.31667 -59.61667 H COVE BB 07 0 38 America/Barbados 1993-12-22 3373975 Cluffs Cluffs 13.33333 -59.61667 P PPL BB 07 0 21 America/Barbados 1993-12-22 3373976 Clifton Hill Clifton Hill 13.16667 -59.56667 P PPL BB 11 0 193 America/Barbados 1993-12-22 3373977 Clifton Hall Clifton Hall 13.2 -59.5 P PPL BB 05 0 27 America/Barbados 1993-12-22 3373978 Cliff Cottage Cliff Cottage 13.16667 -59.48333 P PPL BB 05 0 193 America/Barbados 1993-12-22 3373979 Cliff Cliff 13.15 -59.48333 P PPL BB 05 0 160 America/Barbados 1993-12-22 3373980 Clermont Clermont 13.15 -59.61667 P PPL BB 00 0 117 America/Barbados 1993-12-22 3373981 Cleland Cleland 13.26667 -59.58333 P PPL BB 02 0 139 America/Barbados 1993-12-22 3373982 Clapham Clapham 13.08333 -59.58333 P PPL BB 01 0 38 America/Barbados 1993-12-22 3373983 Church Village Church Village Church Village 13.13333 -59.48333 P PPL BB 10 0 78 America/Barbados 2012-01-18 3373984 Church View Church View 13.18333 -59.48333 P PPL BB 05 0 104 America/Barbados 1993-12-22 3373985 Church Hill Church Hill 13.28333 -59.61667 P PPL BB 07 0 118 America/Barbados 1993-12-22 3373986 Christie Christie 13.16667 -59.6 P PPL BB 11 0 214 America/Barbados 1993-12-22 3373987 Christ Church Ridge Christ Church Ridge 13.08333 -59.53333 T RDGE BB 01 0 88 America/Barbados 1993-12-22 3373988 Christ Church Christ Church Christ Church,Christ Church prestegjeld,Christchurch,Kariah Christ Church,Kraist Tserts,Krajst-Cherch,Krista Kirko,Kristaus baznycios parapija,Kristaus bažnyčios parapija,Parroquia de Christ Church,ji du cheng jiao qu,Κράιστ Τσερτς,Крайст-Черч,基督城教區 13.08333 -59.53333 A ADM1 BB 01 48119 88 America/Barbados 2012-01-16 3373989 The Choyce The Choyce 13.28333 -59.56667 T CAPE BB 02 0 1 America/Barbados 1993-12-22 3373990 Chimborazo Chimborazo 13.2 -59.55 P PPL BB 06 0 215 America/Barbados 1993-12-22 3373991 Cherry Tree Hill Cherry Tree Hill 13.26667 
-59.58333 T HLL BB 09 0 139 America/Barbados 1993-12-22 3373992 Cherry Grove Cherry Grove 13.15 -59.51667 P PPL BB 05 0 156 America/Barbados 1993-12-22 3373993 Checker Hall Checker Hall 13.28333 -59.63333 P PPLA BB 07 0 56 America/Barbados 2013-05-05 3373994 Cheapside Cheapside 13.10247 -59.62589 P PPLX BB 08 0 8 America/Barbados 2010-02-01 3373995 The Chase The Chase 13.26667 -59.56667 T BCH BB 02 0 10 America/Barbados 1993-12-22 3373996 Charnocks Charnocks 13.08333 -59.5 P PPL BB 01 0 70 America/Barbados 1993-12-22 3373997 Chapman Chapman 13.18333 -59.56667 P PPL BB 11 0 267 America/Barbados 1993-12-22 3373998 Chandler Bay Chandler Bay 13.3 -59.58333 H COVE BB 07 0 63 America/Barbados 1993-12-22 3373999 Chancery Lane Swamp Chancery Lane Swamp 13.06334 -59.5 H SWMP BB 01 0 5 America/Barbados 2008-01-11 3374000 Chancery Lane Chancery Lane 13.06667 -59.5 P PPL BB 01 0 19 America/Barbados 1993-12-22 3374001 Chance Hall Chance Hall 13.31667 -59.6 P PPL BB 07 0 27 America/Barbados 1993-12-22 3374002 Chalky Mount Chalky Mount Chalky Mount 13.23333 -59.55 T HLL BB 02 0 64 America/Barbados 2012-01-18 3374003 The Chair The Chair 13.15 -59.41667 T PT BB 10 0 1 America/Barbados 1993-12-22 3374004 Cave Hill Cave Hill 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3374005 Cave Hill Cave Hill 13.13333 -59.61667 P PPL BB 08 0 84 America/Barbados 1993-12-22 3374006 Cave Bay Cave Bay Cave Bay 13.11667 -59.41667 H BAY BB 10 0 -9999 America/Barbados 2012-01-18 3374007 Cattlewash Cattlewash 13.21667 -59.53333 P PPL BB 06 0 64 America/Barbados 1993-12-22 3374008 Castle Grant Castle Grant 13.2 -59.55 P PPL BB 06 0 215 America/Barbados 1993-12-22 3374009 Castle Castle 13.26667 -59.6 P PPL BB 09 0 198 America/Barbados 1993-12-22 3374010 Carter Carter 13.18333 -59.5 P PPL BB 05 0 251 America/Barbados 1993-12-22 3374011 Carrington Carrington 13.18333 -59.56667 P PPL BB 11 0 267 America/Barbados 1993-12-22 3374012 Carrington Carrington 13.11667 -59.48333 P PPL BB 10 0 36 
America/Barbados 1993-12-22 3374013 Carlton Carlton 13.21667 -59.63333 P PPL BB 04 0 63 America/Barbados 1993-12-22 3374014 Carlisle Bay Carlisle Bay Carlisle Bay 13.08333 -59.61667 H BAY BB 08 0 1 America/Barbados 2012-01-18 3374015 Careenage Careenage 13.1 -59.61667 T PT BB 08 0 10 America/Barbados 1993-12-22 3374016 Cane Wood Cane Wood 13.15 -59.58333 P PPL BB 08 0 105 America/Barbados 1993-12-22 3374017 Cane Vale Cane Vale 13.05 -59.53333 P PPL BB 01 0 13 America/Barbados 1993-12-22 3374018 Canefield Canefield 13.2 -59.58333 P PPL BB 06 0 259 America/Barbados 1993-12-22 3374019 Campaign Castle Campaign Castle 13.11667 -59.51667 P PPL BB 03 0 52 America/Barbados 1993-12-22 3374020 Cambridge Cambridge 13.21667 -59.55 S EST BB 06 0 269 America/Barbados 1993-12-22 3374021 Callendar Callendar 13.06667 -59.51667 P PPL BB 01 0 23 America/Barbados 1993-12-22 3374022 The Cabben The Cabben Breakfast Point,The Cabben 13.31667 -59.61667 T PT BB BB 07 0 38 America/Barbados 2012-01-18 3374023 Bushy Park Station Bushy Park Station 13.13333 -59.46667 S RSTN BB 10 0 54 America/Barbados 1993-12-22 3374024 Bushy Park Bushy Park Bushy Park 13.13333 -59.46667 P PPL BB 10 0 54 America/Barbados 2012-01-18 3374025 Bulkely Factory Bulkely Factory 13.11667 -59.53333 P PPL BB 03 0 48 America/Barbados 1993-12-22 3374026 Bulkeley Station Bulkeley Station Bulkeley Factory Station,Bulkeley Station 13.11667 -59.53333 S RSTN BB BB 03 0 48 America/Barbados 2012-01-18 3374027 Buckden House Buckden House 13.2 -59.53333 P PPL BB 06 0 324 America/Barbados 1993-12-22 3374028 Bruce Vale River Bruce Vale River Bruce Vale River 13.25 -59.55 H STM BB 02 0 1 America/Barbados 2012-01-18 3374029 Bruce Vale Bruce Vale Bruce,Bruce Vale 13.23333 -59.55 P PPL BB BB 02 0 64 America/Barbados 2012-01-18 3374030 Brome Field Brome Field 13.3 -59.63333 P PPL BB 07 0 52 America/Barbados 1993-12-22 3374031 Brittons Hill Brittons Hill 13.08759 -59.59517 P PPL BB 08 0 49 America/Barbados 2010-02-01 3374032 Brighton 
Brighton 13.1 -59.61667 P PPLX BB 08 0 10 America/Barbados 1993-12-22 3374033 Brighton Brighton 13.11667 -59.51667 P PPL BB 03 0 52 America/Barbados 1993-12-22 3374034 Briggs Briggs 13.1 -59.53333 P PPL BB 01 0 105 America/Barbados 1993-12-22 3374035 Bridgetown Harbour Bridgetown Harbour Bridgetown Harbour,New Deep Water Harbour 13.1 -59.63333 H HBR BB BB 08 0 1 America/Barbados 2012-01-18 3374036 Bridgetown Bridgetown BGI,Bridgetown,Bridzhtaun,Bridztaun,Bridztaunas,Bridžtaunas,The Bridge Town,beulijitaun,brydj tawn,bu li qi dun,burijjitaun,Бриджтаун,Бриџтаун,ברידג'טאון,بريدج تاون,ብርጅታውን,ブリッジタウン,布里奇敦,브리지타운 13.1 -59.61667 P PPLC BB 08 98511 10 America/Barbados 2012-01-18 3374037 Bridgefield Bridgefield 13.15 -59.58333 P PPL BB 11 0 105 America/Barbados 1993-12-22 3374038 Brereton Brereton Brereton 13.11667 -59.5 P PPL BB 10 0 39 America/Barbados 2012-01-18 3374039 Breedy’s Breedy's Breedy's,Breedy’s 13.25 -59.58333 P PPL BB 02 0 50 America/Barbados 2012-01-18 3374040 Branchbury Branchbury 13.18333 -59.55 P PPL BB 06 0 275 America/Barbados 1993-12-22 3374041 Bowmanston Bowmanston 13.16667 -59.51667 P PPL BB 05 0 203 America/Barbados 1993-12-22 3374042 Bow Bells Reef Bow Bells Reef 13.03333 -59.51667 H RF BB 01 0 -9999 America/Barbados 1993-12-22 3374043 Bourbon Bourbon 13.28333 -59.63333 P PPL BB 07 0 56 America/Barbados 1993-12-22 3374044 Bottom Bay Bottom Bay Bottom Bay 13.11667 -59.41667 H BAY BB 10 0 -9999 America/Barbados 2012-01-18 3374045 Boscobelle Boscobelle 13.28333 -59.56667 P PPL BB 09 0 1 America/Barbados 1993-12-22 3374046 Bonwell Bonwell 13.18333 -59.53333 P PPL BB 06 0 265 America/Barbados 1993-12-22 3374047 Boiling Spring Boiling Spring 13.21667 -59.58333 H SPNG BB 02 0 196 America/Barbados 1993-12-22 3374048 Boarded Hall Boarded Hall 13.1 -59.53333 P PPL BB 01 0 105 America/Barbados 1993-12-22 3374049 Blue Waters Blue Waters 13.06667 -59.58333 P PPL BB 01 0 5 America/Barbados 1993-12-22 3374050 Blowers Blowers 13.2 -59.61667 P PPL BB 11 0 126 
America/Barbados 1993-12-22 3374051 Bloomsbury Bloomsbury 13.2 -59.56667 P PPL BB 11 0 296 America/Barbados 1993-12-22 3374052 Blades Hill Blades Hill Blades Hill 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 2012-01-18 3374053 Blades Blades 13.1 -59.46667 P PPL BB 10 0 45 America/Barbados 1993-12-22 3374054 Blacksage Alley Blacksage Alley 13.3 -59.58333 P PPL BB 07 0 63 America/Barbados 1993-12-22 3374055 Black Rock Black Rock 13.13333 -59.63333 P PPL BB 08 0 27 America/Barbados 1993-12-22 3374056 Blackmans Blackmans 13.18333 -59.53333 P PPL BB 06 623 265 America/Barbados 2006-01-17 3374057 Black Bird Rock Black Bird Rock 13.28333 -59.65 T RK BB 07 0 9 America/Barbados 1993-12-22 3374058 Black Bess Black Bess 13.23333 -59.61667 S EST BB 09 0 179 America/Barbados 1993-12-22 3374059 Bissex Bissex 13.21667 -59.55 P PPL BB 06 0 269 America/Barbados 1993-12-22 3374060 Bishops Bishops 13.3 -59.6 P PPL BB 07 0 121 America/Barbados 1993-12-22 3374061 Bibbys Lane Bibbys Lane 13.13333 -59.58333 P PPL BB 08 0 69 America/Barbados 1993-12-22 3374062 Bentleys Bentleys Bentleys 13.11667 -59.5 P PPL BB 10 0 39 America/Barbados 2012-01-18 3374063 Benthams Benthams 13.28333 -59.61667 P PPL BB 07 0 118 America/Barbados 1993-12-22 3374064 Benny Hall Benny Hall 13.26667 -59.6 P PPLL BB 09 0 198 America/Barbados 1993-12-22 3374065 Bennetts Bennetts 13.16667 -59.6 P PPL BB 11 0 214 America/Barbados 1993-12-22 3374066 Benab Benab 13.21667 -59.53333 P PPLL BB 02 0 64 America/Barbados 1993-12-22 3374067 Below Rock Below Rock 13.05 -59.53333 T RK BB 01 0 13 America/Barbados 1993-12-22 3374068 Belmont Belmont 13.08333 -59.6 P PPLX BB 08 0 23 America/Barbados 1993-12-22 3374069 Bell Point Bell Point 13.16667 -59.45 T PT BB 05 0 36 America/Barbados 1993-12-22 3374070 Belleplaine Belleplaine Belleplaine 13.25 -59.56667 P PPL BB 02 0 20 America/Barbados 2012-01-18 3374071 Belle Hill Belle Hill 13.25 -59.56667 T HLL BB 02 0 20 America/Barbados 1993-12-22 3374072 Belle Belle 13.11667 -59.58333 P 
PPL BB 08 0 43 America/Barbados 1993-12-22 3374073 Belair Belair 13.15 -59.55 P PPL BB 03 0 212 America/Barbados 1993-12-22 3374074 Bel Air Bel Air Bel Air 13.11667 -59.43333 P PPL BB 10 0 -9999 America/Barbados 2012-01-18 3374075 Beachy Head Beachy Head 13.1 -59.43333 T PT BB 10 0 -9999 America/Barbados 1993-12-22 3374076 Bayville Bayville 13.08449 -59.60602 P PPLX BB 08 0 10 America/Barbados 2010-02-01 3374077 Bayleys Bayleys Bayley,Bayleys 13.15 -59.45 P PPL BB BB 10 0 46 America/Barbados 2012-01-18 3374078 Bayfield Bayfield Bayfield 13.15 -59.45 P PPL BB 10 0 46 America/Barbados 2012-01-18 3374079 Baxters Baxters Baxters 13.21667 -59.56667 P PPL BB 02 0 78 America/Barbados 2012-01-18 3374080 Batts Rock Bay Batts Rock Bay 13.13333 -59.63333 H BAY BB 08 0 27 America/Barbados 1993-12-22 3374081 Bath Station Bath Station 13.18333 -59.46667 S RSTN BB 05 0 29 America/Barbados 1993-12-22 3374082 Bathsheba Station Bathsheba Station 13.21667 -59.51667 S RSTN BB 06 0 -9999 America/Barbados 1993-12-22 3374083 Bathsheba Bathsheba Bathsheba 13.21434 -59.52521 P PPLA BB 06 1765 49 America/Barbados 2014-07-18 3374084 Barbados Barbados Baabados,Baarbadoos,Babadosi,Bac-ba-got,Barabada,Barabadosi,Barabâda,Barbada,Barbadas,Barbade,Barbadeaen,Barbadeän,Barbadhos,Barbadi,Barbado,Barbadoes,Barbadoos,Barbados,Barbados nutome,Barbadosa,Barbadosas,Barbadosi,Barbadosin Orn,Barbadot,Barbaduosos,Barbadus,Barbady,Barbadós,Barbata,Barbaus,Barbàdos,Barbâda,Barbåde,Barebade,Barubadosi,Barɛbadɛ,Bhabhadosi,Bác-ba-đốt,Bárbádọ̀s,Colony of Barbados,Hashkʼaan Bikéyah,IBhadosi,Lababad,Mparmpantos,Orileede Babadosi,Orílẹ́ède Bábádósì,Papeitosi,Pāpeitosi,ba ba duo si,babados,babeidoseu,barabadasa,barabados,barabadosa,barbados,barbadosa,barbadosi,barbadws,barbedos,barbydws,barubadosu,brbadws,brbdws,i-Barbados,parpatocu,parpatos,prathes barbedos,Μπαρμπάντος,Барбадас,Барбадос,Барбадосин 
Орн,Բարբադոս,ברבדוס,باربادوس,باربادۆس,بارباڈوس,باربيدوس,بربادوس,ބާބަޑޮސް,बारबाडोस,बार्बाडोस,বারবাদোস,বার্বাডোস,ਬਾਰਬਾਡੋਸ,બાર્બાડોસ,ବାରବାଡସ,ବାରବାଡୋସ୍,பார்படோசு,பார்படோஸ்,బార్బడోస్,ಬಾರ್ಬಡೋಸ್,ബാര്‍ബഡോസ്,ബർബാഡോസ്,බාර්බඩෝස්,บาร์เบโดส,ประเทศบาร์เบโดส,ບາບາຄັອດ,བར་བ་ཌོ་སི།,བཱརྦ་ཌོས྄།,ბარბადოსი,ባርቤዶስ,បារបាដូស,バルバドス,巴巴多斯,바베이도스 13.16667 -59.53333 A PCLI BB 00 285653 249 America/Barbados 2012-01-18 3374085 Barbados Barbados Barbados 13.16667 -59.55 T ISL BB 00 277821 216 America/Barbados 2014-07-08 3374086 Bannatyne Bannatyne 13.08333 -59.53333 P PPL BB 01 0 88 America/Barbados 1993-12-22 3374087 Bank Hall Bank Hall 13.1 -59.6 P PPLX BB 08 0 30 America/Barbados 1993-12-22 3374088 Bakers Bakers 13.23333 -59.61667 P PPL BB 09 0 179 America/Barbados 1993-12-22 3374089 Bairds Bairds 13.13333 -59.53333 P PPL BB 03 0 138 America/Barbados 1993-12-22 3374090 Bagatelle Bagatelle 13.15 -59.61667 P PPL BB 11 0 117 America/Barbados 1993-12-22 3374091 Babbs Babbs 13.28333 -59.63333 P PPL BB 07 0 56 America/Barbados 1993-12-22 3374092 Atlantic Shores Atlantic Shores 13.03333 -59.51667 P PPL BB 01 0 -9999 America/Barbados 1993-12-22 3374093 Ashton Hall Ashton Hall 13.25 -59.63333 P PPL BB 09 0 50 America/Barbados 1993-12-22 3374094 Ashford Ashford 13.16667 -59.5 S EST BB 05 0 188 America/Barbados 1993-12-22 3374095 Ashbury Ashbury 13.16667 -59.53333 P PPL BB 05 0 249 America/Barbados 1993-12-22 3374096 Arthurs Seat Arthurs Seat 13.15 -59.6 P PPL BB 11 0 107 America/Barbados 1993-12-22 3374097 Arch Hall Arch Hall 13.16667 -59.61667 P PPL BB 11 0 120 America/Barbados 1993-12-22 3374098 Archer’s Bay Archer's Bay 13.31667 -59.63333 H BAY BB 07 0 41 America/Barbados 1993-12-22 3374099 Archers Archers Archer,Archers 13.31667 -59.63333 P PPL BB BB 07 0 41 America/Barbados 2012-01-18 3374100 Applewhaites Applewhaites 13.16667 -59.56667 P PPL BB 11 0 193 America/Barbados 1993-12-22 3374101 Appleby Appleby 13.15 -59.63333 P PPL BB 04 0 56 America/Barbados 1993-12-22 3374102 Apes Hill Apes Hill 13.21667 
-59.6 P PPL BB 04 0 266 America/Barbados 1993-12-22 3374103 Antilles Flat Antilles Flat 13.31667 -59.58333 H FLTT BB 07 0 -9999 America/Barbados 1993-12-22 3374104 Animal Flower Cave Animal Flower Cave Animal Flower Cave,Cove 13.33333 -59.6 P PPL BB BB 07 0 -9999 America/Barbados 2012-01-18 3374105 Animal Flower Bay Animal Flower Bay 13.31667 -59.6 H BAY BB 07 0 27 America/Barbados 1993-12-22 3374106 Ananias Point Ananias Point 13.03333 -59.51667 T PT BB 01 0 -9999 America/Barbados 1993-12-22 3374107 All Saints Church All Saints Church 13.26667 -59.6 S CH BB 09 0 198 America/Barbados 1993-12-22 3374108 Allmans Allmans 13.26667 -59.63333 P PPLL BB 07 0 51 America/Barbados 1993-12-22 3374109 Alleynes Bay Alleynes Bay 13.2 -59.63333 H BAY BB 04 0 41 America/Barbados 1993-12-22 3374110 Alleynedale Alleynedale 13.28333 -59.61667 P PPL BB 00 0 118 America/Barbados 1993-12-22 3374111 Allen View Allen View 13.18333 -59.56667 P PPL BB 11 0 267 America/Barbados 1993-12-22 3374112 Alexandra Alexandra 13.28333 -59.6 P PPL BB 07 0 153 America/Barbados 1993-12-22 3374113 Airy Hill Airy Hill 13.18333 -59.55 P PPL BB 06 0 275 America/Barbados 1993-12-22 3374114 Abbott’s Bay Abbott's Bay 13.31667 -59.6 H COVE BB 07 0 27 America/Barbados 1993-12-22 6300886 Bridgetown City Bridgetown City TBPO 13.1 -59.61667 S AIRF BB 0 50 10 America/Barbados 2011-03-20 6464783 Cobblers Cove Hotel Cobblers Cove Hotel 13.2376 -59.639 S HTL BB 0 29 America/Barbados 2007-04-13 6465468 Mango Bay Club - All Inclusive Mango Bay Club - All Inclusive 13.189 -59.6343 S HTL BB 0 14 America/Barbados 2007-04-13 6465533 Blue Horizon Hotel Blue Horizon Hotel 13.081 -59.5779 S HTL BB 0 44 America/Barbados 2007-04-13 6465665 Time Out At The Gap Time Out At The Gap 13.0655 -59.5602 S HTL BB 01 0 10 America/Barbados 2007-04-13 6465731 The Savannah The Savannah 13.0782 -59.5713 S HTL BB 0 40 America/Barbados 2007-04-13 6466200 Turtle Beach Resort All Inclusive Turtle Beach Resort All Inclusive 13.0655 -59.5533 S HTL BB 
01 0 6 America/Barbados 2007-04-13 6466804 Crystal Cove Hotel All Inclusive Crystal Cove Hotel All Inclusive 13.1277 -59.6258 S HTL BB 0 32 America/Barbados 2007-04-13 6466990 Colony Club Hotel Colony Club Hotel 13.2293 -59.639 S HTL BB 0 23 America/Barbados 2007-04-13 6468243 Tamarind Cove Hotel Tamarind Cove Hotel 13.2019 -59.638 S HTL BB 0 17 America/Barbados 2007-04-13 6468509 Sandy Bay Beach Club All Inclusive Sandy Bay Beach Club All Inclusive 13.0702 -59.5794 S HTL BB 01 0 7 America/Barbados 2007-04-13 6469089 Accra Beach Hotel ACCRA BEACH HOTEL 13.0667 -59.5616 S HTL BB 01 0 12 America/Barbados 2007-04-13 6469565 Southern Palms Beach Club Southern Palms Beach Club 13.0746 -59.5657 S HTL BB 0 27 America/Barbados 2007-04-13 6469858 Treasure Beach Hotel Treasure Beach Hotel 13.1524 -59.6305 S HTL BB 0 69 America/Barbados 2007-04-13 6470109 Almond Beach Village - All Inclusive Almond Beach Village - All Inclusive 13.2467 -59.6362 S HTL BB 0 46 America/Barbados 2007-04-13 6470127 The Fairmont Royal Pavilion Hotel The Fairmont Royal Pavilion Hotel 13.2303 -59.638 S HTL BB 0 21 America/Barbados 2007-04-13 6471276 Barbados Beach Club Barbados Beach Club 13.0685 -59.5725 S HTL BB 01 0 8 America/Barbados 2007-04-13 6471678 Settlers Beach Villa Hotel Settlers Beach Villa Hotel 13.1927 -59.6362 S HTL BB 0 9 America/Barbados 2007-04-13 6471743 Almond Beach Club & Spa All Inclusive Almond Beach Club & Spa All Inclusive 13.1717 -59.6343 S HTL BB 0 17 America/Barbados 2007-04-13 6471842 Bougainvillea Beach Resort Bougainvillea Beach Resort 13.069 -59.543 S HTL BB 0 38 America/Barbados 2007-04-13 6472789 Discovery Bay by Rex Resorts Discovery Bay by Rex Resorts 13.2092 -59.6352 S HTL BB 0 48 America/Barbados 2007-04-13 6490639 South Gap Hotel South Gap Hotel 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-15 6491360 Golden Sands Hotel Golden Sands Hotel 13.0666 -59.5666 S HTL BB 01 0 8 America/Barbados 2007-04-15 6491404 Butterfly Beach Hotel Butterfly Beach Hotel 
13.0666 -59.5537 S HTL BB 01 0 10 America/Barbados 2007-04-15 6491530 Barbados Beach Club Family Resort Barbados Beach Club Family Resort 13.0661 -59.56 S HTL BB 01 0 10 America/Barbados 2007-04-15 6492080 Chateau Blanc Apartments on Sea Chateau Blanc Apartments on Sea 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-15 6493726 Monteray Apartment Hotel Monteray Apartment Hotel 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-14 6493825 Silverpoint Villa Hotel Silverpoint Villa Hotel 13.0666 -59.5833 S HTL BB 01 0 1 America/Barbados 2007-04-14 6495006 Nautilus Beach Apartments Nautilus Beach Apartments 13.1 -59.6166 S HTL BB 0 10 America/Barbados 2007-04-14 6497736 Sea Breeze Beach Hotel Sea Breeze Beach Hotel 13.0661 -59.55 S HTL BB 01 0 12 America/Barbados 2007-04-14 6498323 Allamanda Beach Hotel Allamanda Beach Hotel 13.0737 -59.5657 S HTL BB 01 0 24 America/Barbados 2007-04-14 6498440 Barbados Hilton Barbados Hilton 13.07873 -59.6113 S HTL BB 08 0 5 America/Barbados 2010-02-01 6500060 Tropical Escape All Inclusive Tropical Escape All Inclusive 13.1607 -59.6343 S HTL BB 0 26 America/Barbados 2007-04-14 6500287 Coconut Court Beach Hotel Coconut Court Beach Hotel 13.0902 -59.6033 S HTL BB 0 15 America/Barbados 2007-04-14 6501853 Little Arches Barbados Little Arches Barbados 13.0626 -59.5395 S HTL BB 0 10 America/Barbados 2007-04-14 6502453 Silver Point Hotel Silver Point Hotel 13.0535 -59.5205 S HTL BB 0 21 America/Barbados 2007-04-14 6503274 Amaryllis Beach Resort Amaryllis Beach Resort 13.081 -59.5873 S HTL BB 0 24 America/Barbados 2007-04-14 6504759 The House The House 13.1991 -59.639 S HTL BB 0 10 America/Barbados 2007-04-14 6505124 Sunswept Beach Hotel SUNSWEPT BEACH HOTEL 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-13 6506721 Angler Apartments ANGLER APARTMENTS 13.1516 -59.6259 S HTL BB 0 100 America/Barbados 2007-04-13 6507366 Kings Beach Hotel KINGS BEACH HOTEL 13.2431 -59.6396 S HTL BB 0 19 America/Barbados 2007-04-13 6507673 
Divi Heritage DIVI HERITAGE 13.1516 -59.6259 S HTL BB 0 100 America/Barbados 2007-04-13 6509395 Coral Reef Club CORAL REEF CLUB 13.1908 -59.6341 S HTL BB 0 12 America/Barbados 2007-04-13 6509528 Silver Rock SILVER ROCK 13.0667 -59.5616 S HTL BB 01 0 12 America/Barbados 2007-04-13 6509754 The Fairmont Glitter Bay THE FAIRMONT GLITTER BAY 13.0643 -59.566 S HTL BB 01 0 1 America/Barbados 2007-04-13 6510468 Sunset Crest Resort SUNSET CREST RESORT 13.1516 -59.6259 S HTL BB 0 100 America/Barbados 2007-04-13 6512386 Sandy Lane Hotel SANDY LANE HOTEL 13.1516 -59.6259 S HTL BB 0 100 America/Barbados 2007-04-13 6512507 Rainbow Beach Hotel RAINBOW BEACH HOTEL 13.0667 -59.5616 S HTL BB 01 0 12 America/Barbados 2007-04-13 6519986 Meridian Inn MERIDIAN INN 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-15 6520876 Port St Charles PORT ST CHARLES 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-15 6526132 Amaryllis Beach Resort AMARYLLIS BEACH RESORT 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-14 6526176 Yellow Bird Hotel YELLOW BIRD HOTEL 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-14 6526371 Allamanda Beach Hotel ALLAMANDA BEACH HOTEL 13.0666 -59.5833 S HTL BB 01 0 1 America/Barbados 2007-04-14 6526578 Legend Garden Condos LEGEND GARDEN CONDOS 13.2166 -59.6333 S HTL BB 0 63 America/Barbados 2007-04-14 6526845 Waters Meet Beach Apt WATERS MEET BEACH APT 13.0666 -59.5833 S HTL BB 01 0 1 America/Barbados 2007-04-14 6527288 The Sandpiper THE SANDPIPER 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-14 6528527 Silver Sands Resort SILVER SANDS RESORT 13.1674 -59.5552 S HTL BB 0 227 America/Barbados 2007-04-14 6528801 The Crane Resort THE CRANE RESORT 13.0667 -59.5616 S HTL BB 01 0 12 America/Barbados 2007-04-14 6529818 Island Inn Hotel ISLAND INN HOTEL 13.0667 -59.5616 S HTL BB 01 0 12 America/Barbados 2007-04-14 6941782 Saint Lawrence Gap Saint Lawrence Gap The Gap 13.06489 -59.56405 P PPL BB 0 9 America/Barbados 2011-03-18 7117029 
Maycocks Bay Maycocks Bay 13.29533 -59.64958 H BAY BB 0 24 America/Barbados 2010-01-19 7117030 Queen Elisabeth Hospital Queen Elisabeth Hospital 13.09507 -59.60684 S HSP BB 0 13 America/Barbados 2010-01-19 7157462 Garrison Garrison 13.08119 -59.60774 P PPLX BB 0 9 America/Barbados 2010-02-01 7287815 Saint Lawrence Gap Saint Lawrence Gap 13.0661 -59.56564 R RD BB 0 1 7 America/Barbados 2010-04-08 7287816 Dover Beach Dover Beach 13.06662 -59.5709 T BCH BB 01 0 6 9 America/Barbados 2010-10-03 7732027 Fitts Village Fitts Village 13.14607 -59.63795 P PPL BB 0 8 America/Barbados 2011-03-23 8354480 Gibbs Bay Gibbs Bay 13.22812 -59.6434 H BAY BB 0 1 America/Barbados 2012-07-25 8354481 Mahogany Bay Mahogany Bay 13.16015 -59.63781 H BAY BB 0 4 America/Barbados 2012-07-25 8643376 Worthing Beach Worthing Beach 13.07118 -59.58305 T BCH BB 01 0 5 America/Barbados 2013-11-23 9239026 Hackletons Hackletons 13.19975 -59.52427 P PPL BB 06 0 266 America/Barbados 2014-08-14 9342420 Andromeda Botanical Gardens Andromeda Botanical Gardens 13.20803 -59.51706 S GDN BB 06 0 146 America/Barbados 2014-08-14 9342438 Bathsheba park Bathsheba park 13.21237 -59.51865 L PRK BB 06 0 13 America/Barbados 2014-08-14 9342506 Tent Bay Tent Bay 13.21275 -59.51109 H BAY BB 06 0 1 America/Barbados 2014-08-14 9963354 Sugar Cane Club Sugar Cane Club 13.263 -59.63593 S HTL BB 0 24 America/Barbados 2015-01-19 9963355 South Beach Resort & Vacation Club South Beach Resort & Vacation Club 13.07486 -59.58883 S HTL BB 0 1 America/Barbados 2015-01-19 9963356 Courtyard Bridgetown Courtyard Bridgetown 13.07658 -59.60132 S HTL BB 0 16 America/Barbados 2015-01-19 9963357 Casuarina Beach Resort Casuarina Beach Resort 13.06603 -59.56314 S HTL BB 0 1 America/Barbados 2015-01-19 9963358 Ocean Spray Beach Apartments Ocean Spray Beach Apartments 13.05263 -59.50702 S HTL BB 0 1 America/Barbados 2015-01-19 9963359 Ocean Two Resort And Residences Ocean Two Resort And Residences 13.06615 -59.56727 S HTL BB 0 1 America/Barbados 
2015-01-19 9963360 Worthing Court Worthing Court 13.0716 -59.58584 S HTL BB 0 1 America/Barbados 2015-01-19 9963361 Rostrevor Rostrevor 13.06709 -59.57339 S HTL BB 0 14 America/Barbados 2015-01-19 9963362 Plum Tree Club Plum Tree Club 13.07039 -59.57781 S HTL BB 0 5 America/Barbados 2015-01-19 9963363 Sunbay Hotel Sunbay Hotel 13.08259 -59.60948 S HTL BB 0 6 America/Barbados 2015-01-19 9964647 Lighthouse Resort Lighthouse Resort 13.04784 -59.52296 S HTL BB 0 1 America/Barbados 2015-01-20 9969513 Waves Barbados Waves Barbados 13.14227 -59.63749 S HTL BB 0 22 America/Barbados 2015-01-22 9970876 Radisson Aquatica Resort Barbados Radisson Aquatica Resort Barbados 13.08265 -59.60933 S HTL BB 0 6 America/Barbados 2015-01-23 9971694 Beach View Beach View 13.16549 -59.63744 S HTL BB 0 5 America/Barbados 2015-01-24 9971722 South Gap Hotel Barbados South Gap Hotel Barbados 13.06736 -59.57403 S HTL BB 0 14 America/Barbados 2015-01-24 9971760 Hilton Barbados Resort Hilton Barbados Resort 13.07867 -59.61261 S HTL BB 0 6 America/Barbados 2015-01-24 9971867 Halcyon Palm Halcyon Palm 13.17861 -59.63771 S HTL BB 0 13 America/Barbados 2015-01-24 10099015 All Seasons Resort Europa All Seasons Resort Europa 13.18307 -59.63918 S HTL BB 0 13 America/Barbados 2015-02-13 10099016 Pirates Inn Pirates Inn 13.07585 -59.59564 S HTL BB 0 23 America/Barbados 2015-02-13 10099017 Dover Beach Hotel Dover Beach Hotel 13.064 -59.565 S HTL BB 0 1 America/Barbados 2015-02-13 10101039 Waves Barbados All Inclusive Waves Barbados All Inclusive 13.14224 -59.63756 S HTL BB 0 22 America/Barbados 2015-02-14 10104998 Discovery Bay All Inclusive Discovery Bay All Inclusive 13.19248 -59.63997 S HTL BB 0 19 America/Barbados 2015-02-19 10105638 Waves Beach Resort All Inclusive Waves Beach Resort All Inclusive 13.14225 -59.63751 S HTL BB 0 22 America/Barbados 2015-02-19 10105643 The Soco Hotel The Soco Hotel 13.07606 -59.59729 S HTL BB 0 23 America/Barbados 2015-02-19 10105644 Sandals Barbados Sandals Barbados 
13.06988 -59.57633 S HTL BB 0 5 America/Barbados 2015-02-19 10105645 Melrose Beach Apartment Melrose Beach Apartment 13.07007 -59.57969 S HTL BB 0 5 America/Barbados 2015-02-19 10105646 Infinity On The Beach Infinity On The Beach 13.06729 -59.56997 S HTL BB 0 14 America/Barbados 2015-02-19 10110156 Lantana Resort Barbados Lantana Resort Barbados 13.21484 -59.63975 S HTL BB 0 31 America/Barbados 2015-02-22 10110157 Ocean 15 Hotel Ocean 15 Hotel 13.06744 -59.57246 S HTL BB 0 14 America/Barbados 2015-02-22 10111901 Ocean Two Resort & Residences Ocean Two Resort & Residences 13.06527 -59.54457 S HTL BB 0 1 America/Barbados 2015-02-22 10112188 Couples Barbados Couples Barbados 13.06507 -59.56285 S HTL BB 0 1 America/Barbados 2015-02-22 10112189 Adulo Apartments Adulo Apartments 13.07767 -59.5917 S HTL BB 0 23 America/Barbados 2015-02-22 10113289 Bonanza Apartments Bonanza Apartments 13.06744 -59.57092 S HTL BB 0 14 America/Barbados 2015-02-23 10117812 Battaleys Mews Barbados Battaleys Mews Barbados 13.2378 -59.63994 S HTL BB 0 41 America/Barbados 2015-02-25 10120819 Little Good Harbour Little Good Harbour 13.28606 -59.64546 S HTL BB 0 36 America/Barbados 2015-02-26 10121323 Tropical Winds Tropical Winds 13.09356 -59.61051 S HTL BB 0 7 America/Barbados 2015-02-26 10121723 Rostrevor Apartment Hotel Rostrevor Apartment Hotel 13.06749 -59.57321 S HTL BB 0 14 America/Barbados 2015-02-26 10121881 Almond Casuarina Beach Resort Almond Casuarina Beach Resort 13.067 -59.56973 S HTL BB 0 14 America/Barbados 2015-02-26 10123230 Divi Southwinds Beach Resort Divi Southwinds Beach Resort 13.06848 -59.57102 S HTL BB 0 14 America/Barbados 2015-02-27 10170035 All Season Resort Europa All Season Resort Europa 13.1775 -59.6356 S HTL BB 0 13 America/Barbados 2015-03-10 10170036 The Club Barbados Resort And Spa The Club Barbados Resort And Spa 13.17713 -59.6386 S HTL BB 0 13 America/Barbados 2015-03-10 10170037 Divi Heritage Beach Resort Divi Heritage Beach Resort 13.18175 -59.63846 S HTL BB 
0 13 America/Barbados 2015-03-10 10170038 Tropical Sunset Tropical Sunset 13.18997 -59.63938 S HTL BB 0 9 America/Barbados 2015-03-10 10170039 Mango Bay Beach Resort Mango Bay Beach Resort 13.18731 -59.63794 S HTL BB 0 9 America/Barbados 2015-03-10 10171402 Savannah Beach All Inclusive Savannah Beach All Inclusive 13.07738 -59.60213 S HTL BB 0 16 America/Barbados 2015-03-12 10171404 Pirate's Inn Pirate's Inn 13.08561 -59.58075 S HTL BB 0 59 America/Barbados 2015-03-12 10174497 Tamarid Cove Tamarid Cove 13.16317 -59.63753 S HTL BB 0 5 America/Barbados 2015-03-27 10174498 The Club Barbados Resort & Spa The Club Barbados Resort & Spa 13.17757 -59.63799 S HTL BB 0 13 America/Barbados 2015-03-27 10174499 Boungainvillea Beach Resort Boungainvillea Beach Resort 13.0657 -59.56015 S HTL BB 0 1 America/Barbados 2015-03-27 10175825 Couples Babados All Inclusive Couples Babados All Inclusive 13.0676 -59.57306 S HTL BB 0 14 America/Barbados 2015-03-31 10176103 Courtyard By Marriott Bridgetown Courtyard By Marriott Bridgetown 13.07583 -59.59737 S HTL BB 0 23 America/Barbados 2015-03-31 10176378 Travellers Palm Travellers Palm 13.18241 -59.63883 S HTL BB 0 13 America/Barbados 2015-03-31 ================================================ FILE: examples/java-api/src/test/resources/org/locationtech/geowave/examples/ingest/geonames/readme.txt ================================================ Readme for GeoNames Gazetteer extract files ============================================================================================================ This work is licensed under a Creative Commons Attribution 3.0 License, see http://creativecommons.org/licenses/by/3.0/ The Data is provided "as is" without warranty or any representation of accuracy, timeliness or completeness. The data format is tab-delimited text in utf8 encoding. 
Files : ------- XX.zip : features for country with iso code XX, see 'geoname' table for columns allCountries.zip : all countries combined in one file, see 'geoname' table for columns cities1000.zip : all cities with a population > 1000 or seats of adm div (ca 80.000), see 'geoname' table for columns cities5000.zip : all cities with a population > 5000 or PPLA (ca 40.000), see 'geoname' table for columns cities15000.zip : all cities with a population > 15000 or capitals (ca 20.000), see 'geoname' table for columns alternateNames.zip : two files, alternate names with language codes and geonameId, file with iso language codes admin1CodesASCII.txt : ascii names of admin divisions. (beta > http://forum.geonames.org/gforum/posts/list/208.page#1143) admin2Codes.txt : names for administrative subdivision 'admin2 code' (UTF8), Format : concatenated codes name asciiname geonameId iso-languagecodes.txt : iso 639 language codes, as used for alternate names in file alternateNames.zip featureCodes.txt : name and description for feature classes and feature codes timeZones.txt : countryCode, timezoneId, gmt offset on 1st of January, dst offset to gmt on 1st of July (of the current year), rawOffset without DST countryInfo.txt : country information : iso codes, fips codes, languages, capital ,... see the geonames webservices for additional country information, bounding box : http://ws.geonames.org/countryInfo? country names in different languages : http://ws.geonames.org/countryInfoCSV?lang=it modifications-.txt : all records modified on the previous day, the date is in yyyy-MM-dd format. You can use this file to daily synchronize your own geonames database. deletes-.txt : all records deleted on the previous day, format : geonameId name comment. alternateNamesModifications-.txt : all alternate names modified on the previous day, alternateNamesDeletes-.txt : all alternate names deleted on the previous day, format : alternateNameId geonameId name comment. 
userTags.zip : user tags , format : geonameId tag. hierarchy.zip : parentId, childId, type. The type 'ADM' stands for the admin hierarchy modeled by the admin1-4 codes. The other entries are entered with the user interface. The relation toponym-adm hierarchy is not included in the file, it can instead be built from the admincodes of the toponym. The main 'geoname' table has the following fields : --------------------------------------------------- geonameid : integer id of record in geonames database name : name of geographical point (utf8) varchar(200) asciiname : name of geographical point in plain ascii characters, varchar(200) alternatenames : alternatenames, comma separated, ascii names automatically transliterated, convenience attribute from alternatename table, varchar(10000) latitude : latitude in decimal degrees (wgs84) longitude : longitude in decimal degrees (wgs84) feature class : see http://www.geonames.org/export/codes.html, char(1) feature code : see http://www.geonames.org/export/codes.html, varchar(10) country code : ISO-3166 2-letter country code, 2 characters cc2 : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters admin1 code : fipscode (subject to change to iso code), see exceptions below, see file admin1Codes.txt for display names of this code; varchar(20) admin2 code : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80) admin3 code : code for third level administrative division, varchar(20) admin4 code : code for fourth level administrative division, varchar(20) population : bigint (8 byte int) elevation : in meters, integer dem : digital elevation model, srtm3 or gtopo30, average elevation of 3''x3'' (ca 90mx90m) or 30''x30'' (ca 900mx900m) area in meters, integer. srtm processed by cgiar/ciat. 
timezone : the timezone id (see file timeZone.txt) varchar(40) modification date : date of last modification in yyyy-MM-dd format AdminCodes: Most adm1 are FIPS codes. ISO codes are used for US, CH, BE and ME. UK and Greece are using an additional level between country and fips code. The code '00' stands for general features where no specific adm1 code is defined. The table 'alternate names' : ----------------------------- alternateNameId : the id of this alternate name, int geonameid : geonameId referring to id in table 'geoname', int isolanguage : iso 639 language code 2- or 3-characters; 4-characters 'post' for postal codes and 'iata','icao' and faac for airport codes, fr_1793 for French Revolution names, abbr for abbreviation, link for a website, varchar(7) alternate name : alternate name or name variant, varchar(200) isPreferredName : '1', if this alternate name is an official/preferred name isShortName : '1', if this is a short name like 'California' for 'State of California' isColloquial : '1', if this alternate name is a colloquial or slang term isHistoric : '1', if this alternate name is historic and was used in the past Remark : the field 'alternatenames' in the table 'geoname' is a short version of the 'alternatenames' table without links and postal codes but with ascii transliterations. You probably don't need both. If you don't need to know the language of a name variant, the field 'alternatenames' will be sufficient. If you need to know the language of a name variant, then you will need to load the table 'alternatenames' and you can drop the column in the geoname table. 
Statistics on the number of features per country and the feature class and code distributions : http://www.geonames.org/statistics/ Continent codes : AF : Africa geonameId=6255146 AS : Asia geonameId=6255147 EU : Europe geonameId=6255148 NA : North America geonameId=6255149 OC : Oceania geonameId=6255151 SA : South America geonameId=6255150 AN : Antarctica geonameId=6255152 If you find errors or miss important places, please do use the wiki-style edit interface on our website http://www.geonames.org to correct inaccuracies and to add new records. Thanks in the name of the geonames community for your valuable contribution. Data Sources: http://www.geonames.org/data-sources.html More Information is also available in the geonames faq : http://forum.geonames.org/gforum/forums/show/6.page The forum : http://forum.geonames.org or the google group : http://groups.google.com/group/geonames ================================================ FILE: extensions/adapters/auth/pom.xml ================================================ 4.0.0 geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-adapter-auth GeoWave Adapter Auth Authorization functionality for GeoWave Data Adapters org.springframework.security spring-security-core com.fasterxml.jackson.core jackson-databind commons-logging commons-logging 1.2 test ================================================ FILE: extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/AuthorizationEntry.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

/**
 * A single user-to-authorizations mapping used by JSON-based authorization data sets. Instances
 * are populated by Jackson data binding, so the mutable bean shape (no-arg constructor plus
 * getters/setters) is intentional.
 */
public class AuthorizationEntry {
  // Name of the user this entry applies to.
  String userName;
  // Authorizations granted to the user. Generic parameter restored: the raw List defeated
  // compile-time type checking on the accessors below.
  List<String> authorizations;

  protected String getUserName() {
    return userName;
  }

  protected void setUserName(final String userName) {
    this.userName = userName;
  }

  protected List<String> getAuthorizations() {
    return authorizations;
  }

  protected void setAuthorizations(final List<String> authorizations) {
    this.authorizations = authorizations;
  }
}

/**
 * SPI for creating an authorization provider with the given URL. Implementations are discovered
 * via META-INF/services registration.
 */
public interface AuthorizationFactorySPI {
  /**
   * Builds the provider that will perform authorization lookups.
   *
   * @param location Any connection information to be interpreted by the provider (for file-backed
   *        providers this is a file-protocol URL; may be null for providers that need no input).
   * @return the authorization provider
   */
  AuthorizationSPI create(URL location);
}

/** A provider that looks up authorizations given a user name. */
public interface AuthorizationSPI {

  /**
   * @return the authorization tokens granted to the current user; empty when none apply
   */
  String[] getAuthorizations();
}

/**
 * Holds the mapping from user names to their authorization lists for JSON-backed authorization
 * data. Populated by Jackson data binding, hence the mutable bean accessors.
 */
public class AuthorizationSet {
  // Generic parameters restored: the raw Map lost compile-time checking of keys and values.
  Map<String, List<String>> authorizationSet = new HashMap<>();

  protected Map<String, List<String>> getAuthorizationSet() {
    return authorizationSet;
  }

  protected void setAuthorizationSet(final Map<String, List<String>> authorizationSet) {
    this.authorizationSet = authorizationSet;
  }

  /**
   * Looks up the authorizations for a user.
   *
   * @param name the user name to look up
   * @return the user's authorizations, or an empty list for an unknown user (never null)
   */
  public List<String> findAuthorizationsFor(final String name) {
    final List<String> r = authorizationSet.get(name);
    return r == null ? new LinkedList<>() : r;
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.auth; import java.net.URL; public class EmptyAuthorizationFactory implements AuthorizationFactorySPI { @Override public AuthorizationSPI create(final URL url) { return new EmptyAuthorizationProvider(); } @Override public String toString() { return "empty"; } } ================================================ FILE: extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/EmptyAuthorizationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.auth; /** No authorization provided. */ public class EmptyAuthorizationProvider implements AuthorizationSPI { public EmptyAuthorizationProvider() {} @Override public String[] getAuthorizations() { return new String[0]; } } ================================================ FILE: extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/JsonFileAuthorizationFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.auth; import java.net.URL; /** * Stores authorization data in a json file. Format: { "authorizationSet" : { "fred" : * ["auth1","auth2"], "barney" : ["auth1","auth3"] } } */ public class JsonFileAuthorizationFactory implements AuthorizationFactorySPI { @Override public AuthorizationSPI create(final URL url) { return new JsonFileAuthorizationProvider(url); } @Override public String toString() { return "jsonFile"; } } ================================================ FILE: extensions/adapters/auth/src/main/java/org/locationtech/geowave/adapter/auth/JsonFileAuthorizationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.auth; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.List; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.UserDetails; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; /** * Use the user details to to determine a user's name. Given the user's name, lookup the user * credentials in a Json file. The location of the file is provided through the URL (protocol is * file). */ public class JsonFileAuthorizationProvider implements AuthorizationSPI { private AuthorizationSet authorizationSet; public JsonFileAuthorizationProvider(final URL location) { if (location == null) { authorizationSet = new AuthorizationSet(); } else { String path = location.getPath(); if (!location.getProtocol().equals("file") || (!new File(path).canRead() && !new File("." + path).canRead())) { throw new IllegalArgumentException("Cannot find file " + location.toString()); } try { if (!new File(path).canRead()) { path = "." 
+ path; } parse(new File(path)); } catch (final JsonParseException e) { throw new IllegalArgumentException("Cannot parse file " + location.toString(), e); } catch (final JsonMappingException e) { throw new IllegalArgumentException("Cannot parse file " + location.toString(), e); } catch (final IOException e) { throw new IllegalArgumentException("Cannot parse file " + location.toString(), e); } } } private void parse(final File file) throws JsonParseException, JsonMappingException, IOException { final ObjectMapper mapper = new ObjectMapper(); authorizationSet = mapper.readValue(file, AuthorizationSet.class); } @Override public String[] getAuthorizations() { final Authentication auth = SecurityContextHolder.getContext().getAuthentication(); if (auth == null) { return new String[0]; } final Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal(); String userName = principal.toString(); if (principal instanceof UserDetails) { // most likely type of principal final UserDetails userDetails = (UserDetails) principal; userName = userDetails.getUsername(); } final List auths = authorizationSet.findAuthorizationsFor(userName); final String[] result = new String[auths.size()]; auths.toArray(result); return result; } } ================================================ FILE: extensions/adapters/auth/src/main/resources/META-INF/services/org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI ================================================ org.locationtech.geowave.adapter.auth.JsonFileAuthorizationFactory org.locationtech.geowave.adapter.auth.EmptyAuthorizationFactory ================================================ FILE: extensions/adapters/auth/src/test/java/org/locationtech/geowave/adapter/auth/JsonFileAuthorizationAdapterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.auth; import static org.junit.Assert.assertTrue; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import org.junit.Test; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.User; import org.springframework.security.core.userdetails.UserDetails; public class JsonFileAuthorizationAdapterTest { @Test public void testBasic() throws MalformedURLException { final SecurityContext context = new SecurityContext() { /** */ private static final long serialVersionUID = 1L; @Override public Authentication getAuthentication() { final Authentication auth = new UsernamePasswordAuthenticationToken("fred", "barney"); return auth; } @Override public void setAuthentication(final Authentication arg0) {} }; SecurityContextHolder.setContext(context); final File cwd = new File("."); final AuthorizationSPI authProvider = new JsonFileAuthorizationFactory().create( new URL("file://" + cwd.getAbsolutePath() + "/src/test/resources/jsonAuthfile.json")); assertTrue(Arrays.equals(new String[] {"1", "2", "3"}, authProvider.getAuthorizations())); } @Test public void testUserDetails() throws MalformedURLException { final UserDetails ud = new User("fred", "fred", new ArrayList()); final 
SecurityContext context = new SecurityContext() { /** */ private static final long serialVersionUID = 1L; @Override public Authentication getAuthentication() { final Authentication auth = new UsernamePasswordAuthenticationToken(ud, "barney"); return auth; } @Override public void setAuthentication(final Authentication arg0) {} }; SecurityContextHolder.setContext(context); final File cwd = new File("."); final AuthorizationSPI authProvider = new JsonFileAuthorizationFactory().create( new URL("file://" + cwd.getAbsolutePath() + "/src/test/resources/jsonAuthfile.json")); assertTrue(Arrays.equals(new String[] {"1", "2", "3"}, authProvider.getAuthorizations())); } } ================================================ FILE: extensions/adapters/auth/src/test/resources/jsonAuthfile.json ================================================ { "authorizationSet": { "fred" : ["1","2","3"], "barney" : ["a"] } } ================================================ FILE: extensions/adapters/raster/.gitignore ================================================ src/main/java/org/locationtech/geowave/adapter/raster/protobuf ================================================ FILE: extensions/adapters/raster/pom.xml ================================================ 4.0.0 geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-adapter-raster Geowave Raster Adapter Geowave Data Adapter for Raster Data com.google.protobuf protobuf-java it.geosolutions.imageio-ext imageio-ext-gdalgeotiff org.geotools gt-imageio-ext-gdal me.lemire.integercompression JavaFastPFOR 0.1.12 com.google.guava guava org.apache.commons commons-math net.lingala.zip4j zip4j 1.3.2 org.codehaus.plexus plexus-archiver 2.2 org.locationtech.geowave geowave-adapter-auth ${project.version} org.locationtech.geowave geowave-core-mapreduce ${project.version} org.locationtech.geowave geowave-core-geotime ${project.version} org.geotools gt-opengis org.geotools gt-main org.geotools gt-wps org.geoserver gs-wms gt-epsg-hsql 
org.geotools log4j log4j commons-beanutils commons-beanutils org.geotools gt-render org.geotools gt-epsg-wkt com.github.os72 protoc-jar-maven-plugin ${mavenprotoc.version} generate-sources run ${hbaseprotoc.version} src/main/java src/main/protobuf ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/FitToIndexGridCoverage.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster;

import java.awt.image.RenderedImage;
import java.awt.image.renderable.RenderableImage;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.locationtech.jts.geom.Geometry;
import org.opengis.coverage.CannotEvaluateException;
import org.opengis.coverage.PointOutsideCoverageException;
import org.opengis.coverage.SampleDimension;
import org.opengis.coverage.grid.GridCoverage;
import org.opengis.coverage.grid.GridGeometry;
import org.opengis.geometry.DirectPosition;
import org.opengis.geometry.Envelope;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.util.Record;
import org.opengis.util.RecordType;

/**
 * A {@link GridCoverage} decorator that associates a coverage with the GeoWave index placement
 * metadata it was fit to: the partition and sort keys of the index row, the {@link Resolution} it
 * was resampled to, the envelope of the original (pre-fit) coverage, and the coverage footprint in
 * both world and screen coordinates. Every {@code GridCoverage} method delegates verbatim to the
 * wrapped coverage; this class only adds accessors for the index metadata.
 *
 * <p>NOTE(review): generic type parameters (e.g. on {@code Map}, {@code List}, {@code Set}) appear
 * to have been stripped by the extraction that produced this text — confirm against the upstream
 * source before relying on the raw types seen here.
 */
public class FitToIndexGridCoverage implements GridCoverage {
  // The wrapped coverage; all GridCoverage behavior is delegated to it unchanged.
  private final GridCoverage gridCoverage;
  // Index row placement for this coverage tile.
  private final byte[] partitionKey;
  private final byte[] sortKey;
  // Resolution the coverage was fit to within the index.
  private final Resolution resolution;
  // Envelope of the source coverage before it was fit to the index grid.
  private final Envelope originalEnvelope;
  // Footprint of the coverage in world (CRS) coordinates.
  private final Geometry footprintWorldGeometry;
  // Footprint of the coverage in screen (pixel) coordinates.
  private final Geometry footprintScreenGeometry;
  // Arbitrary coverage properties carried alongside the tile.
  private final Map properties;

  /**
   * @param gridCoverage the coverage to wrap; all {@code GridCoverage} calls delegate to it
   * @param partitionKey the index partition key this coverage was fit to
   * @param sortKey the index sort key this coverage was fit to
   * @param resolution the resolution the coverage was fit to
   * @param originalEnvelope the envelope of the original (pre-fit) coverage
   * @param footprintWorldGeometry coverage footprint in world coordinates
   * @param footprintScreenGeometry coverage footprint in screen coordinates
   * @param properties additional coverage properties
   */
  public FitToIndexGridCoverage(
      final GridCoverage gridCoverage,
      final byte[] partitionKey,
      final byte[] sortKey,
      final Resolution resolution,
      final Envelope originalEnvelope,
      final Geometry footprintWorldGeometry,
      final Geometry footprintScreenGeometry,
      final Map properties) {
    this.gridCoverage = gridCoverage;
    this.partitionKey = partitionKey;
    this.sortKey = sortKey;
    this.resolution = resolution;
    this.originalEnvelope = originalEnvelope;
    this.footprintWorldGeometry = footprintWorldGeometry;
    this.footprintScreenGeometry = footprintScreenGeometry;
    this.properties = properties;
  }

  /** @return the properties supplied at construction (may be null; not defensively copied) */
  public Map getProperties() {
    return properties;
  }

  /** @return the coverage footprint in world (CRS) coordinates */
  public Geometry getFootprintWorldGeometry() {
    return footprintWorldGeometry;
  }

  /** @return the coverage footprint in screen (pixel) coordinates */
  public Geometry getFootprintScreenGeometry() {
    return footprintScreenGeometry;
  }

  // NOTE(review): the key accessors expose the internal byte arrays without copying — callers
  // must not mutate the returned arrays; presumably intentional for performance, confirm upstream.
  public byte[] getPartitionKey() {
    return partitionKey;
  }

  public byte[] getSortKey() {
    return sortKey;
  }

  /** @return the resolution this coverage was fit to */
  public Resolution getResolution() {
    return resolution;
  }

  /** @return the wrapped (fit) coverage itself */
  public GridCoverage getOriginalCoverage() {
    return gridCoverage;
  }

  /** @return the envelope of the original coverage before fitting */
  public Envelope getOriginalEnvelope() {
    return originalEnvelope;
  }

  // ------------------------------------------------------------------
  // Everything below is verbatim delegation to the wrapped coverage.
  // ------------------------------------------------------------------

  @Override
  public boolean isDataEditable() {
    return gridCoverage.isDataEditable();
  }

  @Override
  public GridGeometry getGridGeometry() {
    return gridCoverage.getGridGeometry();
  }

  @Override
  public int[] getOptimalDataBlockSizes() {
    return gridCoverage.getOptimalDataBlockSizes();
  }

  @Override
  public int getNumOverviews() {
    return gridCoverage.getNumOverviews();
  }

  @Override
  public GridGeometry getOverviewGridGeometry(final int index) throws IndexOutOfBoundsException {
    return gridCoverage.getOverviewGridGeometry(index);
  }

  @Override
  public GridCoverage getOverview(final int index) throws IndexOutOfBoundsException {
    return gridCoverage.getOverview(index);
  }

  @Override
  public CoordinateReferenceSystem getCoordinateReferenceSystem() {
    return gridCoverage.getCoordinateReferenceSystem();
  }

  @Override
  public Envelope getEnvelope() {
    return gridCoverage.getEnvelope();
  }

  @Override
  public List getSources() {
    return gridCoverage.getSources();
  }

  @Override
  public RecordType getRangeType() {
    return gridCoverage.getRangeType();
  }

  @Override
  public Set evaluate(final DirectPosition p, final Collection list)
      throws PointOutsideCoverageException, CannotEvaluateException {
    return gridCoverage.evaluate(p, list);
  }

  @Override
  public RenderedImage getRenderedImage() {
    return gridCoverage.getRenderedImage();
  }

  @Override
  public Object evaluate(final DirectPosition point)
      throws PointOutsideCoverageException, CannotEvaluateException {
    return gridCoverage.evaluate(point);
  }

  @Override
  public boolean[] evaluate(final DirectPosition point, final boolean[] destination)
      throws PointOutsideCoverageException, CannotEvaluateException,
      ArrayIndexOutOfBoundsException {
    return gridCoverage.evaluate(point, destination);
  }

  @Override
  public byte[] evaluate(final DirectPosition point, final byte[] destination)
      throws PointOutsideCoverageException, CannotEvaluateException,
      ArrayIndexOutOfBoundsException {
    return gridCoverage.evaluate(point, destination);
  }

  @Override
  public int[] evaluate(final DirectPosition point, final int[] destination)
      throws PointOutsideCoverageException, CannotEvaluateException,
      ArrayIndexOutOfBoundsException {
    return gridCoverage.evaluate(point, destination);
  }

  @Override
  public float[] evaluate(final DirectPosition point, final float[] destination)
      throws PointOutsideCoverageException, CannotEvaluateException,
      ArrayIndexOutOfBoundsException {
    return gridCoverage.evaluate(point, destination);
  }

  @Override
  public double[] evaluate(final DirectPosition point, final double[] destination)
      throws PointOutsideCoverageException, CannotEvaluateException,
      ArrayIndexOutOfBoundsException {
    return gridCoverage.evaluate(point, destination);
  }

  @Override
  public int getNumSampleDimensions() {
    return gridCoverage.getNumSampleDimensions();
  }

  @Override
  public SampleDimension getSampleDimension(final int index) throws IndexOutOfBoundsException {
    return gridCoverage.getSampleDimension(index);
  }

  @Override
  public RenderableImage getRenderableImage(final int xAxis, final int yAxis)
      throws UnsupportedOperationException, IndexOutOfBoundsException {
    return gridCoverage.getRenderableImage(xAxis, yAxis);
  }
}

================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/ImageWorkerPredefineStats.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster; import java.awt.RenderingHints; import java.awt.image.RenderedImage; import java.io.File; import java.io.IOException; import javax.media.jai.Histogram; import javax.media.jai.PlanarImage; import javax.media.jai.RenderedImageAdapter; import org.apache.commons.lang3.tuple.Pair; import org.geotools.image.ImageWorker; public class ImageWorkerPredefineStats extends ImageWorker { public ImageWorkerPredefineStats() { super(); // TODO Auto-generated constructor stub } public ImageWorkerPredefineStats(final File input) throws IOException { super(input); } public ImageWorkerPredefineStats(final RenderedImage image) { super(image); } public ImageWorkerPredefineStats(final RenderingHints hints) { super(hints); } public ImageWorkerPredefineStats setStats(final Pair[] nameValuePairs) { image = new RenderedImageAdapter(image); for (final Pair pair : nameValuePairs) { ((PlanarImage) (image)).setProperty(pair.getLeft(), pair.getRight()); } return this; } public ImageWorkerPredefineStats setHistogram(final Histogram histogram) { image = new RenderedImageAdapter(image); ((PlanarImage) (image)).setProperty("histogram", histogram); return this; } public ImageWorkerPredefineStats setExtrema(final double[][] extrema) { image = new RenderedImageAdapter(image); ((PlanarImage) (image)).setProperty("extrema", extrema); return this; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/RasterAdapterPersistableRegistry.java ================================================ /** * Copyright (c) 2013-2022 Contributors 
to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster; import org.locationtech.geowave.adapter.raster.adapter.ClientMergeableRasterTile; import org.locationtech.geowave.adapter.raster.adapter.InternalRasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile; import org.locationtech.geowave.adapter.raster.adapter.merge.MultiAdapterServerMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileRowTransform; import org.locationtech.geowave.adapter.raster.adapter.merge.SingleAdapterServerMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataByFilter; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataBySampleIndex; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy; import org.locationtech.geowave.adapter.raster.stats.HistogramConfig; import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry; import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi; import org.locationtech.geowave.core.store.util.CompoundHierarchicalIndexStrategyWrapper; public class RasterAdapterPersistableRegistry implements PersistableRegistrySpi, InternalPersistableRegistry { @Override public PersistableIdAndConstructor[] getSupportedPersistables() { return new PersistableIdAndConstructor[] { new PersistableIdAndConstructor((short) 600, Resolution::new), new PersistableIdAndConstructor((short) 601, 
CompoundHierarchicalIndexStrategyWrapper::new), new PersistableIdAndConstructor((short) 602, RasterDataAdapter::new), new PersistableIdAndConstructor((short) 603, RasterTile::new), new PersistableIdAndConstructor((short) 604, RasterTileRowTransform::new), new PersistableIdAndConstructor((short) 605, MultiAdapterServerMergeStrategy::new), new PersistableIdAndConstructor((short) 606, NoDataByFilter::new), new PersistableIdAndConstructor((short) 607, NoDataBySampleIndex::new), new PersistableIdAndConstructor((short) 608, NoDataMergeStrategy::new), new PersistableIdAndConstructor((short) 609, HistogramConfig::new), new PersistableIdAndConstructor((short) 614, ServerMergeableRasterTile::new), new PersistableIdAndConstructor((short) 615, SingleAdapterServerMergeStrategy::new), new PersistableIdAndConstructor((short) 616, ClientMergeableRasterTile::new), // 617 used by RasterRegisteredIndexFieldMappers new PersistableIdAndConstructor((short) 618, InternalRasterDataAdapter::new)}; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/RasterUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster;

import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Transparency;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.IndexColorModel;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.awt.image.SampleModel;
import java.awt.image.WritableRaster;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import javax.media.jai.BorderExtender;
import javax.media.jai.Histogram;
import javax.media.jai.Interpolation;
import javax.media.jai.JAI;
import javax.media.jai.PlanarImage;
import javax.media.jai.RasterFactory;
import javax.media.jai.RenderedImageAdapter;
import javax.media.jai.RenderedOp;
import javax.media.jai.TiledImage;
import org.geotools.coverage.Category;
import org.geotools.coverage.CoverageFactoryFinder;
import org.geotools.coverage.GridSampleDimension;
import org.geotools.coverage.TypeMap;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.coverage.grid.GridCoverageFactory;
import org.geotools.coverage.processing.Operations;
import org.geotools.geometry.DirectPosition2D;
import org.geotools.geometry.Envelope2D;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.geometry.jts.JTSFactoryFinder;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.image.ImageWorker;
import org.geotools.image.util.ImageUtilities;
import org.geotools.metadata.i18n.ErrorKeys;
import org.geotools.metadata.i18n.Errors;
import org.geotools.referencing.CRS;
import org.geotools.referencing.operation.BufferedCoordinateOperationFactory;
import org.geotools.referencing.operation.builder.GridToEnvelopeMapper;
import org.geotools.referencing.operation.matrix.MatrixFactory;
import org.geotools.referencing.operation.transform.ProjectiveTransform;
import org.geotools.util.NumberRange;
import org.geotools.util.factory.Hints;
import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;
import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.FloatCompareUtils;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.simplify.DouglasPeuckerSimplifier;
import org.opengis.coverage.SampleDimension;
import org.opengis.coverage.SampleDimensionType;
import org.opengis.coverage.grid.GridCoverage;
import org.opengis.geometry.Envelope;
import org.opengis.geometry.MismatchedDimensionException;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.datum.PixelInCell;
import org.opengis.referencing.operation.CoordinateOperationFactory;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.Matrix;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.sun.media.imageioimpl.common.BogusColorSpace;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * Static utilities for GeoWave's raster adapter: computing coverage footprints, building
 * grid-to-world transforms, filling rasters with no-data values, mosaicking tiles into a single
 * coverage, and constructing double-banded rasters/coverages/adapters.
 *
 * <p>NOTE(review): generic type parameters (e.g. on {@code Iterator}, {@code List},
 * {@code Hashtable}, {@code HashMap}) appear to have been stripped by the extraction that produced
 * this text — confirm raw types against the upstream source.
 */
public class RasterUtils {
  // Max samples filled per band in one setSamples call; larger rasters are filled tile-wise.
  private static int MAX_FILL_SIZE = 4_194_304;
  private static int MAX_FILL_SIZE_WIDTH = 2048;
  private static int MAX_FILL_SIZE_HEIGHT = 2048;
  // Quality-oriented hints used for resample operations when none are forced.
  private static final RenderingHints DEFAULT_RENDERING_HINTS =
      new RenderingHints(
          new ImmutableMap.Builder().put(
              RenderingHints.KEY_RENDERING,
              RenderingHints.VALUE_RENDER_QUALITY).put(
                  RenderingHints.KEY_ALPHA_INTERPOLATION,
                  RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY).put(
                      RenderingHints.KEY_ANTIALIASING,
                      RenderingHints.VALUE_ANTIALIAS_ON).put(
                          RenderingHints.KEY_COLOR_RENDERING,
                          RenderingHints.VALUE_COLOR_RENDER_QUALITY).put(
                              RenderingHints.KEY_DITHERING,
                              RenderingHints.VALUE_DITHER_ENABLE).put(
                                  JAI.KEY_BORDER_EXTENDER,
                                  BorderExtender.createInstance(BorderExtender.BORDER_COPY)).put(
                                      Hints.LENIENT_DATUM_SHIFT,
                                      Boolean.TRUE).build());
  // Shared coordinate-operation factory with lenient datum shifts enabled.
  public static final CoordinateOperationFactory OPERATION_FACTORY =
      new BufferedCoordinateOperationFactory(new Hints(Hints.LENIENT_DATUM_SHIFT, Boolean.TRUE));
  // Lazily initialized in getCoverageOperations(); replaceable via forceRenderingHints().
  private static Operations resampleOperations;
  private static final Logger LOGGER = LoggerFactory.getLogger(RasterUtils.class);
  // Bounds on the number of segments used when densifying a footprint edge before reprojection.
  private static final int MIN_SEGMENTS = 5;
  private static final int MAX_SEGMENTS = 500;
  // Footprints with more vertices than this are simplified after reprojection.
  private static final int MAX_VERTICES_BEFORE_SIMPLIFICATION = 20;
  private static final double SIMPLIFICATION_MAX_DEGREES = 0.0001;

  /**
   * Computes the footprint polygon of a grid coverage in the target CRS.
   *
   * @param gridCoverage the coverage whose footprint is wanted
   * @param targetCrs the CRS the footprint should be expressed in
   * @return the footprint geometry, or null if it could not be computed
   */
  public static Geometry getFootprint(
      final GridCoverage gridCoverage,
      final CoordinateReferenceSystem targetCrs) {
    return getFootprint(getReferenceEnvelope(gridCoverage, targetCrs), gridCoverage);
  }

  /**
   * Returns the coverage envelope as a {@link ReferencedEnvelope}, reprojected to the target CRS
   * when it differs from the coverage CRS. Falls back to the unprojected envelope (with a warning)
   * if the transform fails.
   *
   * @param gridCoverage the source coverage
   * @param targetCrs the desired CRS (may equal the source CRS or be null)
   * @return the (possibly reprojected) envelope
   */
  public static ReferencedEnvelope getReferenceEnvelope(
      final GridCoverage gridCoverage,
      final CoordinateReferenceSystem targetCrs) {
    final CoordinateReferenceSystem sourceCrs = gridCoverage.getCoordinateReferenceSystem();
    final Envelope sampleEnvelope = gridCoverage.getEnvelope();
    final ReferencedEnvelope sampleReferencedEnvelope =
        new ReferencedEnvelope(
            new org.locationtech.jts.geom.Envelope(
                sampleEnvelope.getMinimum(0),
                sampleEnvelope.getMaximum(0),
                sampleEnvelope.getMinimum(1),
                sampleEnvelope.getMaximum(1)),
            gridCoverage.getCoordinateReferenceSystem());
    ReferencedEnvelope projectedReferenceEnvelope = sampleReferencedEnvelope;
    if ((targetCrs != null) && !targetCrs.equals(sourceCrs)) {
      try {
        projectedReferenceEnvelope = sampleReferencedEnvelope.transform(targetCrs, true);
      } catch (TransformException | FactoryException e) {
        LOGGER.warn("Unable to transform envelope of grid coverage to " + targetCrs.getName(), e);
      }
    }
    return projectedReferenceEnvelope;
  }

  /**
   * Computes the footprint polygon of a coverage within a (possibly reprojected) envelope. The
   * envelope boundary is densified (more points per edge for larger spans, bounded by
   * MIN_SEGMENTS/MAX_SEGMENTS; just corner points when no reprojection is needed), transformed to
   * the envelope's CRS, and simplified if it ends up with many vertices.
   *
   * @param projectedReferenceEnvelope the envelope in the target CRS
   * @param gridCoverage the source coverage
   * @return the footprint geometry, or null on transform failure
   */
  public static Geometry getFootprint(
      final ReferencedEnvelope projectedReferenceEnvelope,
      final GridCoverage gridCoverage) {
    try {
      final Envelope sampleEnvelope = gridCoverage.getEnvelope();
      final double avgSpan =
          (projectedReferenceEnvelope.getSpan(0) + projectedReferenceEnvelope.getSpan(1)) / 2;
      final MathTransform gridCrsToWorldCrs =
          CRS.findMathTransform(
              gridCoverage.getCoordinateReferenceSystem(),
              projectedReferenceEnvelope.getCoordinateReferenceSystem(),
              true);
      final Coordinate[] polyCoords =
          getWorldCoordinates(
              sampleEnvelope.getMinimum(0),
              sampleEnvelope.getMinimum(1),
              sampleEnvelope.getMaximum(0),
              sampleEnvelope.getMaximum(1),
              // identity transform: corners suffice; otherwise densify proportionally to span
              gridCrsToWorldCrs.isIdentity() ? 2
                  : (int) Math.min(
                      Math.max((avgSpan * MIN_SEGMENTS) / SIMPLIFICATION_MAX_DEGREES, MIN_SEGMENTS),
                      MAX_SEGMENTS),
              gridCrsToWorldCrs);
      final Polygon poly = new GeometryFactory().createPolygon(polyCoords);
      if (polyCoords.length > MAX_VERTICES_BEFORE_SIMPLIFICATION) {
        final Geometry retVal =
            DouglasPeuckerSimplifier.simplify(poly, SIMPLIFICATION_MAX_DEGREES);
        // simplification can collapse the polygon to empty; fall back to the full polygon
        if (retVal.isEmpty()) {
          return poly;
        }
        return retVal;
      } else {
        return poly;
      }
    } catch (MismatchedDimensionException | TransformException | FactoryException e1) {
      LOGGER.warn("Unable to calculate grid coverage footprint", e1);
    }
    return null;
  }

  /**
   * Unions two geometries (null-tolerant) and simplifies the result.
   *
   * @param geometry1 first geometry, may be null
   * @param geometry2 second geometry, may be null
   * @return the simplified union; if one argument is null, the other is returned as-is
   */
  public static Geometry combineIntoOneGeometry(
      final Geometry geometry1,
      final Geometry geometry2) {
    if (geometry1 == null) {
      return geometry2;
    } else if (geometry2 == null) {
      return geometry1;
    }
    final List geometry = new ArrayList<>();
    geometry.add(geometry1);
    geometry.add(geometry2);
    return DouglasPeuckerSimplifier.simplify(
        combineIntoOneGeometry(geometry),
        SIMPLIFICATION_MAX_DEGREES);
  }

  /** Unions a collection of geometries; union() repairs any overlap in the temporary collection. */
  private static Geometry combineIntoOneGeometry(final Collection geometries) {
    final GeometryFactory factory = JTSFactoryFinder.getGeometryFactory(null);
    // note the following geometry collection may be invalid (say with
    // overlapping polygons); union() resolves that. If union ever throws here, consider
    // falling back to returning the raw collection.
    final Geometry geometryCollection = factory.buildGeometry(geometries);
    return geometryCollection.union();
  }

  /**
   * Densifies the boundary of a grid-space rectangle and transforms each boundary point to world
   * coordinates.
   *
   * @throws MismatchedDimensionException if the transform dimensions don't match
   * @throws TransformException if a point cannot be transformed
   */
  private static Coordinate[] getWorldCoordinates(
      final double minX,
      final double minY,
      final double maxX,
      final double maxY,
      final int numPointsPerSegment,
      final MathTransform gridToCRS) throws MismatchedDimensionException, TransformException {
    final Point2D[] gridCoordinates =
        getGridCoordinates(minX, minY, maxX, maxY, numPointsPerSegment);
    final Coordinate[] worldCoordinates = new Coordinate[gridCoordinates.length];
    for (int i = 0; i < gridCoordinates.length; i++) {
      final DirectPosition2D worldPt = new DirectPosition2D();
      final DirectPosition2D dp = new DirectPosition2D(gridCoordinates[i]);
      gridToCRS.transform(dp, worldPt);
      worldCoordinates[i] = new Coordinate(worldPt.getX(), worldPt.getY());
    }
    return worldCoordinates;
  }

  /**
   * Builds a closed ring of boundary points for the rectangle, walking the four edges
   * (left-up, top-right, right-down, bottom-left) with numPointsPerSegment points each; the last
   * point repeats the first so the ring closes.
   */
  private static Point2D[] getGridCoordinates(
      final double minX,
      final double minY,
      final double maxX,
      final double maxY,
      final int numPointsPerSegment) {
    // 4 edges sharing corner points, plus one closing point
    final Point2D[] coordinates = new Point2D[((numPointsPerSegment - 1) * 4) + 1];
    fillCoordinates(
        true,
        minX,
        minY,
        maxY,
        (maxY - minY) / (numPointsPerSegment - 1),
        0,
        coordinates);
    fillCoordinates(
        false,
        maxY,
        minX,
        maxX,
        (maxX - minX) / (numPointsPerSegment - 1),
        numPointsPerSegment - 1,
        coordinates);
    fillCoordinates(
        true,
        maxX,
        maxY,
        minY,
        (maxY - minY) / (numPointsPerSegment - 1),
        (numPointsPerSegment - 1) * 2,
        coordinates);
    fillCoordinates(
        false,
        minY,
        maxX,
        minX,
        (maxX - minX) / (numPointsPerSegment - 1),
        (numPointsPerSegment - 1) * 3,
        coordinates);
    return coordinates;
  }

  /**
   * Fills one edge of the boundary ring with evenly spaced points, holding either X or Y constant.
   *
   * @param constantX true to hold X fixed and vary Y, false to hold Y fixed and vary X
   * @param constant the fixed coordinate value
   * @param start starting value of the varying coordinate
   * @param stop ending value of the varying coordinate (may be less than start)
   * @param inc step size (always positive; direction comes from start/stop ordering)
   * @param coordinateArrayOffset index in {@code coordinates} at which to begin writing
   * @param coordinates output array
   */
  private static void fillCoordinates(
      final boolean constantX,
      final double constant,
      final double start,
      final double stop,
      final double inc,
      final int coordinateArrayOffset,
      final Point2D[] coordinates) {
    int i = coordinateArrayOffset;
    if (constantX) {
      final double x = constant;
      if (stop < start) {
        for (double y = start; y >= stop; y -= inc) {
          coordinates[i++] = new Point2D.Double(x, y);
        }
      } else {
        for (double y = start; y <= stop; y += inc) {
          coordinates[i++] = new Point2D.Double(x, y);
        }
      }
    } else {
      final double y = constant;
      if (stop < start) {
        double x = start;
        while (x >= stop) {
          coordinates[i] = new Point2D.Double(x, y);
          i++;
          // recompute from start each step rather than decrementing; presumably to avoid
          // accumulated floating-point drift — TODO confirm why only this branch does so
          x = start - ((i - coordinateArrayOffset) * inc);
        }
      } else {
        for (double x = start; x <= stop; x += inc) {
          coordinates[i++] = new Point2D.Double(x, y);
        }
      }
    }
  }

  /**
   * Creates a math transform using the information provided.
   *
   * @param idRangePerDimension the per-dimension scale (extent of one grid cell in world units)
   * @param fullBounds the full world-space bounds of the index
   * @return The math transform.
   * @throws IllegalStateException if the grid range or the envelope were not set.
   */
  public static MathTransform createTransform(
      final double[] idRangePerDimension,
      final MultiDimensionalNumericData fullBounds) throws IllegalStateException {
    final GridToEnvelopeMapper mapper = new GridToEnvelopeMapper();
    final boolean swapXY = mapper.getSwapXY();
    final boolean[] reverse = mapper.getReverseAxis();
    final PixelInCell gridType = PixelInCell.CELL_CORNER;
    final int dimension = 2;
    /*
     * Setup the multi-dimensional affine transform for use with OpenGIS. According OpenGIS
     * specification, transforms must map pixel center. This is done by adding 0.5 to grid
     * coordinates.
     */
    final double translate;
    if (PixelInCell.CELL_CENTER.equals(gridType)) {
      translate = 0.5;
    } else if (PixelInCell.CELL_CORNER.equals(gridType)) {
      translate = 0.0;
    } else {
      throw new IllegalStateException(
          Errors.format(ErrorKeys.ILLEGAL_ARGUMENT_$2, "gridType", gridType));
    }
    final Matrix matrix = MatrixFactory.create(dimension + 1);
    final Double[] minValuesPerDimension = fullBounds.getMinValuesPerDimension();
    final Double[] maxValuesPerDimension = fullBounds.getMaxValuesPerDimension();
    for (int i = 0; i < dimension; i++) {
      // NOTE: i is a dimension in the 'gridRange' space (source
      // coordinates).
      // j is a dimension in the 'userRange' space (target coordinates).
      int j = i;
      if (swapXY) {
        j = 1 - j;
      }
      double scale = idRangePerDimension[j];
      double offset;
      if ((reverse == null) || (j >= reverse.length) || !reverse[j]) {
        offset = minValuesPerDimension[j];
      } else {
        // reversed axis: flip the scale and anchor the offset at the max bound
        scale = -scale;
        offset = maxValuesPerDimension[j];
      }
      offset -= scale * (-translate);
      // clear the identity diagonal before writing the (possibly swapped) scale cell
      matrix.setElement(j, j, 0.0);
      matrix.setElement(j, i, scale);
      matrix.setElement(j, dimension, offset);
    }
    return ProjectiveTransform.create(matrix);
  }

  /**
   * Returns the math transform as a two-dimensional affine transform.
   *
   * @return The math transform as a two-dimensional affine transform.
   * @throws IllegalStateException if the math transform is not of the appropriate type.
   */
  public static AffineTransform createAffineTransform(
      final double[] idRangePerDimension,
      final MultiDimensionalNumericData fullBounds) throws IllegalStateException {
    final MathTransform transform = createTransform(idRangePerDimension, fullBounds);
    if (transform instanceof AffineTransform) {
      return (AffineTransform) transform;
    }
    throw new IllegalStateException(Errors.format(ErrorKeys.NOT_AN_AFFINE_TRANSFORM));
  }

  /**
   * Fills each band of the raster with the band's first no-data value. Rasters larger than
   * MAX_FILL_SIZE are filled in MAX_FILL_SIZE_WIDTH x MAX_FILL_SIZE_HEIGHT tiles to cap the size
   * of the temporary fill array. A no-op if {@code noDataValues} is null or has fewer entries
   * than the raster has bands.
   *
   * @param raster the raster to fill (mutated in place)
   * @param noDataValues per-band no-data values; only the first value per band is used
   */
  public static void fillWithNoDataValues(
      final WritableRaster raster,
      final double[][] noDataValues) {
    if ((noDataValues != null) && (noDataValues.length >= raster.getNumBands())) {
      final int fillSize = raster.getWidth() * raster.getHeight();
      final double[] noDataFilledArray;
      if (fillSize > MAX_FILL_SIZE) {
        noDataFilledArray = new double[MAX_FILL_SIZE];
      } else {
        noDataFilledArray = new double[fillSize];
      }
      for (int b = 0; b < raster.getNumBands(); b++) {
        if ((noDataValues[b] != null) && (noDataValues[b].length > 0)) {
          // just fill every sample in this band with the first no
          // data value for that band
          Arrays.fill(noDataFilledArray, noDataValues[b][0]);
          if (fillSize > MAX_FILL_SIZE) {
            final int maxX = (raster.getMinX() + raster.getWidth());
            final int maxY = (raster.getMinY() + raster.getHeight());
            for (int x = raster.getMinX(); x < maxX; x += MAX_FILL_SIZE_WIDTH) {
              for (int y = raster.getMinY(); y < maxY; y += MAX_FILL_SIZE_HEIGHT) {
                // clamp the final tile in each direction to the raster edge
                raster.setSamples(
                    x,
                    y,
                    ((x + MAX_FILL_SIZE_WIDTH) > maxX) ? maxX - x : MAX_FILL_SIZE_WIDTH,
                    ((y + MAX_FILL_SIZE_HEIGHT) > maxY) ? maxY - y : MAX_FILL_SIZE_HEIGHT,
                    b,
                    noDataFilledArray);
              }
            }
          } else {
            raster.setSamples(
                raster.getMinX(),
                raster.getMinY(),
                raster.getWidth(),
                raster.getHeight(),
                b,
                noDataFilledArray);
          }
        }
      }
    }
  }

  /**
   * Mosaics a set of grid coverage tiles into a single coverage covering the request envelope,
   * then optionally rescales to the requested pixel dimension, stretches to 8-bit using global
   * extrema, masks an output transparent color, and applies histogram equalization.
   *
   * @param gridCoverages tiles to mosaic (iterated once)
   * @param backgroundColor background for pixels no tile covers (when no no-data values given)
   * @param outputTransparentColor color to mask to transparent in the output, or null
   * @param pixelDimension requested output size in pixels (must not be null)
   * @param requestEnvelope world-space envelope of the request
   * @param levelResX native x resolution of the tile level being mosaicked
   * @param levelResY native y resolution of the tile level being mosaicked
   * @param noDataValues per-band no-data values used to pre-fill the mosaic, or null
   * @param xAxisSwitch true if the output CRS has flipped axis order
   * @param coverageFactory factory used to build the result coverage
   * @param coverageName name for the result coverage
   * @param interpolation interpolation used when rescaling to the requested size
   * @param histogram if non-null, histogram equalization is applied using it
   * @param scaleTo8BitSet whether the scale-to-8-bit flag was explicitly configured
   * @param scaleTo8Bit whether float/double data should also be rescaled to bytes
   * @param defaultColorModel color model used when no tiles were provided
   * @return the mosaicked (and post-processed) coverage
   * @throws IllegalArgumentException if pixelDimension is null
   */
  public static synchronized GridCoverage2D mosaicGridCoverages(
      final Iterator gridCoverages,
      final Color backgroundColor,
      final Color outputTransparentColor,
      final Rectangle pixelDimension,
      final GeneralEnvelope requestEnvelope,
      final double levelResX,
      final double levelResY,
      final double[][] noDataValues,
      final boolean xAxisSwitch,
      final GridCoverageFactory coverageFactory,
      final String coverageName,
      final Interpolation interpolation,
      final Histogram histogram,
      final boolean scaleTo8BitSet,
      final boolean scaleTo8Bit,
      final ColorModel defaultColorModel) {
    if (pixelDimension == null) {
      LOGGER.error("Pixel dimension can not be null");
      throw new IllegalArgumentException("Pixel dimension can not be null");
    }
    // ratio of the level's native resolution to the requested on-screen resolution
    final double rescaleX = levelResX / (requestEnvelope.getSpan(0) / pixelDimension.getWidth());
    final double rescaleY = levelResY / (requestEnvelope.getSpan(1) / pixelDimension.getHeight());
    final double width = pixelDimension.getWidth() / rescaleX;
    final double height = pixelDimension.getHeight() / rescaleY;
    final int imageWidth = (int) Math.max(Math.round(width), 1);
    final int imageHeight = (int) Math.max(Math.round(height), 1);
    BufferedImage image = null;
    int numDimensions;
    SampleDimension[] sampleDimensions = null;
    double[][] extrema = null;
    boolean extremaValid = false;
    while (gridCoverages.hasNext()) {
      final GridCoverage currentCoverage = gridCoverages.next();
      if (sampleDimensions == null) {
        // take sample-dimension metadata (including global extrema) from the first tile
        numDimensions = currentCoverage.getNumSampleDimensions();
        sampleDimensions = new SampleDimension[numDimensions];
        extrema = new double[2][numDimensions];
        extremaValid = true;
        for (int d = 0; d < numDimensions; d++) {
          sampleDimensions[d] = currentCoverage.getSampleDimension(d);
          extrema[0][d] = sampleDimensions[d].getMinimumValue();
          extrema[1][d] = sampleDimensions[d].getMaximumValue();
          // a non-positive range makes the extrema unusable for 8-bit rescaling
          if ((extrema[1][d] - extrema[0][d]) <= 0) {
            extremaValid = false;
          }
        }
      }
      final Envelope coverageEnv = currentCoverage.getEnvelope();
      final RenderedImage coverageImage = currentCoverage.getRenderedImage();
      if (image == null) {
        image = copyImage(imageWidth, imageHeight, backgroundColor, noDataValues, coverageImage);
      }
      // place the tile at its pixel offset within the request envelope
      final int posx = (int) ((coverageEnv.getMinimum(0) - requestEnvelope.getMinimum(0)) / levelResX);
      final int posy = (int) ((requestEnvelope.getMaximum(1) - coverageEnv.getMaximum(1)) / levelResY);
      image.getRaster().setRect(posx, posy, coverageImage.getData());
    }
    if (image == null) {
      image = getEmptyImage(
          imageWidth,
          imageHeight,
          backgroundColor,
          null, // the transparent color
          // will be used later
          defaultColorModel);
    }
    GeneralEnvelope resultEnvelope = null;
    if (xAxisSwitch) {
      // swap x/y spans and minimums for flipped-axis CRSs
      final Rectangle2D tmp =
          new Rectangle2D.Double(
              requestEnvelope.getMinimum(1),
              requestEnvelope.getMinimum(0),
              requestEnvelope.getSpan(1),
              requestEnvelope.getSpan(0));
      resultEnvelope = new GeneralEnvelope(tmp);
      resultEnvelope.setCoordinateReferenceSystem(requestEnvelope.getCoordinateReferenceSystem());
    } else {
      resultEnvelope = requestEnvelope;
    }
    // account for rounding of width/height when deciding whether a rescale is needed
    final double scaleX = rescaleX * (width / imageWidth);
    final double scaleY = rescaleY * (height / imageHeight);
    if ((Math.abs(scaleX - 1) > FloatCompareUtils.COMP_EPSILON)
        || (Math.abs(scaleY - 1) > FloatCompareUtils.COMP_EPSILON)) {
      image = rescaleImageViaPlanarImage(
          interpolation,
          rescaleX * (width / imageWidth),
          rescaleY * (height / imageHeight),
          image);
    }
    RenderedImage result = image;
    // hypothetically masking the output transparent color should happen
    // before histogram stretching, but the masking seems to only work now
    // when the image is bytes in each band which requires some amount of
    // modification to the original data, we'll use extrema
    if (extremaValid && scaleTo8Bit) {
      final int dataType = result.getData().getDataBuffer().getDataType();
      // NOTE: the cases below intentionally fall through to the rescale-to-byte branch
      switch (dataType) {
        // in case the original image has a USHORT pixel type without
        // being associated
        // with an index color model I would still go to 8 bits
        case DataBuffer.TYPE_USHORT:
          if (result.getColorModel() instanceof IndexColorModel) {
            break;
          }
        case DataBuffer.TYPE_DOUBLE:
        case DataBuffer.TYPE_FLOAT:
          // float/double only rescale when the flag was explicitly set (USHORT always does)
          if (!scaleTo8BitSet && (dataType != DataBuffer.TYPE_USHORT)) {
            break;
          }
        case DataBuffer.TYPE_INT:
        case DataBuffer.TYPE_SHORT:
          // rescale to byte
          final ImageWorkerPredefineStats w = new ImageWorkerPredefineStats(result);
          // it was found that geoserver will perform this, and worse
          // perform it on local extrema calculated from a single
          // tile, this is our one opportunity to at least ensure this
          // transformation is done without too much harm by using
          // global extrema
          result = w.setExtrema(extrema).rescaleToBytes().getRenderedImage();
          break;
        default:
          // findbugs seems to want to have a default case, default is
          // to do nothing
          break;
      }
    }
    if (outputTransparentColor != null) {
      result = ImageUtilities.maskColor(outputTransparentColor, result);
    }
    if (histogram != null) {
      // we should perform histogram equalization
      final int numBands = histogram.getNumBands();
      final float[][] cdFeq = new float[numBands][];
      final double[][] computedExtrema = new double[2][numBands];
      for (int b = 0; b < numBands; b++) {
        computedExtrema[0][b] = histogram.getLowValue(b);
        computedExtrema[1][b] = histogram.getHighValue(b);
        final int numBins = histogram.getNumBins()[b];
        // uniform target CDF drives the "matchcdf" operation to equalize the histogram
        cdFeq[b] = new float[numBins];
        for (int i = 0; i < numBins; i++) {
          cdFeq[b][i] = (float) (i + 1) / (float) (numBins);
        }
      }
      final RenderedImageAdapter adaptedResult = new RenderedImageAdapter(result);
      adaptedResult.setProperty("histogram", histogram);
      adaptedResult.setProperty("extrema", computedExtrema);
      result = JAI.create("matchcdf", adaptedResult, cdFeq);
    }
    return coverageFactory.create(coverageName, result, resultEnvelope);
  }

  // NOTE(review): this field appears unused within the visible portion of the class — confirm
  // whether it can be removed upstream.
  private static long i = 0;

  /**
   * Renders an {@link Image} into a new {@link BufferedImage} of the given type.
   *
   * @param image the source image
   * @param type a {@link BufferedImage} TYPE_* constant
   * @return the rendered copy
   */
  @SuppressFBWarnings(
      value = {"RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT"},
      justification = "incorrect; drawImage has side effects")
  public static BufferedImage toBufferedImage(final Image image, final int type) {
    final BufferedImage bi = new BufferedImage(image.getWidth(null), image.getHeight(null), type);
    final Graphics g = bi.getGraphics();
    g.drawImage(image, 0, 0, null);
    g.dispose();
    return bi;
  }

  /**
   * Creates a target-sized image compatible with the original (same sample model, color model, and
   * properties), pre-filled with no-data values if given, otherwise flood-filled with the
   * background color.
   */
  private static BufferedImage copyImage(
      final int targetWidth,
      final int targetHeight,
      final Color backgroundColor,
      final double[][] noDataValues,
      final RenderedImage originalImage) {
    Hashtable properties = null;
    if (originalImage.getPropertyNames() != null) {
      properties = new Hashtable<>();
      for (final String name : originalImage.getPropertyNames()) {
        properties.put(name, originalImage.getProperty(name));
      }
    }
    final SampleModel sm =
        originalImage.getSampleModel().createCompatibleSampleModel(targetWidth, targetHeight);
    final WritableRaster raster = Raster.createWritableRaster(sm, null);
    final ColorModel colorModel = originalImage.getColorModel();
    final boolean alphaPremultiplied = colorModel.isAlphaPremultiplied();
    RasterUtils.fillWithNoDataValues(raster, noDataValues);
    final BufferedImage image =
        new BufferedImage(colorModel, raster, alphaPremultiplied, properties);
    if (noDataValues == null) {
      // no no-data fill was applied, so paint the background color instead
      final Graphics2D g2D = (Graphics2D) image.getGraphics();
      final Color save = g2D.getColor();
      g2D.setColor(backgroundColor);
      g2D.fillRect(0, 0, image.getWidth(), image.getHeight());
      g2D.setColor(save);
    }
    return image;
  }

  /**
   * Scales an image by the given factors via a JAI planar-image scale operation, preserving the
   * source color model. Falls back to the unscaled image if the scaled raster is incompatible
   * with the color model.
   */
  private static BufferedImage rescaleImageViaPlanarImage(
      final Interpolation interpolation,
      final double rescaleX,
      final double rescaleY,
      final BufferedImage image) {
    final PlanarImage planarImage = new TiledImage(image, image.getWidth(), image.getHeight());
    final ImageWorker w = new ImageWorker(planarImage);
    w.scale((float) rescaleX, (float) rescaleY, 0.0f, 0.0f, interpolation);
    final RenderedOp result = w.getRenderedOperation();
    final Raster raster = result.getData();
    final WritableRaster scaledImageRaster;
    if (raster instanceof WritableRaster) {
      scaledImageRaster = (WritableRaster) raster;
    } else {
      scaledImageRaster = raster.createCompatibleWritableRaster();
      scaledImageRaster.setDataElements(0, 0, raster);
    }
    final ColorModel colorModel = image.getColorModel();
    try {
      final BufferedImage scaledImage =
          new BufferedImage(colorModel, scaledImageRaster, image.isAlphaPremultiplied(), null);
      return scaledImage;
    } catch (final IllegalArgumentException e) {
      LOGGER.warn("Unable to rescale image", e);
      return image;
    }
  }

  /**
   * Replaces the rendering hints used for coverage resampling operations.
   *
   * <p>NOTE(review): this setter is not synchronized while {@link #getCoverageOperations()} is —
   * confirm whether a racing read during a hint change matters here.
   */
  public static void forceRenderingHints(final RenderingHints renderingHints) {
    resampleOperations = new Operations(renderingHints);
  }

  /** @return the shared coverage Operations, lazily created with the default quality hints */
  public static synchronized Operations getCoverageOperations() {
    if (resampleOperations == null) {
      resampleOperations = new Operations(DEFAULT_RENDERING_HINTS);
    }
    return resampleOperations;
  }

  /**
   * Creates an image using the given color model, flood-filled with the background color and,
   * optionally, with the given transparent color masked out.
   */
  public static BufferedImage getEmptyImage(
      final int width,
      final int height,
      final Color backgroundColor,
      final Color outputTransparentColor,
      final ColorModel defaultColorModel) {
    BufferedImage emptyImage =
        new BufferedImage(
            defaultColorModel,
            defaultColorModel.createCompatibleWritableRaster(width, height),
            defaultColorModel.isAlphaPremultiplied(),
            null);
    final Graphics2D g2D = (Graphics2D) emptyImage.getGraphics();
    final Color save = g2D.getColor();
    g2D.setColor(backgroundColor);
    g2D.fillRect(0, 0, emptyImage.getWidth(), emptyImage.getHeight());
    g2D.setColor(save);
    if (outputTransparentColor != null) {
      emptyImage =
          new RenderedImageAdapter(
              ImageUtilities.maskColor(outputTransparentColor, emptyImage)).getAsBufferedImage();
    }
    return emptyImage;
  }

  /**
   * Creates a square banded raster of doubles with every sample initialized to NaN.
   *
   * @param numBands number of bands
   * @param tileSize width and height in pixels
   * @return the NaN-filled raster
   */
  public static WritableRaster createRasterTypeDouble(final int numBands, final int tileSize) {
    final WritableRaster raster =
        RasterFactory.createBandedRaster(
            DataBuffer.TYPE_DOUBLE,
            tileSize,
            tileSize,
            numBands,
            null);
    final double[] defaultValues = new double[tileSize * tileSize * numBands];
    Arrays.fill(defaultValues, Double.NaN);
    raster.setDataElements(0, 0, tileSize, tileSize, defaultValues);
    return raster;
  }

  /** Convenience overload with no merge strategy. */
  public static RasterDataAdapter createDataAdapterTypeDouble(
      final String coverageName,
      final int numBands,
      final int tileSize) {
    return createDataAdapterTypeDouble(coverageName, numBands, tileSize, null);
  }

  /** Convenience overload with no per-band min/max/name metadata. */
  public static RasterDataAdapter createDataAdapterTypeDouble(
      final String coverageName,
      final int numBands,
      final int tileSize,
      final RasterTileMergeStrategy mergeStrategy) {
    return createDataAdapterTypeDouble(
        coverageName,
        numBands,
        tileSize,
        null,
        null,
        null,
        mergeStrategy);
  }

  /**
   * Creates a {@link RasterDataAdapter} for double-typed tiles where NaN serves as both the
   * no-data and background value for every band.
   *
   * @param coverageName name of the coverage the adapter manages
   * @param numBands number of bands
   * @param tileSize tile width/height in pixels
   * @param minsPerBand per-band minimum values, or null
   * @param maxesPerBand per-band maximum values, or null
   * @param namesPerBand per-band names, or null
   * @param mergeStrategy tile merge strategy, or null
   * @return the configured adapter
   */
  public static RasterDataAdapter createDataAdapterTypeDouble(
      final String coverageName,
      final int numBands,
      final int tileSize,
      final double[] minsPerBand,
      final double[] maxesPerBand,
      final String[] namesPerBand,
      final RasterTileMergeStrategy mergeStrategy) {
    final double[][] noDataValuesPerBand = new double[numBands][];
    final double[] backgroundValuesPerBand = new double[numBands];
    final int[] bitsPerSample = new int[numBands];
    for (int i = 0; i < numBands; i++) {
      noDataValuesPerBand[i] = new double[] {Double.valueOf(Double.NaN)};
      backgroundValuesPerBand[i] = Double.valueOf(Double.NaN);
      bitsPerSample[i] = DataBuffer.getDataTypeSize(DataBuffer.TYPE_DOUBLE);
    }
    final SampleModel sampleModel = createRasterTypeDouble(numBands, tileSize).getSampleModel();
    return new RasterDataAdapter(
        coverageName,
        sampleModel,
        // BogusColorSpace lets us carry an arbitrary band count without a real color space
        new ComponentColorModel(
            new BogusColorSpace(numBands),
            bitsPerSample,
            false,
            false,
            Transparency.OPAQUE,
            DataBuffer.TYPE_DOUBLE),
        new HashMap(),
        tileSize,
        minsPerBand,
        maxesPerBand,
        namesPerBand,
        noDataValuesPerBand,
        backgroundValuesPerBand,
        null,
        false,
        Interpolation.INTERP_NEAREST,
        false,
        mergeStrategy);
  }

  /**
   * Creates a coverage from a raster over the given lon/lat extent in the default CRS, falling
   * back to a bare Envelope2D if the default CRS cannot be used.
   */
  public static GridCoverage2D createCoverageTypeDouble(
      final String coverageName,
      final double westLon,
      final double eastLon,
      final double southLat,
      final double northLat,
      final WritableRaster raster) {
    final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);
    Envelope mapExtent;
    try {
      mapExtent =
          new ReferencedEnvelope(
              westLon,
              eastLon,
              southLat,
              northLat,
              GeometryUtils.getDefaultCRS());
    } catch (final IllegalArgumentException e) {
      LOGGER.warn("Unable to use default CRS", e);
      mapExtent =
          new Envelope2D(
              new DirectPosition2D(westLon, southLat),
              new DirectPosition2D(eastLon, northLat));
    }
    return gcf.create(coverageName, raster, mapExtent);
  }

  /** Convenience overload using the default CRS code. */
  public static GridCoverage2D createCoverageTypeDouble(
      final String coverageName,
      final double westLon,
      final double eastLon,
      final double southLat,
      final double northLat,
      final double[] minPerBand,
      final double[] maxPerBand,
      final String[] namePerBand,
      final WritableRaster raster) {
    return createCoverageTypeDouble(
        coverageName,
        westLon,
        eastLon,
        southLat,
        northLat,
        minPerBand,
        maxPerBand,
        namePerBand,
        raster,
        GeometryUtils.DEFAULT_CRS_STR);
  }

  /**
   * Creates a coverage from a raster with per-band sample dimension metadata in the given CRS.
   *
   * @param crsCode CRS authority code; null/empty/default resolve to the default CRS
   * @throws RuntimeException if a non-default CRS code cannot be decoded
   */
  public static GridCoverage2D createCoverageTypeDouble(
      final String coverageName,
      final double westLon,
      final double eastLon,
      final double southLat,
      final double northLat,
      final double[] minPerBand,
      final double[] maxPerBand,
      final String[] namePerBand,
      final WritableRaster raster,
      final String crsCode) {
    final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);
    Envelope mapExtent;
    CoordinateReferenceSystem crs = null;
    if ((crsCode == null) || crsCode.isEmpty() || crsCode.equals(GeometryUtils.DEFAULT_CRS_STR)) {
      crs = GeometryUtils.getDefaultCRS();
    } else {
      try {
        crs = CRS.decode(crsCode);
      } catch (final FactoryException e) {
        LOGGER.error("Unable to decode " + crsCode + " CRS", e);
        throw new RuntimeException("Unable to initialize " + crsCode + " object", e);
      }
    }
    try {
      mapExtent = new ReferencedEnvelope(westLon, eastLon, southLat, northLat, crs);
    } catch (final IllegalArgumentException e) {
      LOGGER.warn("Unable to use default CRS", e);
      mapExtent =
          new Envelope2D(
              new DirectPosition2D(westLon, southLat),
              new DirectPosition2D(eastLon, northLat));
    }
    final GridSampleDimension[] bands = new GridSampleDimension[raster.getNumBands()];
    // `create` is a private helper defined later in this file (beyond this excerpt); it
    // populates `bands` from the per-band min/max/name metadata
    create(namePerBand, raster.getSampleModel(), minPerBand, maxPerBand, bands);
    return gcf.create(coverageName, raster, mapExtent, bands);
  }
  /**
   * NOTE: This is a small bit of functionality "inspired by"
   * org.geotools.coverage.grid.RenderedSampleDimension i.e. some of the code has been
   * modified/simplified from the original version, but it had private visibility and could not be
   * re-used as is. Creates a set of sample dimensions for the data backing the given iterator.
   * Particularly, it was desirable to be able to provide the name per band which was not provided
   * in the original.
   *
   * @param name The name for each band of the data (e.g. "Elevation").
   * @param model The image or raster sample model.
   * @param min The minimal value, or {@code null} for computing it automatically.
   * @param max The maximal value, or {@code null} for computing it automatically.
   * @param dst The array where to store sample dimensions. The array length must matches the number
   *        of bands.
   */
  private static void create(
      final CharSequence[] name,
      final SampleModel model,
      final double[] min,
      final double[] max,
      final GridSampleDimension[] dst) {
    final int numBands = dst.length;
    // Validate that every supplied per-band array matches the destination band count.
    if ((min != null) && (min.length != numBands)) {
      throw new IllegalArgumentException(
          Errors.format(ErrorKeys.NUMBER_OF_BANDS_MISMATCH_$3, numBands, min.length, "min[i]"));
    }
    if ((name != null) && (name.length != numBands)) {
      throw new IllegalArgumentException(
          Errors.format(ErrorKeys.NUMBER_OF_BANDS_MISMATCH_$3, numBands, name.length, "name[i]"));
    }
    if ((max != null) && (max.length != numBands)) {
      throw new IllegalArgumentException(
          Errors.format(ErrorKeys.NUMBER_OF_BANDS_MISMATCH_$3, numBands, max.length, "max[i]"));
    }
    /*
     * Arguments are know to be valids. We now need to compute two ranges:
     *
     * STEP 1: Range of target (sample) values. This is computed in the following block. STEP 2:
     * Range of source (geophysics) values. It will be computed one block later.
     *
     * The target (sample) values will typically range from 0 to 255 or 0 to 65535, but the general
     * case is handled as well. If the source (geophysics) raster uses floating point numbers, then
     * a "nodata" category may be added in order to handle NaN values. If the source raster use
     * integer numbers instead, then we will rescale samples only if they would not fit in the
     * target data type.
     */
    final SampleDimensionType sourceType = TypeMap.getSampleDimensionType(model, 0);
    final boolean sourceIsFloat = TypeMap.isFloatingPoint(sourceType);
    // Default to TYPE_BYTE for floating point images only; otherwise
    // keep unchanged.
    final SampleDimensionType targetType =
        sourceIsFloat ? SampleDimensionType.UNSIGNED_8BITS : sourceType;
    // Default setting: no scaling
    final boolean targetIsFloat = TypeMap.isFloatingPoint(targetType);
    NumberRange targetRange = TypeMap.getRange(targetType);
    Category[] categories = new Category[1];
    // NOTE(review): needScaling is computed but no longer consumed by the live code below
    // (the scaling branch was removed from this simplified copy); kept for fidelity with
    // the GeoTools original it was derived from.
    final boolean needScaling;
    if (targetIsFloat) {
      // Never rescale if the target is floating point numbers.
      needScaling = false;
    } else if (sourceIsFloat) {
      // Always rescale for "float to integer" conversions. In addition,
      // Use 0 value as a "no data" category for unsigned data type only.
      needScaling = true;
      if (!TypeMap.isSigned(targetType)) {
        categories = new Category[2];
        categories[1] = Category.NODATA;
        targetRange = TypeMap.getPositiveRange(targetType);
      }
    } else {
      // In "integer to integer" conversions, rescale only if
      // the target range is smaller than the source range.
      needScaling = !targetRange.contains(TypeMap.getRange(sourceType));
    }
    /*
     * Now, constructs the sample dimensions. We will inconditionnaly provides a "nodata" category
     * for floating point images targeting unsigned integers, since we don't know if the user plan
     * to have NaN values. Even if the current image doesn't have NaN values, it could have NaN
     * later if the image uses a writable raster.
     */
    for (int b = 0; b < numBands; b++) {
      // One qualitative category per band, carrying the caller-supplied band name.
      categories[0] = new Category(name[b], (Color) null, targetRange);
      dst[b] = new GridSampleDimension(name[b], categories, null);
    }
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/Resolution.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.adapter.raster;

import java.nio.ByteBuffer;
import java.util.Arrays;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * Value object describing a raster's resolution per dimension. Resolutions are ordered by the sum
 * of their per-dimension values, so an overall coarser resolution compares greater.
 *
 * <p>Fix: implements {@code Comparable<Resolution>} (was the raw type {@code Comparable}, whose
 * abstract method is {@code compareTo(Object)} — the {@code @Override} on
 * {@code compareTo(Resolution)} does not satisfy the raw interface).
 */
public class Resolution implements Comparable<Resolution>, Persistable {
  private double[] resolutionPerDimension;

  /** No-arg constructor required for {@link Persistable} round-tripping. */
  protected Resolution() {}

  /**
   * @param resolutionPerDimension the resolution of each dimension; the array is stored as-is
   *        (not copied)
   */
  public Resolution(final double[] resolutionPerDimension) {
    this.resolutionPerDimension = resolutionPerDimension;
  }

  /** @return the number of dimensions this resolution describes */
  public int getDimensions() {
    return resolutionPerDimension.length;
  }

  /** @return the resolution of the given dimension index */
  public double getResolution(final int dimension) {
    return resolutionPerDimension[dimension];
  }

  /** @return the backing per-dimension array (not a copy — callers must not mutate it) */
  public double[] getResolutionPerDimension() {
    return resolutionPerDimension;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + Arrays.hashCode(resolutionPerDimension);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final Resolution other = (Resolution) obj;
    return Arrays.equals(resolutionPerDimension, other.resolutionPerDimension);
  }

  /**
   * Orders by the sum of per-dimension resolutions.
   *
   * <p>NOTE(review): this ordering is not consistent with {@link #equals(Object)} — two distinct
   * arrays with equal sums compare as 0.
   */
  @Override
  public int compareTo(final Resolution o) {
    double resSum = 0;
    double otherResSum = 0;
    for (final double res : resolutionPerDimension) {
      resSum += res;
    }
    for (final double res : o.resolutionPerDimension) {
      otherResSum += res;
    }
    return Double.compare(resSum, otherResSum);
  }

  /** Serializes as 8 bytes per dimension, in dimension order. */
  @Override
  public byte[] toBinary() {
    final ByteBuffer buf = ByteBuffer.allocate(resolutionPerDimension.length * 8);
    for (final double val : resolutionPerDimension) {
      buf.putDouble(val);
    }
    return buf.array();
  }

  /** Inverse of {@link #toBinary()}: the dimension count is implied by the payload length. */
  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int length = bytes.length / 8;
    resolutionPerDimension = new double[length];
    for (int i = 0; i < length; i++) {
      resolutionPerDimension[i] = buf.getDouble();
    }
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/ClientMergeableRasterTile.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import java.awt.image.DataBuffer; import java.awt.image.SampleModel; import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.persist.Persistable; public class ClientMergeableRasterTile extends RasterTile { private RasterTileMergeStrategy mergeStrategy; private SampleModel sampleModel; public ClientMergeableRasterTile() {} public ClientMergeableRasterTile( final RasterTileMergeStrategy mergeStrategy, final SampleModel sampleModel, final DataBuffer dataBuffer, final T metadata) { super(dataBuffer, metadata); this.mergeStrategy = mergeStrategy; this.sampleModel = sampleModel; } @Override public void merge(final Mergeable merge) { if ((mergeStrategy != null) && (merge != null) && (merge instanceof RasterTile)) { mergeStrategy.merge(this, (RasterTile) merge, sampleModel); } else { super.merge(merge); } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/GridCoverageWritable.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.adapter.raster.adapter;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.geotools.referencing.CRS;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.clearspring.analytics.util.Varint;

/**
 * This class is used by GridCoverageDataAdapter to persist GridCoverages. The adapter has
 * information regarding the sample model and color model so all that is necessary to persist is
 * the buffer and the envelope.
 *
 * <p>Wire format: varint tile length, tile bytes, four doubles (minX, maxX, minY, maxY), varint
 * CRS-string byte length, CRS-string bytes (empty string denotes the default CRS).
 */
public class GridCoverageWritable implements Writable {
  private static final Logger LOGGER = LoggerFactory.getLogger(GridCoverageWritable.class);
  private RasterTile rasterTile;
  private double minX;
  private double maxX;
  private double minY;
  private double maxY;
  private CoordinateReferenceSystem crs;

  /** No-arg constructor required by the {@link Writable} contract. */
  public GridCoverageWritable() {}

  public GridCoverageWritable(
      final RasterTile rasterTile,
      final double minX,
      final double maxX,
      final double minY,
      final double maxY,
      final CoordinateReferenceSystem crs) {
    this.rasterTile = rasterTile;
    this.minX = minX;
    this.maxX = maxX;
    this.minY = minY;
    this.maxY = maxY;
    this.crs = crs;
  }

  public CoordinateReferenceSystem getCrs() {
    return crs;
  }

  public RasterTile getRasterTile() {
    return rasterTile;
  }

  public double getMinX() {
    return minX;
  }

  public double getMaxX() {
    return maxX;
  }

  public double getMinY() {
    return minY;
  }

  public double getMaxY() {
    return maxY;
  }

  @Override
  public void readFields(final DataInput input) throws IOException {
    final int rasterTileSize = Varint.readUnsignedVarInt(input);
    final byte[] rasterTileBinary = new byte[rasterTileSize];
    input.readFully(rasterTileBinary);
    rasterTile = new RasterTile();
    rasterTile.fromBinary(rasterTileBinary);
    minX = input.readDouble();
    maxX = input.readDouble();
    minY = input.readDouble();
    maxY = input.readDouble();
    final int crsStrSize = Varint.readUnsignedVarInt(input);
    if (crsStrSize > 0) {
      final byte[] crsStrBytes = new byte[crsStrSize];
      input.readFully(crsStrBytes);
      final String crsStr = StringUtils.stringFromBinary(crsStrBytes);
      try {
        crs = CRS.decode(crsStr);
      } catch (final FactoryException e) {
        LOGGER.error("Unable to decode " + crsStr + " CRS", e);
        throw new RuntimeException("Unable to decode " + crsStr + " CRS", e);
      }
    } else {
      // An empty CRS string is the compact encoding for the default CRS.
      crs = GeometryUtils.getDefaultCRS();
    }
  }

  @Override
  public void write(final DataOutput output) throws IOException {
    final byte[] rasterTileBinary = rasterTile.toBinary();
    Varint.writeUnsignedVarInt(rasterTileBinary.length, output);
    output.write(rasterTileBinary);
    output.writeDouble(minX);
    output.writeDouble(maxX);
    output.writeDouble(minY);
    output.writeDouble(maxY);
    final String crsStr =
        (crs == null) || GeometryUtils.getDefaultCRS().equals(crs) ? "" : CRS.toSRS(crs);
    // Fix: prefix with the encoded BYTE length, not crsStr.length() (character count).
    // readFields() reads exactly this many bytes, so the old prefix corrupted the stream
    // for any SRS string whose encoding is longer than its char count (non-ASCII).
    final byte[] crsStrBytes = StringUtils.stringToBinary(crsStr);
    Varint.writeUnsignedVarInt(crsStrBytes.length, output);
    output.write(crsStrBytes);
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/InternalRasterDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */

package org.locationtech.geowave.adapter.raster.adapter;

import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding;
import org.locationtech.geowave.core.store.adapter.FitToIndexPersistenceEncoding;
import org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapterImpl;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.VisibilityHandler;
import org.locationtech.geowave.core.store.data.MultiFieldPersistentDataset;
import org.locationtech.geowave.core.store.data.PersistentDataset;
import org.locationtech.geowave.core.store.data.SingleFieldPersistentDataset;
import org.locationtech.geowave.core.store.dimension.NumericDimensionField;
import org.locationtech.geowave.core.store.index.CommonIndexModel;
import org.opengis.coverage.grid.GridCoverage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Internal (adapter-id-bearing) wrapper around {@link RasterDataAdapter} that translates between
 * {@link GridCoverage} entries and GeoWave persistence encodings. The raster tile is stored as a
 * single extended-data field keyed by {@code RasterDataAdapter.DATA_FIELD_ID}.
 */
public class InternalRasterDataAdapter extends InternalDataAdapterImpl {
  private static final Logger LOGGER = LoggerFactory.getLogger(InternalRasterDataAdapter.class);

  /** No-arg constructor for deserialization. */
  public InternalRasterDataAdapter() {}

  public InternalRasterDataAdapter(final RasterDataAdapter adapter, final short adapterId) {
    super(adapter, adapterId);
  }

  public InternalRasterDataAdapter(
      final RasterDataAdapter adapter,
      final short adapterId,
      final VisibilityHandler visibilityHandler) {
    super(adapter, adapterId, visibilityHandler);
  }

  /**
   * Reconstructs a {@link GridCoverage} from an encoded row, or returns null when the stored
   * value is missing or not a {@link RasterTile}.
   */
  @Override
  public GridCoverage decode(
      final IndexedAdapterPersistenceEncoding data,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    final Object rasterTile =
        data.getAdapterExtendedData().getValue(RasterDataAdapter.DATA_FIELD_ID);
    if ((rasterTile == null) || !(rasterTile instanceof RasterTile)) {
      return null;
    }
    // The insertion keys locate the tile within the index so the coverage can be
    // re-anchored to its spatial extent.
    return ((RasterDataAdapter) adapter).getCoverageFromRasterTile(
        (RasterTile) rasterTile,
        data.getInsertionPartitionKey(),
        data.getInsertionSortKey(),
        index);
  }

  /**
   * Encodes a coverage into a persistence encoding. Coverages are expected to already be fit to
   * the index (carrying partition/sort keys); a plain coverage is encoded anyway with a warning.
   */
  @Override
  public AdapterPersistenceEncoding encode(
      final GridCoverage entry,
      final AdapterToIndexMapping indexMapping,
      final Index index) {
    final PersistentDataset adapterExtendedData = new SingleFieldPersistentDataset<>();
    adapterExtendedData.addValue(
        RasterDataAdapter.DATA_FIELD_ID,
        ((RasterDataAdapter) adapter).getRasterTileFromCoverage(entry));
    final AdapterPersistenceEncoding encoding;
    if (entry instanceof FitToIndexGridCoverage) {
      encoding =
          new FitToIndexPersistenceEncoding(
              getAdapterId(),
              new byte[0],
              new MultiFieldPersistentDataset<>(),
              adapterExtendedData,
              ((FitToIndexGridCoverage) entry).getPartitionKey(),
              ((FitToIndexGridCoverage) entry).getSortKey());
    } else {
      // this shouldn't happen
      LOGGER.warn("Grid coverage is not fit to the index");
      encoding =
          new AdapterPersistenceEncoding(
              getAdapterId(),
              new byte[0],
              new MultiFieldPersistentDataset<>(),
              adapterExtendedData);
    }
    return encoding;
  }

  /** Raster data has no common-index fields; everything lives in adapter-extended data. */
  @Override
  public boolean isCommonIndexField(
      final AdapterToIndexMapping indexMapping,
      final String fieldName) {
    return false;
  }

  /**
   * Field ordering: the index model's dimension fields first (in model order), then the raster
   * data field. Returns -1 for unknown field names.
   */
  @Override
  public int getPositionOfOrderedField(final CommonIndexModel model, final String fieldName) {
    int i = 0;
    for (final NumericDimensionField dimensionField : model.getDimensions()) {
      if (fieldName.equals(dimensionField.getFieldName())) {
        return i;
      }
      i++;
    }
    // The data field sorts immediately after all dimension fields.
    if (fieldName.equals(RasterDataAdapter.DATA_FIELD_ID)) {
      return i;
    }
    return -1;
  }

  /**
   * Inverse of {@link #getPositionOfOrderedField}: maps an ordinal back to a dimension field
   * name, or to the data field for the position just past the dimensions. Returns null for
   * out-of-range positions.
   */
  @Override
  public String getFieldNameForPosition(final CommonIndexModel model, final int position) {
    if (position < model.getDimensions().length) {
      int i = 0;
      for (final NumericDimensionField dimensionField : model.getDimensions()) {
        if (i == position) {
          return dimensionField.getFieldName();
        }
        i++;
      }
    } else {
      final int numDimensions = model.getDimensions().length;
      if (position == numDimensions) {
        return RasterDataAdapter.DATA_FIELD_ID;
      }
    }
    return null;
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/MosaicPropertyGenerator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import java.awt.image.RenderedImage; import java.awt.image.renderable.ParameterBlock; import javax.media.jai.RenderedOp; import org.geotools.coverage.util.CoverageUtilities; import com.sun.media.jai.util.PropertyGeneratorImpl; public class MosaicPropertyGenerator extends PropertyGeneratorImpl { /** */ private static final long serialVersionUID = 1L; public MosaicPropertyGenerator() { super( new String[] {"sourceThreshold"}, new Class[] {double[][].class}, new Class[] {RenderedOp.class}); } @Override public Object getProperty(final String name, final Object opNode) { validate(name, opNode); if ((opNode instanceof RenderedOp) && name.equalsIgnoreCase("sourceThreshold")) { final RenderedOp op = (RenderedOp) opNode; final ParameterBlock pb = op.getParameterBlock(); // Retrieve the rendered source image and its ROI. final RenderedImage src = pb.getRenderedSource(0); final Object property = src.getProperty("sourceThreshold"); if (property != null) { return property; } // Getting the Threshold to use final double threshold = CoverageUtilities.getMosaicThreshold(src.getSampleModel().getDataType()); // Setting the Threshold object for the mosaic return new double[][] {{threshold}}; } return java.awt.Image.UndefinedProperty; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterDataAdapter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import java.awt.Color; import java.awt.Point; import java.awt.Rectangle; import java.awt.geom.AffineTransform; import java.awt.geom.NoninvertibleTransformException; import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.DataBuffer; import java.awt.image.Raster; import java.awt.image.RenderedImage; import java.awt.image.SampleModel; import java.awt.image.WritableRaster; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NavigableMap; import java.util.Set; import java.util.TreeMap; import javax.measure.Unit; import javax.media.jai.Interpolation; import javax.media.jai.InterpolationBicubic2; import javax.media.jai.InterpolationBilinear; import javax.media.jai.InterpolationNearest; import javax.media.jai.PlanarImage; import javax.media.jai.remote.SerializableState; import javax.media.jai.remote.SerializerFactory; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.math.util.MathUtils; import org.geotools.coverage.Category; import org.geotools.coverage.CoverageFactoryFinder; import org.geotools.coverage.GridSampleDimension; import org.geotools.coverage.TypeMap; import org.geotools.coverage.grid.GridCoverage2D; import 
org.geotools.coverage.grid.GridCoverageFactory; import org.geotools.coverage.grid.GridEnvelope2D; import org.geotools.coverage.grid.GridGeometry2D; import org.geotools.coverage.processing.Operations; import org.geotools.coverage.util.CoverageUtilities; import org.geotools.geometry.GeneralEnvelope; import org.geotools.geometry.jts.GeometryClipper; import org.geotools.geometry.jts.JTS; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.metadata.i18n.Vocabulary; import org.geotools.metadata.i18n.VocabularyKeys; import org.geotools.referencing.operation.projection.MapProjection; import org.geotools.referencing.operation.transform.AffineTransform2D; import org.geotools.renderer.lite.RendererUtilities; import org.geotools.util.NumberRange; import org.geotools.util.SimpleInternationalString; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.adapter.raster.Resolution; import org.locationtech.geowave.adapter.raster.adapter.merge.MultiAdapterServerMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileRowTransform; import org.locationtech.geowave.adapter.raster.adapter.merge.SingleAdapterServerMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.warp.WarpRIF; import org.locationtech.geowave.adapter.raster.stats.HistogramConfig; import org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic; import org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic; import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic; import org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; 
import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.store.dimension.CustomCRSSpatialDimension; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.CompoundIndexStrategy; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy; import org.locationtech.geowave.core.index.IndexUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder; import org.locationtech.geowave.core.store.adapter.IndexDependentDataAdapter; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.api.VisibilityHandler; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldWriter; import 
org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider;
import org.locationtech.geowave.core.store.util.CompoundHierarchicalIndexStrategyWrapper;
import org.locationtech.geowave.core.store.util.IteratorWrapper;
import org.locationtech.geowave.core.store.util.IteratorWrapper.Converter;
import org.locationtech.geowave.mapreduce.HadoopDataAdapter;
import org.locationtech.geowave.mapreduce.HadoopWritableSerializer;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.coverage.ColorInterpretation;
import org.opengis.coverage.SampleDimension;
import org.opengis.coverage.SampleDimensionType;
import org.opengis.coverage.grid.GridCoverage;
import org.opengis.coverage.grid.GridEnvelope;
import org.opengis.geometry.Envelope;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.datum.PixelInCell;
import org.opengis.referencing.operation.TransformException;
import org.opengis.util.InternationalString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Lists;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * GeoWave data adapter for raster (grid coverage) data. It tiles coverages into fixed-size
 * {@code RasterTile}s per index insertion id (see {@code convertToIndex}), optionally building a
 * pyramid of lower-resolution levels, and reconstitutes {@code GridCoverage2D}s from stored tiles
 * (see {@code getCoverageFromRasterTile}/{@code prepareCoverage}).
 *
 * <p>NOTE(review): the generic type parameters on this implements clause appear to have been
 * stripped by text extraction (note the stray '>' after RowMergingDataAdapter) — verify against the
 * repository source before editing.
 */
public class RasterDataAdapter implements IndexDependentDataAdapter, HadoopDataAdapter,
    RowMergingDataAdapter>, DefaultStatisticsProvider {
  // Moved static initialization to constructor (staticInit)

  /** Coverage property key under which optional per-tile merge metadata is exposed. */
  public static final String TILE_METADATA_PROPERTY_KEY = "TILE_METADATA";
  // one-time JAI/GeoTools registration state, guarded by double-checked locking in staticInit()
  private static boolean classInit = false;
  private static Object CLASS_INIT_MUTEX = new Object();
  private static final Logger LOGGER = LoggerFactory.getLogger(RasterDataAdapter.class);
  /** Field name of the single (image) field this adapter encodes. */
  protected static final String DATA_FIELD_ID = "image";
  public static final int DEFAULT_TILE_SIZE = 256;
  public static final boolean DEFAULT_BUILD_PYRAMID = false;
  public static final boolean DEFAULT_BUILD_HISTOGRAM = true;
  private static final FieldDescriptor IMAGE_FIELD =
      new FieldDescriptorBuilder<>(RasterTile.class).fieldName(DATA_FIELD_ID).build();
  private static final FieldDescriptor[] FIELDS = new FieldDescriptor[] {IMAGE_FIELD};
  /** A transparent color for missing data. */
  private static final Color TRANSPARENT = new Color(0, 0, 0, 0);

  private String coverageName;
  protected int tileSize;
  // tile sample/color model used both when encoding tiles and rebuilding coverages
  private SampleModel sampleModel;
  private ColorModel colorModel;
  // free-form coverage metadata, copied into each reconstructed coverage's properties
  private Map metadata;
  // null histogramConfig disables histogram statistics accumulation
  private HistogramConfig histogramConfig;
  private double[][] noDataValuesPerBand;
  private double[] minsPerBand;
  private double[] maxesPerBand;
  private String[] namesPerBand;
  private double[] backgroundValuesPerBand;
  private boolean buildPyramid;
  private RasterTileMergeStrategy mergeStrategy;
  private boolean equalizeHistogram;
  private Interpolation interpolation;

  /** No-arg constructor required for persistence/serialization round-tripping. */
  public RasterDataAdapter() {}

  /**
   * Convenience constructor using default tile size, pyramid, and histogram settings; no-data
   * values are derived from the coverage's sample dimensions.
   */
  public RasterDataAdapter(
      final String coverageName,
      final Map metadata,
      final GridCoverage2D originalGridCoverage) {
    this(
        coverageName,
        metadata,
        originalGridCoverage,
        DEFAULT_TILE_SIZE,
        DEFAULT_BUILD_PYRAMID,
        DEFAULT_BUILD_HISTOGRAM,
        new double[originalGridCoverage.getNumSampleDimensions()][],
        new NoDataMergeStrategy());
  }

  /** Convenience constructor with explicit tile size and pyramid flag. */
  public RasterDataAdapter(
      final String coverageName,
      final Map metadata,
      final GridCoverage2D originalGridCoverage,
      final int tileSize,
      final boolean buildPyramid) {
    this(
        coverageName,
        metadata,
        originalGridCoverage,
        tileSize,
        buildPyramid,
        DEFAULT_BUILD_HISTOGRAM,
        new double[originalGridCoverage.getNumSampleDimensions()][],
        new NoDataMergeStrategy());
  }

  /** Convenience constructor with explicit no-data values; uses {@link NoDataMergeStrategy}. */
  public RasterDataAdapter(
      final String coverageName,
      final Map metadata,
      final GridCoverage2D originalGridCoverage,
      final int tileSize,
      final boolean buildPyramid,
      final boolean buildHistogram,
      final double[][] noDataValuesPerBand) {
    this(
        coverageName,
        metadata,
        originalGridCoverage,
        tileSize,
        buildPyramid,
        buildHistogram,
        noDataValuesPerBand,
        new NoDataMergeStrategy());
  }

  /**
   * Primary constructor deriving the sample/color model, no-data and background values from an
   * example grid coverage.
   */
  public RasterDataAdapter(
      final String coverageName,
      final Map metadata,
      final GridCoverage2D
          originalGridCoverage,
      final int tileSize,
      final boolean buildPyramid,
      final boolean buildHistogram,
      final double[][] noDataValuesPerBand,
      final RasterTileMergeStrategy mergeStrategy) {
    staticInit();
    final RenderedImage img = originalGridCoverage.getRenderedImage();
    final SampleModel imgSampleModel = img.getSampleModel();
    // reshape the coverage's sample model to the requested tile dimensions if it differs
    if ((imgSampleModel.getWidth() != tileSize) || (imgSampleModel.getHeight() != tileSize)) {
      sampleModel = imgSampleModel.createCompatibleSampleModel(tileSize, tileSize);
    } else {
      sampleModel = imgSampleModel;
    }
    colorModel = img.getColorModel();
    this.metadata = metadata;
    this.coverageName = coverageName;
    this.tileSize = tileSize;
    // a null histogram config disables histogram statistics accumulation
    if (buildHistogram) {
      histogramConfig = new HistogramConfig(sampleModel);
    } else {
      histogramConfig = null;
    }
    if ((noDataValuesPerBand != null) && (noDataValuesPerBand.length != 0)) {
      this.noDataValuesPerBand = noDataValuesPerBand;
      backgroundValuesPerBand = new double[noDataValuesPerBand.length];
      // use the first no-data value of each band as that band's background fill (0.0 if none)
      for (int d = 0; d < this.noDataValuesPerBand.length; d++) {
        if ((noDataValuesPerBand[d] != null) && (noDataValuesPerBand[d].length > 0)) {
          backgroundValuesPerBand[d] = noDataValuesPerBand[d][0];
        } else {
          backgroundValuesPerBand[d] = 0.0;
        }
      }
    } else {
      // no explicit no-data values: derive them (and background values) from the coverage
      this.noDataValuesPerBand = new double[originalGridCoverage.getNumSampleDimensions()][];
      for (int d = 0; d < this.noDataValuesPerBand.length; d++) {
        this.noDataValuesPerBand[d] = originalGridCoverage.getSampleDimension(d).getNoDataValues();
      }
      backgroundValuesPerBand = CoverageUtilities.getBackgroundValues(originalGridCoverage);
    }
    this.buildPyramid = buildPyramid;
    this.mergeStrategy = mergeStrategy;
  }

  /**
   * Constructor from explicit sample/color models; defaults to histogram equalization enabled,
   * nearest-neighbor interpolation, and {@link NoDataMergeStrategy}.
   */
  public RasterDataAdapter(
      final String coverageName,
      final SampleModel sampleModel,
      final ColorModel colorModel,
      final Map metadata,
      final int tileSize,
      final double[][] noDataValuesPerBand,
      final double[] backgroundValuesPerBand,
      final boolean buildPyramid) {
    this(
        coverageName,
        sampleModel,
        colorModel,
        metadata,
        tileSize,
        noDataValuesPerBand,
        backgroundValuesPerBand,
        new HistogramConfig(sampleModel),
        true,
        Interpolation.INTERP_NEAREST,
        buildPyramid,
        new NoDataMergeStrategy());
  }

  /** Copy constructor renaming the coverage, keeping the source adapter's tile size. */
  public RasterDataAdapter(final RasterDataAdapter adapter, final String coverageName) {
    this(adapter, coverageName, adapter.tileSize);
  }

  /** Copy constructor renaming the coverage and re-tiling to a new tile size. */
  public RasterDataAdapter(
      final RasterDataAdapter adapter,
      final String coverageName,
      final int tileSize) {
    this(
        coverageName,
        adapter.getSampleModel().createCompatibleSampleModel(tileSize, tileSize),
        adapter.getColorModel(),
        adapter.getMetadata(),
        tileSize,
        adapter.getNoDataValuesPerBand(),
        adapter.backgroundValuesPerBand,
        adapter.histogramConfig,
        adapter.equalizeHistogram,
        interpolationToByte(adapter.interpolation),
        adapter.buildPyramid,
        adapter.mergeStrategy == null ? null : adapter.mergeStrategy);
  }

  /** Copy constructor substituting a different tile merge strategy. */
  public RasterDataAdapter(
      final RasterDataAdapter adapter,
      final String coverageName,
      final RasterTileMergeStrategy mergeStrategy) {
    this(
        coverageName,
        adapter.getSampleModel(),
        adapter.getColorModel(),
        adapter.getMetadata(),
        adapter.tileSize,
        null,
        null,
        null,
        adapter.getNoDataValuesPerBand(),
        adapter.backgroundValuesPerBand,
        adapter.histogramConfig,
        adapter.equalizeHistogram,
        interpolationToByte(adapter.interpolation),
        adapter.buildPyramid,
        mergeStrategy);
  }

  /** Delegates to the full constructor with null per-band mins/maxes/names. */
  public RasterDataAdapter(
      final String coverageName,
      final SampleModel sampleModel,
      final ColorModel colorModel,
      final Map metadata,
      final int tileSize,
      final double[][] noDataValuesPerBand,
      final double[] backgroundValuesPerBand,
      final HistogramConfig histogramConfig,
      final boolean equalizeHistogram,
      final int interpolationType,
      final boolean buildPyramid,
      final RasterTileMergeStrategy mergeStrategy) {
    this(
        coverageName,
        sampleModel,
        colorModel,
        metadata,
        tileSize,
        null,
        null,
        null,
        noDataValuesPerBand,
        backgroundValuesPerBand,
        histogramConfig,
        equalizeHistogram,
        interpolationType,
        buildPyramid,
        mergeStrategy);
  }

  /** Full constructor: every adapter attribute is supplied explicitly. */
  public RasterDataAdapter(
      final String coverageName,
      final SampleModel sampleModel,
      final ColorModel colorModel,
      final Map metadata,
      final int tileSize,
      final double[] minsPerBand,
      final double[] maxesPerBand,
      final String[] namesPerBand,
      final double[][] noDataValuesPerBand,
      final double[] backgroundValuesPerBand,
      final HistogramConfig histogramConfig,
      final boolean equalizeHistogram,
      final int interpolationType,
      final boolean buildPyramid,
      final RasterTileMergeStrategy mergeStrategy) {
    staticInit();
    this.coverageName = coverageName;
    this.tileSize = tileSize;
    // reshape the supplied sample model to the requested tile dimensions if it differs
    if ((sampleModel.getWidth() != tileSize) || (sampleModel.getHeight() != tileSize)) {
      this.sampleModel = sampleModel.createCompatibleSampleModel(tileSize, tileSize);
    } else {
      this.sampleModel = sampleModel;
    }
    this.colorModel = colorModel;
    this.metadata = metadata;
    this.minsPerBand = minsPerBand;
    this.maxesPerBand = maxesPerBand;
    this.namesPerBand = namesPerBand;
    this.noDataValuesPerBand = noDataValuesPerBand;
    this.backgroundValuesPerBand = backgroundValuesPerBand;
    // a null histogram config will result in histogram statistics not being
    // accumulated
    this.histogramConfig = histogramConfig;
    this.buildPyramid = buildPyramid;
    this.equalizeHistogram = equalizeHistogram;
    interpolation = Interpolation.getInstance(interpolationType);
    this.mergeStrategy = mergeStrategy;
  }

  /**
   * One-time registration of JAI operations and GeoTools hooks. Called from every constructor;
   * cheap after the first call.
   */
  @SuppressFBWarnings
  private static void staticInit() {
    // check outside of synchronized block to optimize performance
    if (!classInit) {
      synchronized (CLASS_INIT_MUTEX) {
        // check again within synchronized block to ensure thread safety
        if (!classInit) {
          try {
            GeometryUtils.initClassLoader();
            SourceThresholdFixMosaicDescriptor.register(false);
            WarpRIF.register(false);
            MapProjection.SKIP_SANITY_CHECKS = true;
            classInit = true;
          } catch (final Exception e) {
            LOGGER.error("Error in static init", e);
          }
        }
      }
    }
  }

  /**
   * Converts a grid coverage into an iterator of tile-sized coverages, one per insertion id at
   * each applicable pyramid level of the index's hierarchical strategy. The coverage's envelope is
   * reprojected to the index CRS when necessary; returns an empty iterator (with a warning) when
   * the index strategy is not hierarchical.
   */
  @Override
  public Iterator convertToIndex(final Index index, final GridCoverage gridCoverage) {
    final HierarchicalNumericIndexStrategy indexStrategy =
        CompoundHierarchicalIndexStrategyWrapper.findHierarchicalStrategy(index.getIndexStrategy());
    if (indexStrategy != null) {
      final CoordinateReferenceSystem sourceCrs = gridCoverage.getCoordinateReferenceSystem();
      final Envelope sampleEnvelope = gridCoverage.getEnvelope();
      final ReferencedEnvelope sampleReferencedEnvelope =
          new ReferencedEnvelope(
              new org.locationtech.jts.geom.Envelope(
                  sampleEnvelope.getMinimum(0),
                  sampleEnvelope.getMaximum(0),
                  sampleEnvelope.getMinimum(1),
                  sampleEnvelope.getMaximum(1)),
              gridCoverage.getCoordinateReferenceSystem());
      ReferencedEnvelope projectedReferenceEnvelope = sampleReferencedEnvelope;
      final CoordinateReferenceSystem indexCrs = GeometryUtils.getIndexCrs(index);
      // reproject the coverage envelope into the index CRS when they differ
      if (!indexCrs.equals(sourceCrs)) {
        try {
          projectedReferenceEnvelope = sampleReferencedEnvelope.transform(indexCrs, true);
        } catch (TransformException | FactoryException e) {
          LOGGER.warn("Unable to transform envelope of grid coverage to Index CRS", e);
        }
      }
      final MultiDimensionalNumericData bounds;
      if (indexCrs.equals(GeometryUtils.getDefaultCRS())) {
        bounds =
            IndexUtils.clampAtIndexBounds(
                GeometryUtils.basicConstraintSetFromEnvelope(
                    projectedReferenceEnvelope).getIndexConstraints(indexStrategy),
                indexStrategy);
      } else {
        bounds =
            IndexUtils.clampAtIndexBounds(
                GeometryUtils.getBoundsFromEnvelope(projectedReferenceEnvelope),
                indexStrategy);
      }
      final GridEnvelope gridEnvelope = gridCoverage.getGridGeometry().getGridRange();
      // only one set of constraints..hence reference '0' element
      // world-units-per-tile of the source data, per dimension
      final double[] tileRangePerDimension = new double[bounds.getDimensionCount()];
      final Double[] maxValuesPerDimension = bounds.getMaxValuesPerDimension();
      final Double[] minValuesPerDimension = bounds.getMinValuesPerDimension();
      for (int d = 0; d < tileRangePerDimension.length; d++) {
        tileRangePerDimension[d] =
            ((maxValuesPerDimension[d] - minValuesPerDimension[d]) * tileSize)
                / gridEnvelope.getSpan(d);
      }
      final TreeMap substrategyMap = new TreeMap<>();
      for (final SubStrategy pyramidLevel : indexStrategy.getSubStrategies()) {
        final double[] idRangePerDimension =
            pyramidLevel.getIndexStrategy().getHighestPrecisionIdRangePerDimension();
        // to create a pyramid, ingest into each substrategy that is
        // lower resolution than the sample set in at least one
        // dimension and the one substrategy that is at least the same
        // resolution or higher resolution to retain the original
        // resolution as well as possible
        double maxSubstrategyResToSampleSetRes = -Double.MAX_VALUE;
        for (int d = 0; d < tileRangePerDimension.length; d++) {
          final double substrategyResToSampleSetRes =
              idRangePerDimension[d] / tileRangePerDimension[d];
          maxSubstrategyResToSampleSetRes =
              Math.max(maxSubstrategyResToSampleSetRes, substrategyResToSampleSetRes);
        }
        substrategyMap.put(maxSubstrategyResToSampleSetRes, pyramidLevel);
      }
      // all entries will be greater than 1 (lower resolution pyramid
      // levels)
      // also try to find the one entry that is closest to 1.0 without
      // going over (this will be the full resolution level)
      // add an epsilon to try to catch any roundoff error
      final double fullRes = 1.0 + MathUtils.EPSILON;
      final Entry fullResEntry = substrategyMap.floorEntry(fullRes);
      final List pyramidLevels = new ArrayList<>();
      if (fullResEntry != null) {
        pyramidLevels.add(fullResEntry.getValue());
      }
      if (buildPyramid) {
        // include every coarser (lower resolution) level as well
        final NavigableMap map = substrategyMap.tailMap(fullRes, false);
        pyramidLevels.addAll(map.values());
      }
      if (pyramidLevels.isEmpty()) {
        // this case shouldn't occur theoretically, but just in case,
        // make sure the substrategy closest to 1.0 is used
        final Entry bestEntry = substrategyMap.higherEntry(1.0);
        pyramidLevels.add(bestEntry.getValue());
      }
      return new IteratorWrapper<>(
          pyramidLevels.iterator(),
          new MosaicPerPyramidLevelBuilder(
              bounds,
              gridCoverage,
              tileSize,
              backgroundValuesPerBand,
              RasterUtils.getFootprint(projectedReferenceEnvelope, gridCoverage),
              interpolation,
              projectedReferenceEnvelope.getCoordinateReferenceSystem()));
    }
    LOGGER.warn(
        "Strategy is not an instance of HierarchicalNumericIndexStrategy : "
            + index.getIndexStrategy().getClass().getName());
    return Collections.emptyIterator();
  }

  /**
   * Converter producing, for one pyramid level, an iterator of tile-sized coverages resampled from
   * the original coverage — one per insertion id covering the original bounds.
   */
  private static class
      MosaicPerPyramidLevelBuilder implements Converter {
    private final MultiDimensionalNumericData originalBounds;
    private final GridCoverage originalData;
    private final int tileSize;
    private final double[] backgroundValuesPerBand;
    // footprint of the original coverage in the index CRS
    private final Geometry footprint;
    private final Interpolation defaultInterpolation;
    private final CoordinateReferenceSystem crs;

    public MosaicPerPyramidLevelBuilder(
        final MultiDimensionalNumericData originalBounds,
        final GridCoverage originalData,
        final int tileSize,
        final double[] backgroundValuesPerBand,
        final Geometry footprint,
        final Interpolation defaultInterpolation,
        final CoordinateReferenceSystem crs) {
      this.originalBounds = originalBounds;
      this.originalData = originalData;
      this.tileSize = tileSize;
      this.backgroundValuesPerBand = backgroundValuesPerBand;
      this.footprint = footprint;
      this.defaultInterpolation = defaultInterpolation;
      this.crs = crs;
    }

    @Override
    public Iterator convert(final SubStrategy pyramidLevel) {
      // get all pairs of partition/sort keys for insertionIds that
      // represent the original bounds at this pyramid level
      final Iterator> insertionIds =
          pyramidLevel.getIndexStrategy().getInsertionIds(
              originalBounds).getPartitionKeys().stream().flatMap(
                  partition -> partition.getSortKeys().stream().map(
                      sortKey -> Pair.of(partition.getPartitionKey(), sortKey))).iterator();
      return new Iterator() {
        @Override
        public boolean hasNext() {
          return insertionIds.hasNext();
        }

        @Override
        public GridCoverage next() {
          Pair insertionId = insertionIds.next();
          if (insertionId == null) {
            return null;
          }
          final MultiDimensionalNumericData rangePerDimension =
              pyramidLevel.getIndexStrategy().getRangeForId(
                  insertionId.getLeft(),
                  insertionId.getRight());
          final NumericDimensionDefinition[] dimensions =
              pyramidLevel.getIndexStrategy().getOrderedDimensionDefinitions();
          // default assumption: dimension 0 is longitude and 1 is latitude unless declared
          int longitudeIndex = 0, latitudeIndex = 1;
          final double[] minDP = new double[2];
          final double[] maxDP = new double[2];
          for (int d = 0; d < dimensions.length; d++) {
            if (dimensions[d] instanceof LatitudeDefinition) {
              latitudeIndex = d;
              minDP[1] = originalBounds.getMinValuesPerDimension()[d];
              maxDP[1] = originalBounds.getMaxValuesPerDimension()[d];
            } else if (dimensions[d] instanceof LongitudeDefinition) {
              longitudeIndex = d;
              minDP[0] = originalBounds.getMinValuesPerDimension()[d];
              maxDP[0] = originalBounds.getMaxValuesPerDimension()[d];
            } else if (dimensions[d] instanceof CustomCRSSpatialDimension) {
              minDP[d] = originalBounds.getMinValuesPerDimension()[d];
              maxDP[d] = originalBounds.getMaxValuesPerDimension()[d];
            }
          }
          final Envelope originalEnvelope = new GeneralEnvelope(minDP, maxDP);
          final Double[] minsPerDimension = rangePerDimension.getMinValuesPerDimension();
          final Double[] maxesPerDimension = rangePerDimension.getMaxValuesPerDimension();
          final ReferencedEnvelope mapExtent =
              new ReferencedEnvelope(
                  minsPerDimension[longitudeIndex],
                  maxesPerDimension[longitudeIndex],
                  minsPerDimension[latitudeIndex],
                  maxesPerDimension[latitudeIndex],
                  crs);
          final AffineTransform worldToScreenTransform =
              RendererUtilities.worldToScreenTransform(
                  mapExtent,
                  new Rectangle(tileSize, tileSize));
          GridGeometry2D insertionIdGeometry;
          try {
            final AffineTransform2D gridToCRS =
                new AffineTransform2D(worldToScreenTransform.createInverse());
            insertionIdGeometry =
                new GridGeometry2D(
                    new GridEnvelope2D(new Rectangle(tileSize, tileSize)),
                    PixelInCell.CELL_CORNER,
                    gridToCRS,
                    crs,
                    null);
            final double[] tileRes =
                pyramidLevel.getIndexStrategy().getHighestPrecisionIdRangePerDimension();
            final double[] pixelRes = new double[tileRes.length];
            for (int d = 0; d < tileRes.length; d++) {
              pixelRes[d] = tileRes[d] / tileSize;
            }
            Geometry footprintWithinTileWorldGeom = null;
            Geometry footprintWithinTileScreenGeom = null;
            try {
              // using fixed precision for geometry factory will
              // round screen geometry values to the nearest
              // pixel, which seems to be the most appropriate
              // behavior
              final Geometry wholeFootprintScreenGeom =
                  new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)).createGeometry(
                      JTS.transform(footprint, new AffineTransform2D(worldToScreenTransform)));
              final org.locationtech.jts.geom.Envelope fullTileEnvelope =
                  new org.locationtech.jts.geom.Envelope(0, tileSize, 0, tileSize);
              final GeometryClipper tileClipper = new GeometryClipper(fullTileEnvelope);
              footprintWithinTileScreenGeom =
                  tileClipper.clip(wholeFootprintScreenGeom, true);
              if (footprintWithinTileScreenGeom == null) {
                // for some reason the original image
                // footprint
                // falls outside this insertion ID
                LOGGER.warn(
                    "Original footprint geometry ("
                        + originalData.getGridGeometry()
                        + ") falls outside the insertion bounds ("
                        + insertionIdGeometry
                        + ")");
                return null;
              }
              footprintWithinTileWorldGeom =
                  JTS.transform(
                      // change the precision model back
                      // to JTS
                      // default from fixed precision
                      new GeometryFactory().createGeometry(footprintWithinTileScreenGeom),
                      gridToCRS);
              if (footprintWithinTileScreenGeom.covers(
                  new GeometryFactory().toGeometry(fullTileEnvelope))) {
                // if the screen geometry fully covers the
                // tile,
                // don't bother carrying it forward
                footprintWithinTileScreenGeom = null;
              }
            } catch (final TransformException e) {
              LOGGER.warn("Unable to calculate geometry of footprint for tile", e);
            }
            Interpolation tileInterpolation = defaultInterpolation;
            final int dataType = originalData.getRenderedImage().getSampleModel().getDataType();
            // TODO a JAI bug "workaround" in GeoTools does not
            // work, this is a workaround for the GeoTools bug
            // see https://jira.codehaus.org/browse/GEOT-3585,
            // and
            // line 666-698 of
            // org.geotools.coverage.processing.operation.Resampler2D
            // (gt-coverage-12.1)
            if ((dataType == DataBuffer.TYPE_FLOAT) || (dataType == DataBuffer.TYPE_DOUBLE)) {
              final Envelope tileEnvelope = insertionIdGeometry.getEnvelope();
              final ReferencedEnvelope tileReferencedEnvelope =
                  new ReferencedEnvelope(
                      new org.locationtech.jts.geom.Envelope(
                          tileEnvelope.getMinimum(0),
                          tileEnvelope.getMaximum(0),
                          tileEnvelope.getMinimum(1),
                          tileEnvelope.getMaximum(1)),
                      crs);
              final Geometry tileJTSGeometry =
                  new GeometryFactory().toGeometry(tileReferencedEnvelope);
              if (!footprint.contains(tileJTSGeometry)) {
                tileInterpolation = Interpolation.getInstance(Interpolation.INTERP_NEAREST);
              }
            }
            GridCoverage resampledCoverage =
                (GridCoverage) RasterUtils.getCoverageOperations().resample(
                    originalData,
                    crs,
                    insertionIdGeometry,
                    tileInterpolation,
                    backgroundValuesPerBand);
            // NOTE: for now this is commented out, but
            // beware the
            // resample operation under certain conditions,
            // this requires more investigation rather than
            // adding a
            // hacky fix
            // sometimes the resample results in an image that
            // is
            // not tileSize in width and height although the
            // insertionIdGeometry is telling it to resample to
            // tileSize
            // in these cases, check and perform a rescale to
            // finalize the grid coverage to guarantee it is the
            // correct tileSize
            final GridEnvelope e = resampledCoverage.getGridGeometry().getGridRange();
            boolean resize = false;
            for (int d = 0; d < e.getDimension(); d++) {
              if (e.getSpan(d) != tileSize) {
                resize = true;
                break;
              }
            }
            if (resize) {
              resampledCoverage =
                  Operations.DEFAULT.scale(
                      resampledCoverage,
                      (double) tileSize / (double) e.getSpan(0),
                      (double) tileSize / (double) e.getSpan(1),
                      -resampledCoverage.getRenderedImage().getMinX(),
                      -resampledCoverage.getRenderedImage().getMinY());
            }
            // final safety net: force origin (0,0) and exact tile dimensions
            if ((resampledCoverage.getRenderedImage().getWidth() != tileSize)
                || (resampledCoverage.getRenderedImage().getHeight() != tileSize)
                || (resampledCoverage.getRenderedImage().getMinX() != 0)
                || (resampledCoverage.getRenderedImage().getMinY() != 0)) {
              resampledCoverage =
                  Operations.DEFAULT.scale(
                      resampledCoverage,
                      1,
                      1,
                      -resampledCoverage.getRenderedImage().getMinX(),
                      -resampledCoverage.getRenderedImage().getMinY());
            }
            if (pyramidLevel.getIndexStrategy() instanceof CompoundIndexStrategy) {
              // this is exclusive on the end, and the tier is set
              // so just get the id based on the lowest half of
              // the multidimensional data
              final Double[] centroids = rangePerDimension.getCentroidPerDimension();
              final Double[] mins = rangePerDimension.getMinValuesPerDimension();
              final NumericRange[] ranges = new NumericRange[centroids.length];
              for (int d = 0; d < centroids.length; d++) {
                ranges[d] = new NumericRange(mins[d], centroids[d]);
              }
              insertionId =
                  pyramidLevel.getIndexStrategy().getInsertionIds(
                      new BasicNumericDataset(ranges)).getFirstPartitionAndSortKeyPair();
              // this is intended to allow the partitioning
              // algorithm to use a consistent multi-dimensional
              // dataset (so if hashing is done on the
              // multi-dimensional data, it will be a consistent
              // hash for each tile and merge strategies will work
              // correctly)
            }
            return new FitToIndexGridCoverage(
                resampledCoverage,
                insertionId.getLeft(),
                insertionId.getRight(),
                new Resolution(pixelRes),
                originalEnvelope,
                footprintWithinTileWorldGeom,
                footprintWithinTileScreenGeom,
                getProperties(originalData));
          } catch (IllegalArgumentException | NoninvertibleTransformException e) {
            LOGGER.warn("Unable to calculate transformation for grid coordinates on write", e);
          }
          return null;
        }

        @Override
        public void remove() {
          insertionIds.remove();
        }
      };
    }
  }

  @Override
  public String getTypeName() {
    return getCoverageName();
  }

  /** Raster entries have no per-entry data id; an empty byte array is always returned. */
  @Override
  public byte[] getDataId(final GridCoverage entry) {
    return new byte[0];
  }

  @Override
  public InternalDataAdapter asInternalAdapter(final short internalAdapterId) {
    return new InternalRasterDataAdapter(this, internalAdapterId);
  }

  @Override
  public InternalDataAdapter asInternalAdapter(
      final short internalAdapterId,
      final VisibilityHandler visibilityHandler) {
    return new InternalRasterDataAdapter(this, internalAdapterId, visibilityHandler);
  }

  /**
   * Rebuilds a grid coverage from a stored raster tile, using the index's range for the given
   * partition/sort key as the tile's map extent. Returns null when a WGS84-style index is missing
   * latitude/longitude bounds or when coverage preparation fails.
   */
  public GridCoverage getCoverageFromRasterTile(
      final RasterTile rasterTile,
      final byte[] partitionKey,
      final byte[] sortKey,
      final Index index) {
    final MultiDimensionalNumericData indexRange =
        index.getIndexStrategy().getRangeForId(partitionKey, sortKey);
    final NumericDimensionDefinition[] orderedDimensions
        = index.getIndexStrategy().getOrderedDimensionDefinitions();
    final Double[] minsPerDimension = indexRange.getMinValuesPerDimension();
    final Double[] maxesPerDimension = indexRange.getMaxValuesPerDimension();
    Double minX = null;
    Double maxX = null;
    Double minY = null;
    Double maxY = null;
    // assume WGS84 lat/lon dimensions unless a custom CRS spatial dimension is present
    boolean wgs84 = true;
    for (int d = 0; d < orderedDimensions.length; d++) {
      if (orderedDimensions[d] instanceof LongitudeDefinition) {
        minX = minsPerDimension[d];
        maxX = maxesPerDimension[d];
      } else if (orderedDimensions[d] instanceof LatitudeDefinition) {
        minY = minsPerDimension[d];
        maxY = maxesPerDimension[d];
      } else if (orderedDimensions[d] instanceof CustomCRSSpatialDimension) {
        wgs84 = false;
      }
    }
    // a WGS84 index must provide both lat and lon bounds to build the extent
    if (wgs84 && ((minX == null) || (minY == null) || (maxX == null) || (maxY == null))) {
      return null;
    }
    final CoordinateReferenceSystem indexCrs = GeometryUtils.getIndexCrs(index);
    // NOTE(review): the extent below always reads dimensions 0/1 rather than the detected
    // longitude/latitude indices — confirm dimension ordering assumptions against the index
    final ReferencedEnvelope mapExtent =
        new ReferencedEnvelope(
            minsPerDimension[0],
            maxesPerDimension[0],
            minsPerDimension[1],
            maxesPerDimension[1],
            indexCrs);
    try {
      return prepareCoverage(rasterTile, tileSize, mapExtent);
    } catch (final IOException e) {
      LOGGER.warn("Unable to build grid coverage from adapter encoded data", e);
    }
    return null;
  }

  /**
   * This method is responsible for creating a coverage from the supplied {@link RenderedImage}.
   *
   * @param rasterTile the stored tile providing the pixel data buffer and optional tile metadata
   * @param tileSize width/height in pixels of the tile
   * @param mapExtent georeferenced extent the tile covers
   * @return the reconstructed coverage, or null if the grid-to-CRS transform cannot be derived
   * @throws IOException if a band's sample dimension type cannot be determined
   */
  private GridCoverage2D prepareCoverage(
      final RasterTile rasterTile,
      final int tileSize,
      final ReferencedEnvelope mapExtent) throws IOException {
    final DataBuffer dataBuffer = rasterTile.getDataBuffer();
    final Persistable tileMetadata = rasterTile.getMetadata();
    final SampleModel sm = sampleModel.createCompatibleSampleModel(tileSize, tileSize);
    final boolean alphaPremultiplied = colorModel.isAlphaPremultiplied();
    final WritableRaster raster = Raster.createWritableRaster(sm, dataBuffer, null);
    final int numBands = sm.getNumBands();
    final BufferedImage image =
        new BufferedImage(colorModel, raster, alphaPremultiplied, null);
    // creating bands
    final ColorModel cm = image.getColorModel();
    final GridSampleDimension[] bands = new GridSampleDimension[numBands];
    final Set bandNames = new HashSet<>();
    // setting bands names.
    for (int i = 0; i < numBands; i++) {
      ColorInterpretation colorInterpretation = null;
      String bandName = null;
      if (cm != null) {
        // === color interpretation
        colorInterpretation = TypeMap.getColorInterpretation(cm, i);
        if (colorInterpretation == null) {
          throw new IOException("Unrecognized sample dimension type");
        }
        bandName = colorInterpretation.name();
        if ((colorInterpretation == ColorInterpretation.UNDEFINED)
            || bandNames.contains(bandName)) {
          // make sure we create no duplicate band names
          bandName = "Band" + (i + 1);
        }
      } else {
        // no color model
        bandName = "Band" + (i + 1);
        colorInterpretation = ColorInterpretation.UNDEFINED;
      }
      // sample dimension type
      final SampleDimensionType st = TypeMap.getSampleDimensionType(sm, i);
      if (st == null) {
        LOGGER.error("Could not get sample dimension type, getSampleDimensionType returned null");
        throw new IOException(
            "Could not get sample dimension type, getSampleDimensionType returned null");
      }
      // set some no data values, as well as Min and Max values
      double noData;
      double min = -Double.MAX_VALUE, max = Double.MAX_VALUE;
      if (st.compareTo(SampleDimensionType.REAL_32BITS) == 0) {
        noData = Float.NaN;
      } else if (st.compareTo(SampleDimensionType.REAL_64BITS) == 0) {
        noData = Double.NaN;
      } else if (st.compareTo(SampleDimensionType.SIGNED_16BITS) == 0) {
        noData = Short.MIN_VALUE;
        min = Short.MIN_VALUE;
        max = Short.MAX_VALUE;
      } else if (st.compareTo(SampleDimensionType.SIGNED_32BITS) == 0) {
        noData = Integer.MIN_VALUE;
        min = Integer.MIN_VALUE;
        max = Integer.MAX_VALUE;
      } else if (st.compareTo(SampleDimensionType.SIGNED_8BITS) == 0) {
        noData = -128;
        min = -128;
        max = 127;
      } else {
        // unsigned
        noData = 0;
        min = 0;
        // compute max
        if (st.compareTo(SampleDimensionType.UNSIGNED_1BIT) == 0) {
          max = 1;
        } else if (st.compareTo(SampleDimensionType.UNSIGNED_2BITS) == 0) {
          max = 3;
        } else if (st.compareTo(SampleDimensionType.UNSIGNED_4BITS) == 0) {
          max = 7;
        } else if (st.compareTo(SampleDimensionType.UNSIGNED_8BITS) == 0) {
          max = 255;
        } else if (st.compareTo(SampleDimensionType.UNSIGNED_16BITS) == 0) {
          max = 65535;
        } else if (st.compareTo(SampleDimensionType.UNSIGNED_32BITS) == 0) {
          max = Math.pow(2, 32) - 1;
        }
      }
      // adapter-configured per-band overrides take precedence over type-derived defaults
      if ((noDataValuesPerBand != null)
          && (noDataValuesPerBand[i] != null)
          && (noDataValuesPerBand[i].length > 0)) {
        // just take the first value, even if there are multiple
        noData = noDataValuesPerBand[i][0];
      }
      if ((minsPerBand != null) && (minsPerBand.length > i)) {
        min = minsPerBand[i];
      }
      if ((maxesPerBand != null) && (maxesPerBand.length > i)) {
        max = maxesPerBand[i];
      }
      if ((namesPerBand != null) && (namesPerBand.length > i)) {
        bandName = namesPerBand[i];
      }
      bands[i] =
          new SimplifiedGridSampleDimension(
              bandName,
              st,
              colorInterpretation,
              noData,
              min,
              max,
              1, // no scale
              0, // no offset
              null);
    }
    final AffineTransform worldToScreenTransform =
        RendererUtilities.worldToScreenTransform(mapExtent, new Rectangle(tileSize, tileSize));
    try {
      final AffineTransform2D gridToCRS =
          new AffineTransform2D(worldToScreenTransform.createInverse());
      final GridCoverageFactory gcf = CoverageFactoryFinder.getGridCoverageFactory(null);
      final Map properties = new HashMap();
      if (metadata != null) {
        properties.putAll(metadata);
      }
      // surface per-tile merge metadata to consumers via a well-known property key
      if (tileMetadata != null) {
        properties.put(TILE_METADATA_PROPERTY_KEY, tileMetadata);
      }
      return gcf.create(
          coverageName,
          image,
          new GridGeometry2D(
              new GridEnvelope2D(PlanarImage.wrapRenderedImage(image).getBounds()),
              PixelInCell.CELL_CORNER,
              gridToCRS,
              mapExtent.getCoordinateReferenceSystem(),
              null),
          bands,
          null,
          properties);
    } catch (IllegalArgumentException | NoninvertibleTransformException e) {
      LOGGER.warn("Unable to calculate transformation for grid coordinates on read", e);
    }
    return null;
  }

  /** Extracts the properties map from known coverage implementations (empty map otherwise). */
  private static Map getProperties(final GridCoverage entry) {
    Map originalCoverageProperties = new HashMap<>();
    if (entry instanceof GridCoverage2D) {
      originalCoverageProperties = ((GridCoverage2D) entry).getProperties();
    } else if (entry instanceof FitToIndexGridCoverage) {
      originalCoverageProperties = ((FitToIndexGridCoverage) entry).getProperties();
    }
    return originalCoverageProperties;
  }

  /**
   * Wraps a coverage's pixel data as a client-mergeable raster tile, attaching merge metadata when
   * a merge strategy is configured.
   */
  public ClientMergeableRasterTile getRasterTileFromCoverage(final GridCoverage entry) {
    return new ClientMergeableRasterTile(
        mergeStrategy,
        sampleModel,
        getRaster(entry).getDataBuffer(),
        mergeStrategy == null ?
null : mergeStrategy.getMetadata(entry, this)); } public Raster getRaster(final GridCoverage entry) { final SampleModel sm = sampleModel.createCompatibleSampleModel(tileSize, tileSize); return entry.getRenderedImage().copyData(new InternalWritableRaster(sm, new Point())); } @Override public FieldReader getReader(final String fieldName) { if (DATA_FIELD_ID.equals(fieldName)) { return (FieldReader) new RasterTileReader(); } return null; } @Override public byte[] toBinary() { final byte[] coverageNameBytes = StringUtils.stringToBinary(coverageName); final byte[] sampleModelBinary = SampleModelPersistenceUtils.getSampleModelBinary(sampleModel); final byte[] colorModelBinary = getColorModelBinary(colorModel); int metadataBinaryLength = 0; final List entryBinaries = new ArrayList<>(); for (final Entry e : metadata.entrySet()) { final byte[] keyBytes = StringUtils.stringToBinary(e.getKey()); final byte[] valueBytes = e.getValue() == null ? new byte[0] : StringUtils.stringToBinary(e.getValue()); final int entryBinaryLength = VarintUtils.unsignedIntByteLength(keyBytes.length) + valueBytes.length + keyBytes.length; final ByteBuffer buf = ByteBuffer.allocate(entryBinaryLength); VarintUtils.writeUnsignedInt(keyBytes.length, buf); buf.put(keyBytes); buf.put(valueBytes); entryBinaries.add(buf.array()); metadataBinaryLength += (entryBinaryLength + VarintUtils.unsignedIntByteLength(entryBinaryLength)); } byte[] histogramConfigBinary; if (histogramConfig != null) { histogramConfigBinary = PersistenceUtils.toBinary(histogramConfig); } else { histogramConfigBinary = new byte[] {}; } final byte[] noDataBinary = getNoDataBinary(noDataValuesPerBand); final byte[] backgroundBinary; if (backgroundValuesPerBand != null) { final int totalBytes = (backgroundValuesPerBand.length * 8); final ByteBuffer backgroundBuf = ByteBuffer.allocate(totalBytes); for (final double backgroundValue : backgroundValuesPerBand) { backgroundBuf.putDouble(backgroundValue); } backgroundBinary = 
backgroundBuf.array(); } else { backgroundBinary = new byte[] {}; } final byte[] minsBinary; if (minsPerBand != null) { final int totalBytes = (minsPerBand.length * 8); final ByteBuffer minsBuf = ByteBuffer.allocate(totalBytes); for (final double min : minsPerBand) { minsBuf.putDouble(min); } minsBinary = minsBuf.array(); } else { minsBinary = new byte[] {}; } final byte[] maxesBinary; if (maxesPerBand != null) { final int totalBytes = (maxesPerBand.length * 8); final ByteBuffer maxesBuf = ByteBuffer.allocate(totalBytes); for (final double max : maxesPerBand) { maxesBuf.putDouble(max); } maxesBinary = maxesBuf.array(); } else { maxesBinary = new byte[] {}; } final byte[] namesBinary; final int namesLength; if (namesPerBand != null) { int totalBytes = 0; final List namesBinaries = new ArrayList<>(namesPerBand.length); for (final String name : namesPerBand) { final byte[] nameBinary = StringUtils.stringToBinary(name); final int size = nameBinary.length + VarintUtils.unsignedIntByteLength(nameBinary.length); final ByteBuffer nameBuf = ByteBuffer.allocate(size); totalBytes += size; VarintUtils.writeUnsignedInt(nameBinary.length, nameBuf); nameBuf.put(nameBinary); namesBinaries.add(nameBuf.array()); } final ByteBuffer namesBuf = ByteBuffer.allocate(totalBytes); for (final byte[] nameBinary : namesBinaries) { namesBuf.put(nameBinary); } namesBinary = namesBuf.array(); namesLength = namesPerBand.length; } else { namesBinary = new byte[] {}; namesLength = 0; } byte[] mergeStrategyBinary; if (mergeStrategy != null) { mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy); } else { mergeStrategyBinary = new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate( coverageNameBytes.length + sampleModelBinary.length + colorModelBinary.length + metadataBinaryLength + histogramConfigBinary.length + noDataBinary.length + minsBinary.length + maxesBinary.length + namesBinary.length + backgroundBinary.length + mergeStrategyBinary.length + 
VarintUtils.unsignedIntByteLength(tileSize) + VarintUtils.unsignedIntByteLength(coverageNameBytes.length) + VarintUtils.unsignedIntByteLength(sampleModelBinary.length) + VarintUtils.unsignedIntByteLength(colorModelBinary.length) + VarintUtils.unsignedIntByteLength(entryBinaries.size()) + VarintUtils.unsignedIntByteLength(histogramConfigBinary.length) + VarintUtils.unsignedIntByteLength(noDataBinary.length) + VarintUtils.unsignedIntByteLength(minsBinary.length) + VarintUtils.unsignedIntByteLength(maxesBinary.length) + VarintUtils.unsignedIntByteLength(namesLength) + VarintUtils.unsignedIntByteLength(backgroundBinary.length) + VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length) + 3); VarintUtils.writeUnsignedInt(tileSize, buf); VarintUtils.writeUnsignedInt(coverageNameBytes.length, buf); buf.put(coverageNameBytes); VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf); buf.put(sampleModelBinary); VarintUtils.writeUnsignedInt(colorModelBinary.length, buf); buf.put(colorModelBinary); VarintUtils.writeUnsignedInt(entryBinaries.size(), buf); for (final byte[] entryBinary : entryBinaries) { VarintUtils.writeUnsignedInt(entryBinary.length, buf); buf.put(entryBinary); } VarintUtils.writeUnsignedInt(histogramConfigBinary.length, buf); buf.put(histogramConfigBinary); VarintUtils.writeUnsignedInt(noDataBinary.length, buf); buf.put(noDataBinary); VarintUtils.writeUnsignedInt(minsBinary.length, buf); buf.put(minsBinary); VarintUtils.writeUnsignedInt(maxesBinary.length, buf); buf.put(maxesBinary); VarintUtils.writeUnsignedInt(namesLength, buf); buf.put(namesBinary); VarintUtils.writeUnsignedInt(backgroundBinary.length, buf); buf.put(backgroundBinary); VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf); buf.put(mergeStrategyBinary); buf.put(buildPyramid ? (byte) 1 : (byte) 0); buf.put(equalizeHistogram ? 
(byte) 1 : (byte) 0); buf.put(interpolationToByte(interpolation)); return buf.array(); } protected static byte interpolationToByte(final Interpolation interpolation) { // this is silly because it seems like a translation JAI should provide, // but it seems its not provided and its the most efficient approach // (rather than serializing class names) if (interpolation instanceof InterpolationNearest) { return Interpolation.INTERP_NEAREST; } if (interpolation instanceof InterpolationBilinear) { return Interpolation.INTERP_BILINEAR; } if (interpolation instanceof InterpolationBicubic2) { return Interpolation.INTERP_BICUBIC_2; } return Interpolation.INTERP_BICUBIC; } protected static byte[] getColorModelBinary(final ColorModel colorModel) { final SerializableState serializableColorModel = SerializerFactory.getState(colorModel); try { final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final ObjectOutputStream oos = new ObjectOutputStream(baos); oos.writeObject(serializableColorModel); return baos.toByteArray(); } catch (final IOException e) { LOGGER.warn("Unable to serialize sample model", e); } return new byte[] {}; } protected static byte[] getNoDataBinary(final double[][] noDataValuesPerBand) { if (noDataValuesPerBand != null) { int totalBytes = 0; final List noDataValuesBytes = new ArrayList<>(noDataValuesPerBand.length); for (final double[] noDataValues : noDataValuesPerBand) { int length = 0; if (noDataValues != null) { length = noDataValues.length; } final int thisBytes = VarintUtils.unsignedIntByteLength(length) + (length * 8); totalBytes += thisBytes; final ByteBuffer noDataBuf = ByteBuffer.allocate(thisBytes); VarintUtils.writeUnsignedInt(length, noDataBuf); if (noDataValues != null) { for (final double noDataValue : noDataValues) { noDataBuf.putDouble(noDataValue); } } noDataValuesBytes.add(noDataBuf.array()); } totalBytes += VarintUtils.unsignedIntByteLength(noDataValuesPerBand.length); final ByteBuffer noDataBuf = 
ByteBuffer.allocate(totalBytes); VarintUtils.writeUnsignedInt(noDataValuesPerBand.length, noDataBuf); for (final byte[] noDataValueBytes : noDataValuesBytes) { noDataBuf.put(noDataValueBytes); } return noDataBuf.array(); } else { return new byte[] {}; } } @Override public void fromBinary(final byte[] bytes) { staticInit(); final ByteBuffer buf = ByteBuffer.wrap(bytes); tileSize = VarintUtils.readUnsignedInt(buf); final int coverageNameLength = VarintUtils.readUnsignedInt(buf); final byte[] coverageNameBinary = ByteArrayUtils.safeRead(buf, coverageNameLength); coverageName = StringUtils.stringFromBinary(coverageNameBinary); final int sampleModelLength = VarintUtils.readUnsignedInt(buf); final byte[] sampleModelBinary = ByteArrayUtils.safeRead(buf, sampleModelLength); try { sampleModel = SampleModelPersistenceUtils.getSampleModel(sampleModelBinary); } catch (final Exception e) { LOGGER.warn("Unable to deserialize sample model", e); } final int colorModelLength = VarintUtils.readUnsignedInt(buf); final byte[] colorModelBinary = ByteArrayUtils.safeRead(buf, colorModelLength); try { final ByteArrayInputStream bais = new ByteArrayInputStream(colorModelBinary); final ObjectInputStream ois = new ObjectInputStream(bais); final Object o = ois.readObject(); if ((o instanceof SerializableState) && (((SerializableState) o).getObject() instanceof ColorModel)) { colorModel = (ColorModel) ((SerializableState) o).getObject(); } } catch (final Exception e) { LOGGER.warn("Unable to deserialize color model", e); } final int numMetadataEntries = VarintUtils.readUnsignedInt(buf); metadata = new HashMap<>(); for (int i = 0; i < numMetadataEntries; i++) { final int entryBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] entryBinary = ByteArrayUtils.safeRead(buf, entryBinaryLength); final ByteBuffer entryBuf = ByteBuffer.wrap(entryBinary); final int keyLength = VarintUtils.readUnsignedInt(entryBuf); final byte[] keyBinary = ByteArrayUtils.safeRead(entryBuf, keyLength); final 
byte[] valueBinary = new byte[entryBuf.remaining()]; entryBuf.get(valueBinary); metadata.put( StringUtils.stringFromBinary(keyBinary), StringUtils.stringFromBinary(valueBinary)); } final int histogramConfigLength = VarintUtils.readUnsignedInt(buf); if (histogramConfigLength == 0) { histogramConfig = null; } else { final byte[] histogramConfigBinary = ByteArrayUtils.safeRead(buf, histogramConfigLength); histogramConfig = (HistogramConfig) PersistenceUtils.fromBinary(histogramConfigBinary); } final int noDataBinaryLength = VarintUtils.readUnsignedInt(buf); if (noDataBinaryLength == 0) { noDataValuesPerBand = null; } else { final int numBands = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, numBands); noDataValuesPerBand = new double[numBands][]; for (int b = 0; b < noDataValuesPerBand.length; b++) { final int bandLength = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, bandLength); noDataValuesPerBand[b] = new double[bandLength]; for (int i = 0; i < noDataValuesPerBand[b].length; i++) { noDataValuesPerBand[b][i] = buf.getDouble(); } } } final int minsBinaryLength = VarintUtils.readUnsignedInt(buf); if (minsBinaryLength == 0) { minsPerBand = null; } else { ByteArrayUtils.verifyBufferSize(buf, minsBinaryLength); minsPerBand = new double[minsBinaryLength / 8]; for (int b = 0; b < minsPerBand.length; b++) { minsPerBand[b] = buf.getDouble(); } } final int maxesBinaryLength = VarintUtils.readUnsignedInt(buf); if (maxesBinaryLength == 0) { maxesPerBand = null; } else { ByteArrayUtils.verifyBufferSize(buf, maxesBinaryLength); maxesPerBand = new double[maxesBinaryLength / 8]; for (int b = 0; b < maxesPerBand.length; b++) { maxesPerBand[b] = buf.getDouble(); } } final int namesLength = VarintUtils.readUnsignedInt(buf); if (namesLength == 0) { namesPerBand = null; } else { ByteArrayUtils.verifyBufferSize(buf, namesLength); namesPerBand = new String[namesLength]; for (int b = 0; b < namesPerBand.length; b++) { final int nameSize = 
VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, nameSize); final byte[] nameBytes = new byte[nameSize]; buf.get(nameBytes); namesPerBand[b] = StringUtils.stringFromBinary(nameBytes); } } final int backgroundBinaryLength = VarintUtils.readUnsignedInt(buf); if (backgroundBinaryLength == 0) { backgroundValuesPerBand = null; } else { ByteArrayUtils.verifyBufferSize(buf, backgroundBinaryLength); backgroundValuesPerBand = new double[backgroundBinaryLength / 8]; for (int b = 0; b < backgroundValuesPerBand.length; b++) { backgroundValuesPerBand[b] = buf.getDouble(); } } final int mergeStrategyBinaryLength = VarintUtils.readUnsignedInt(buf); if (mergeStrategyBinaryLength == 0) { mergeStrategy = null; } else { final byte[] mergeStrategyBinary = ByteArrayUtils.safeRead(buf, mergeStrategyBinaryLength); mergeStrategy = (RasterTileMergeStrategy) PersistenceUtils.fromBinary(mergeStrategyBinary); } buildPyramid = (buf.get() != 0); equalizeHistogram = (buf.get() != 0); interpolation = Interpolation.getInstance(buf.get()); } @Override public FieldWriter getWriter(final String fieldName) { if (DATA_FIELD_ID.equals(fieldName)) { return (FieldWriter) new RasterTileWriter(); } return null; } public double[][] getNoDataValuesPerBand() { return noDataValuesPerBand; } public Map getMetadata() { return metadata; } public String getCoverageName() { return coverageName; } public SampleModel getSampleModel() { return sampleModel; } public ColorModel getColorModel() { return colorModel; } public int getTileSize() { return tileSize; } private static final class SimplifiedGridSampleDimension extends GridSampleDimension implements SampleDimension { /** */ private static final long serialVersionUID = 2227219522016820587L; private final double nodata; private final double minimum; private final double maximum; private final double scale; private final double offset; private final Unit unit; private final SampleDimensionType type; private final ColorInterpretation color; 
private final Category bkg; public SimplifiedGridSampleDimension( final CharSequence description, final SampleDimensionType type, final ColorInterpretation color, final double nodata, final double minimum, final double maximum, final double scale, final double offset, final Unit unit) { super( description, // first attempt to retain the min and max with a "normal" // category !Double.isNaN(minimum) && !Double.isNaN(maximum) ? new Category[] { new Category( Vocabulary.formatInternational(VocabularyKeys.NORMAL), (Color) null, NumberRange.create(minimum, maximum)),} : // if that doesn't work, attempt to retain the nodata // category !Double.isNaN(nodata) ? new Category[] { new Category( Vocabulary.formatInternational(VocabularyKeys.NODATA), new Color(0, 0, 0, 0), NumberRange.create(nodata, nodata))} : null, unit); this.nodata = nodata; this.minimum = minimum; this.maximum = maximum; this.scale = scale; this.offset = offset; this.unit = unit; this.type = type; this.color = color; bkg = new Category("Background", TRANSPARENT, 0); } @Override public double getMaximumValue() { return maximum; } @Override public double getMinimumValue() { return minimum; } @Override public double[] getNoDataValues() throws IllegalStateException { return new double[] {nodata}; } @Override public double getOffset() throws IllegalStateException { return offset; } @Override public NumberRange getRange() { return super.getRange(); } @Override public SampleDimensionType getSampleDimensionType() { return type; } @Override public Unit getUnits() { return unit; } @Override public double getScale() { return scale; } @Override public ColorInterpretation getColorInterpretation() { return color; } @Override public InternationalString[] getCategoryNames() throws IllegalStateException { return new InternationalString[] {SimpleInternationalString.wrap("Background")}; } @Override public boolean equals(final Object obj) { if (!(obj instanceof SimplifiedGridSampleDimension)) { return false; } return 
super.equals(obj); } @Override public int hashCode() { return super.hashCode(); } } private static class InternalWritableRaster extends WritableRaster { // the constructor is protected, so this class is intended as a simple // way to access the constructor protected InternalWritableRaster(final SampleModel sampleModel, final Point origin) { super(sampleModel, origin); } } public Map getConfiguredOptions(final short internalAdapterId) { final Map configuredOptions = new HashMap<>(); if (mergeStrategy != null) { final String mergeStrategyStr = ByteArrayUtils.byteArrayToString( PersistenceUtils.toBinary( new SingleAdapterServerMergeStrategy( internalAdapterId, sampleModel, mergeStrategy))); configuredOptions.put(RasterTileRowTransform.MERGE_STRATEGY_KEY, mergeStrategyStr); } return configuredOptions; } @Override public HadoopWritableSerializer createWritableSerializer() { return new HadoopWritableSerializer() { @Override public GridCoverageWritable toWritable(final GridCoverage entry) { final Envelope env = entry.getEnvelope(); final DataBuffer dataBuffer = entry.getRenderedImage().copyData( new InternalWritableRaster( sampleModel.createCompatibleSampleModel(tileSize, tileSize), new Point())).getDataBuffer(); Persistable metadata = null; if (entry instanceof GridCoverage2D) { final Object metadataObj = ((GridCoverage2D) entry).getProperty(TILE_METADATA_PROPERTY_KEY); if ((metadataObj != null) && (metadataObj instanceof Persistable)) { metadata = (Persistable) metadataObj; } } return new GridCoverageWritable( new RasterTile(dataBuffer, metadata), env.getMinimum(0), env.getMaximum(0), env.getMinimum(1), env.getMaximum(1), env.getCoordinateReferenceSystem()); } @Override public GridCoverage fromWritable(final GridCoverageWritable writable) { final ReferencedEnvelope mapExtent = new ReferencedEnvelope( writable.getMinX(), writable.getMaxX(), writable.getMinY(), writable.getMaxY(), writable.getCrs()); try { return prepareCoverage(writable.getRasterTile(), tileSize, 
mapExtent); } catch (final IOException e) { LOGGER.error("Unable to read raster data", e); } return null; } }; } public boolean isEqualizeHistogram() { return equalizeHistogram; } public Interpolation getInterpolation() { return interpolation; } @Override public Map getOptions( final short internalAdapterId, final Map existingOptions) { final Map configuredOptions = getConfiguredOptions(internalAdapterId); if (existingOptions == null) { return configuredOptions; } final Map mergedOptions = new HashMap<>(configuredOptions); for (final Entry e : existingOptions.entrySet()) { final String configuredValue = configuredOptions.get(e.getKey()); if ((e.getValue() == null) && (configuredValue == null)) { continue; } else if ((e.getValue() == null) || ((e.getValue() != null) && !e.getValue().equals(configuredValue))) { final String newValue = mergeOption(e.getKey(), e.getValue(), configuredValue); if ((newValue != null) && newValue.equals(e.getValue())) { // once merged the value didn't // change, so just continue continue; } if (newValue == null) { mergedOptions.remove(e.getKey()); } else { mergedOptions.put(e.getKey(), newValue); } } } for (final Entry e : configuredOptions.entrySet()) { if (!existingOptions.containsKey(e.getKey())) { // existing value should be null // because this key is contained in // the merged set if (e.getValue() == null) { continue; } else { final String newValue = mergeOption(e.getKey(), null, e.getValue()); if (newValue == null) { mergedOptions.remove(e.getKey()); } else { mergedOptions.put(e.getKey(), newValue); } } } } return mergedOptions; } private String mergeOption( final String optionKey, final String currentValue, final String nextValue) { if ((currentValue == null) || currentValue.trim().isEmpty()) { return nextValue; } else if ((nextValue == null) || nextValue.trim().isEmpty()) { return currentValue; } if (RasterTileRowTransform.MERGE_STRATEGY_KEY.equals(optionKey)) { final byte[] currentStrategyBytes = 
ByteArrayUtils.byteArrayFromString(currentValue); final byte[] nextStrategyBytes = ByteArrayUtils.byteArrayFromString(nextValue); final Object currentObj = PersistenceUtils.fromBinary(currentStrategyBytes); MultiAdapterServerMergeStrategy currentStrategy; if (currentObj instanceof SingleAdapterServerMergeStrategy) { currentStrategy = new MultiAdapterServerMergeStrategy<>((SingleAdapterServerMergeStrategy) currentObj); } else if (currentObj instanceof MultiAdapterServerMergeStrategy) { currentStrategy = (MultiAdapterServerMergeStrategy) currentObj; } else { // this is unexpected behavior and should never happen, consider // logging a message return nextValue; } final Object nextObj = PersistenceUtils.fromBinary(nextStrategyBytes); MultiAdapterServerMergeStrategy nextStrategy; if (nextObj instanceof SingleAdapterServerMergeStrategy) { nextStrategy = new MultiAdapterServerMergeStrategy<>((SingleAdapterServerMergeStrategy) nextObj); } else if (nextObj instanceof MultiAdapterServerMergeStrategy) { nextStrategy = (MultiAdapterServerMergeStrategy) nextObj; } else { // this is unexpected behavior and should never happen, consider // logging a message return currentValue; } currentStrategy.merge(nextStrategy); return ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(currentStrategy)); } return nextValue; } @Override public RowTransform> getTransform() { if (mergeStrategy != null) { return new RasterTileRowTransform(); } else { return null; } } @Override public Map describe() { final Map description = RowMergingDataAdapter.super.describe(); description.put("Tile Size", String.valueOf(tileSize)); return description; } @Override public Object getFieldValue(final GridCoverage entry, final String fieldName) { return getRasterTileFromCoverage(entry); } @Override public Class getDataClass() { return GridCoverage.class; } @Override public List>> getDefaultStatistics() { final List> statistics = Lists.newArrayList(); final RasterOverviewStatistic overview = new 
RasterOverviewStatistic(getTypeName()); overview.setInternal(); statistics.add(overview); final RasterBoundingBoxStatistic bbox = new RasterBoundingBoxStatistic(getTypeName()); bbox.setInternal(); statistics.add(bbox); if (histogramConfig != null) { final RasterHistogramStatistic histogram = new RasterHistogramStatistic(getTypeName(), histogramConfig); histogram.setInternal(); statistics.add(histogram); } return statistics; } @Override public RowBuilder newRowBuilder(final FieldDescriptor[] outputFieldDescriptors) { // this is not used because the decode method of internal adapter is overridden with specialized // logic return null; } @Override public FieldDescriptor[] getFieldDescriptors() { return FIELDS; } @Override public FieldDescriptor getFieldDescriptor(final String fieldName) { return IMAGE_FIELD; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterRegisteredIndexFieldMappers.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI; public class RasterRegisteredIndexFieldMappers implements IndexFieldMapperRegistrySPI { @Override public RegisteredFieldMapper[] getRegisteredFieldMappers() { return new RegisteredFieldMapper[] { new RegisteredFieldMapper(RasterTileSpatialFieldMapper::new, (short) 617)}; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTile.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import java.awt.image.DataBuffer; import java.nio.ByteBuffer; import org.locationtech.geowave.adapter.raster.util.DataBufferPersistenceUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RasterTile implements Mergeable { private static final Logger LOGGER = LoggerFactory.getLogger(RasterTile.class); private DataBuffer dataBuffer; private T metadata; public RasterTile() { super(); } public RasterTile(final DataBuffer dataBuffer, final T metadata) { this.dataBuffer = dataBuffer; this.metadata = metadata; } public DataBuffer getDataBuffer() { return dataBuffer; } public T getMetadata() { return metadata; } @Override public byte[] toBinary() { final byte[] dataBufferBinary = DataBufferPersistenceUtils.getDataBufferBinary(dataBuffer); byte[] metadataBytes; if (metadata != null) { metadataBytes = PersistenceUtils.toBinary(metadata); } else { metadataBytes = new byte[] {}; } final ByteBuffer buf = ByteBuffer.allocate( metadataBytes.length + dataBufferBinary.length + VarintUtils.unsignedIntByteLength(metadataBytes.length)); VarintUtils.writeUnsignedInt(metadataBytes.length, buf); buf.put(metadataBytes); buf.put(dataBufferBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { try { final ByteBuffer buf = 
ByteBuffer.wrap(bytes); final int metadataLength = VarintUtils.readUnsignedInt(buf); if (metadataLength > 0) { final byte[] metadataBytes = ByteArrayUtils.safeRead(buf, metadataLength); metadata = (T) PersistenceUtils.fromBinary(metadataBytes); } final byte[] dataBufferBytes = new byte[buf.remaining()]; buf.get(dataBufferBytes); dataBuffer = DataBufferPersistenceUtils.getDataBuffer(dataBufferBytes); } catch (final Exception e) { LOGGER.warn("Unable to deserialize data buffer", e); } } public void setDataBuffer(final DataBuffer dataBuffer) { this.dataBuffer = dataBuffer; } public void setMetadata(final T metadata) { this.metadata = metadata; } @Override public void merge(final Mergeable merge) { // This will get wrapped as a MergeableRasterTile by the combiner to // support merging } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTileReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import org.locationtech.geowave.core.store.data.field.FieldReader; public class RasterTileReader implements FieldReader> { @Override public RasterTile readField(final byte[] fieldData) { // the class name is not prefaced in the payload, we are assuming it is // a raster tile implementation and instantiating it directly final RasterTile retVal = new RasterTile(); if (retVal != null) { retVal.fromBinary(fieldData); } return retVal; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTileSpatialFieldMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.adapter;

import java.util.List;
import org.locationtech.geowave.core.geotime.adapter.SpatialFieldMapper;
import org.locationtech.geowave.core.store.api.RowBuilder;
import org.locationtech.jts.geom.Geometry;

/**
 * An index field mapper for `RasterTiles`. This class does not actually do any mapping because the
 * mapping is handled by special logic in the adapter. Nevertheless, it is needed so that GeoWave
 * is able to map the raster data adapter to a spatial index.
 *
 * NOTE(review): the generic type arguments on SpatialFieldMapper, List, and RowBuilder appear to
 * have been stripped from this extracted source — confirm the parameterization against the
 * original file before relying on these signatures.
 */
public class RasterTileSpatialFieldMapper extends SpatialFieldMapper {
  @Override
  protected Geometry getNativeGeometry(List nativeFieldValues) {
    // Unused, since adapter handles the mapping manually
    return null;
  }

  @Override
  public void toAdapter(final Geometry indexFieldValue, final RowBuilder rowBuilder) {
    // Unused, since adapter handles the mapping manually
  }

  @Override
  public Class adapterFieldType() {
    // the adapter field this mapper is associated with is the raster tile itself
    return RasterTile.class;
  }

  @Override
  public short adapterFieldCount() {
    // exactly one adapter field (the tile) backs the spatial index field
    return 1;
  }
}

================================================
FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/RasterTileWriter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import org.locationtech.geowave.core.store.data.field.FieldWriter; public class RasterTileWriter implements FieldWriter> { @Override public byte[] writeField(final RasterTile fieldValue) { // there is no need to preface the payload with the class name and a // length of the class name, the implementation is assumed to be known // on read so we can save space on persistence return fieldValue.toBinary(); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/ServerMergeableRasterTile.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.adapter;

import java.awt.image.DataBuffer;
import org.locationtech.geowave.adapter.raster.adapter.merge.ServerMergeStrategy;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * A raster tile that delegates server-side merging to a {@link ServerMergeStrategy} keyed by the
 * adapter id, falling back to the base tile's (no-op) merge when no strategy is configured.
 *
 * NOTE(review): the generic type parameter declaration (presumably {@code <T extends Persistable>})
 * appears to have been stripped from this extracted source — {@code T} is used below without a
 * visible declaration; confirm against the original file.
 */
public class ServerMergeableRasterTile extends RasterTile {
  // strategy that knows how to combine two tiles for this adapter; may be null
  private ServerMergeStrategy mergeStrategy;
  // internal adapter id passed to the merge strategy so it can look up per-adapter state
  private short dataAdapterId;

  public ServerMergeableRasterTile() {
    // this isn't really meant to be persisted, it is instantiated using the
    // other constructor for merging purposes only leveraging the
    // RootMergeStrategy (also not persistable)
    // because this implements mergeable though and is technically
    // persistable, this constructor is provided and is registered for
    // consistency
  }

  /**
   * @param dataBuffer the pixel samples for this tile
   * @param metadata optional per-tile metadata
   * @param mergeStrategy the server-side merge strategy; may be null to disable merging
   * @param dataAdapterId the internal adapter id used by the merge strategy
   */
  public ServerMergeableRasterTile(
      final DataBuffer dataBuffer,
      final T metadata,
      final ServerMergeStrategy mergeStrategy,
      final short dataAdapterId) {
    super(dataBuffer, metadata);
    this.mergeStrategy = mergeStrategy;
    this.dataAdapterId = dataAdapterId;
  }

  public short getDataAdapterId() {
    return dataAdapterId;
  }

  @Override
  public void merge(final Mergeable merge) {
    // only delegate when we have a strategy and the other side is actually a raster tile;
    // otherwise fall back to the base class behavior (a no-op)
    if ((mergeStrategy != null) && (merge != null) && (merge instanceof RasterTile)) {
      mergeStrategy.merge(this, (RasterTile) merge, dataAdapterId);
    } else {
      super.merge(merge);
    }
  }
}

================================================
FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/SourceThresholdFixMosaicDescriptor.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import javax.media.jai.JAI; import javax.media.jai.OperationRegistry; import javax.media.jai.ParameterListDescriptor; import javax.media.jai.ParameterListDescriptorImpl; import javax.media.jai.PropertyGenerator; import javax.media.jai.operator.MosaicDescriptor; import com.sun.media.jai.opimage.MosaicRIF; /** * this is a workaround because GeoTools resampling will force the source threshold to be 1.0 on * Mosaic operations, which will mask all values under 1.0. * org.geotools.coverage.processing.operation.Resample2D line 631 in gt-coverage-12.1 * *

This is mostly the same as MosaicDescriptor with the one key difference being that the * default source threshold is Double.MIN_VALUE instead of 1.0 */ public class SourceThresholdFixMosaicDescriptor extends MosaicDescriptor { /** An array of ParameterListDescriptor for each mode. */ private final ParameterListDescriptor defaultParamListDescriptor; /** */ private static final long serialVersionUID = 1L; /** The parameter class list for this operation. */ private static final Class[] paramClasses = { javax.media.jai.operator.MosaicType.class, javax.media.jai.PlanarImage[].class, javax.media.jai.ROI[].class, double[][].class, double[].class}; /** The parameter name list for this operation. */ private static final String[] paramNames = {"mosaicType", "sourceAlpha", "sourceROI", "sourceThreshold", "backgroundValues"}; /** The parameter default value list for this operation. */ private static final Object[] paramDefaults = {MOSAIC_TYPE_OVERLAY, null, null, new double[][] {{Double.MIN_VALUE // if this is less than or equal to 0, it will only work on the // first band because of a bug with the source extender within JAI's // Mosaic operation }}, new double[] {0.0}}; static boolean registered = false; public static synchronized void register(final boolean force) { if (!registered || force) { final OperationRegistry registry = JAI.getDefaultInstance().getOperationRegistry(); registry.unregisterDescriptor(new MosaicDescriptor()); registry.registerDescriptor(new SourceThresholdFixMosaicDescriptor()); // there seems to be a bug in jai-ext, line 1211 of // concurrentoperationregistry null pointer exception registry.registerFactory("rendered", "Mosaic", "com.sun.media.jai", new MosaicRIF()); registered = true; } } public SourceThresholdFixMosaicDescriptor() { super(); defaultParamListDescriptor = new ParameterListDescriptorImpl(this, paramNames, paramClasses, paramDefaults, null); } @Override public PropertyGenerator[] getPropertyGenerators(final String modeName) { return new 
PropertyGenerator[] {new MosaicPropertyGenerator()}; } @Override public ParameterListDescriptor getParameterListDescriptor(final String modeName) { return defaultParamListDescriptor; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/SourceThresholdMosaicDescriptor.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter; import javax.media.jai.ParameterListDescriptor; import javax.media.jai.ParameterListDescriptorImpl; import javax.media.jai.PropertyGenerator; import javax.media.jai.operator.MosaicDescriptor; public class SourceThresholdMosaicDescriptor extends MosaicDescriptor { /** An array of ParameterListDescriptor for each mode. */ private final ParameterListDescriptor defaultParamListDescriptor; /** */ private static final long serialVersionUID = 1L; /** The parameter class list for this operation. */ private static final Class[] paramClasses = { javax.media.jai.operator.MosaicType.class, javax.media.jai.PlanarImage[].class, javax.media.jai.ROI[].class, double[][].class, double[].class}; /** The parameter name list for this operation. */ private static final String[] paramNames = {"mosaicType", "sourceAlpha", "sourceROI", "sourceThreshold", "backgroundValues"}; /** The parameter default value list for this operation. 
*/ private static final Object[] paramDefaults = {MOSAIC_TYPE_OVERLAY, null, null, new double[][] {{Double.MIN_VALUE}}, new double[] {0.0}}; public SourceThresholdMosaicDescriptor() { super(); defaultParamListDescriptor = new ParameterListDescriptorImpl(this, paramNames, paramClasses, paramDefaults, null); } @Override public PropertyGenerator[] getPropertyGenerators(final String modeName) { return new PropertyGenerator[] {new MosaicPropertyGenerator()}; } @Override public ParameterListDescriptor getParameterListDescriptor(final String modeName) { return defaultParamListDescriptor; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/MultiAdapterServerMergeStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge; import java.awt.image.SampleModel; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; public class MultiAdapterServerMergeStrategy implements ServerMergeStrategy, Mergeable { private static final Logger LOGGER = LoggerFactory.getLogger(MultiAdapterServerMergeStrategy.class); // the purpose for these maps instead of a list of samplemodel and adapter // ID pairs is to allow for multiple adapters to share the same sample model protected Map sampleModels = new HashMap<>(); public Map adapterIdToSampleModelKey = new HashMap<>(); public Map> childMergeStrategies = new HashMap<>(); public Map adapterIdToChildMergeStrategyKey = new HashMap<>(); public MultiAdapterServerMergeStrategy() {} public MultiAdapterServerMergeStrategy( final SingleAdapterServerMergeStrategy singleAdapterMergeStrategy) { sampleModels.put(0, singleAdapterMergeStrategy.sampleModel); 
adapterIdToSampleModelKey.put(singleAdapterMergeStrategy.internalAdapterId, 0); childMergeStrategies.put(0, singleAdapterMergeStrategy.mergeStrategy); adapterIdToChildMergeStrategyKey.put(singleAdapterMergeStrategy.internalAdapterId, 0); } public SampleModel getSampleModel(final short internalAdapterId) { synchronized (this) { final Integer sampleModelId = adapterIdToSampleModelKey.get(internalAdapterId); if (sampleModelId != null) { return sampleModels.get(sampleModelId); } return null; } } public RasterTileMergeStrategy getChildMergeStrategy(final short internalAdapterId) { synchronized (this) { final Integer childMergeStrategyId = adapterIdToChildMergeStrategyKey.get(internalAdapterId); if (childMergeStrategyId != null) { return childMergeStrategies.get(childMergeStrategyId); } return null; } } @Override public void merge(final Mergeable merge) { synchronized (this) { if ((merge != null) && (merge instanceof MultiAdapterServerMergeStrategy)) { final MultiAdapterServerMergeStrategy other = (MultiAdapterServerMergeStrategy) merge; mergeMaps( sampleModels, adapterIdToSampleModelKey, other.sampleModels, other.adapterIdToSampleModelKey); mergeMaps( childMergeStrategies, adapterIdToChildMergeStrategyKey, other.childMergeStrategies, other.adapterIdToChildMergeStrategyKey); } } } private static void mergeMaps( final Map thisValues, final Map thisAdapterIdToValueKeys, final Map otherValues, final Map otherAdapterIdToValueKeys) { // this was generalized to apply to both sample models and merge // strategies, comments refer to sample models but in general it is also // applied to merge strategies // first check for sample models that exist in 'other' that do // not exist in 'this' for (final Entry sampleModelEntry : otherValues.entrySet()) { if (!thisValues.containsValue(sampleModelEntry.getValue())) { // we need to add this sample model final List adapterIds = new ArrayList<>(); // find all adapter IDs associated with this sample // model for (final Entry adapterIdEntry : 
otherAdapterIdToValueKeys.entrySet()) { if (adapterIdEntry.getValue().equals(sampleModelEntry.getKey())) { adapterIds.add(adapterIdEntry.getKey()); } } if (!adapterIds.isEmpty()) { addValue(adapterIds, sampleModelEntry.getValue(), thisValues, thisAdapterIdToValueKeys); } } } // next check for adapter IDs that exist in 'other' that do not // exist in 'this' for (final Entry adapterIdEntry : otherAdapterIdToValueKeys.entrySet()) { if (!thisAdapterIdToValueKeys.containsKey(adapterIdEntry.getKey())) { // find the sample model associated with the adapter ID // in 'other' and find what Integer it is with in 'this' final T sampleModel = otherValues.get(adapterIdEntry.getValue()); if (sampleModel != null) { // because the previous step added any missing // sample models, it should be a fair assumption // that the sample model exists in 'this' for (final Entry sampleModelEntry : thisValues.entrySet()) { if (sampleModel.equals(sampleModelEntry.getValue())) { // add the sample model key to the // adapterIdToSampleModelKey map thisAdapterIdToValueKeys.put(adapterIdEntry.getKey(), sampleModelEntry.getKey()); break; } } } } } } private static synchronized void addValue( final List adapterIds, final T sampleModel, final Map values, final Map adapterIdToValueKeys) { int nextId = 1; boolean idAvailable = false; while (!idAvailable) { boolean idMatched = false; for (final Integer id : values.keySet()) { if (nextId == id.intValue()) { idMatched = true; break; } } if (idMatched) { // try the next incremental ID nextId++; } else { // its not matched so we can use it idAvailable = true; } } values.put(nextId, sampleModel); for (final Short adapterId : adapterIds) { adapterIdToValueKeys.put(adapterId, nextId); } } @SuppressFBWarnings( value = {"DLS_DEAD_LOCAL_STORE"}, justification = "Incorrect warning, sampleModelBinary used") @Override public byte[] toBinary() { int byteCount = 0; final List sampleModelBinaries = new ArrayList<>(); final List sampleModelKeys = new ArrayList<>(); int 
successfullySerializedModels = 0; int successfullySerializedModelAdapters = 0; final Set successfullySerializedModelIds = new HashSet<>(); for (final Entry entry : sampleModels.entrySet()) { final SampleModel sampleModel = entry.getValue(); try { final byte[] sampleModelBinary = SampleModelPersistenceUtils.getSampleModelBinary(sampleModel); byteCount += sampleModelBinary.length; byteCount += VarintUtils.unsignedIntByteLength(sampleModelBinary.length); byteCount += VarintUtils.unsignedIntByteLength(entry.getKey()); sampleModelBinaries.add(sampleModelBinary); sampleModelKeys.add(entry.getKey()); successfullySerializedModels++; successfullySerializedModelIds.add(entry.getKey()); } catch (final Exception e) { LOGGER.warn("Unable to serialize sample model", e); } } byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedModelIds.size()); for (final Entry entry : adapterIdToSampleModelKey.entrySet()) { if (successfullySerializedModelIds.contains(entry.getValue())) { byteCount += VarintUtils.unsignedShortByteLength(entry.getKey()); byteCount += VarintUtils.unsignedIntByteLength(entry.getValue()); successfullySerializedModelAdapters++; } } byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedModelAdapters); final List mergeStrategyBinaries = new ArrayList<>(); final List mergeStrategyKeys = new ArrayList<>(); int successfullySerializedMergeStrategies = 0; int successfullySerializedMergeAdapters = 0; final Set successfullySerializedMergeIds = new HashSet<>(); for (final Entry> entry : childMergeStrategies.entrySet()) { final RasterTileMergeStrategy mergeStrategy = entry.getValue(); final byte[] mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy); byteCount += mergeStrategyBinary.length; byteCount += VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length); byteCount += VarintUtils.unsignedIntByteLength(entry.getKey()); mergeStrategyBinaries.add(mergeStrategyBinary); mergeStrategyKeys.add(entry.getKey()); 
successfullySerializedMergeStrategies++; successfullySerializedMergeIds.add(entry.getKey()); } byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedMergeStrategies); for (final Entry entry : adapterIdToChildMergeStrategyKey.entrySet()) { if (successfullySerializedMergeIds.contains(entry.getValue())) { byteCount += VarintUtils.unsignedShortByteLength(entry.getKey()); byteCount += VarintUtils.unsignedIntByteLength(entry.getValue()); successfullySerializedMergeAdapters++; } } byteCount += VarintUtils.unsignedIntByteLength(successfullySerializedMergeAdapters); final ByteBuffer buf = ByteBuffer.allocate(byteCount); VarintUtils.writeUnsignedInt(successfullySerializedModels, buf); for (int i = 0; i < successfullySerializedModels; i++) { final byte[] sampleModelBinary = sampleModelBinaries.get(i); VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf); buf.put(sampleModelBinary); VarintUtils.writeUnsignedInt(sampleModelKeys.get(i), buf); } VarintUtils.writeUnsignedInt(successfullySerializedModelAdapters, buf); for (final Entry entry : adapterIdToSampleModelKey.entrySet()) { if (successfullySerializedModelIds.contains(entry.getValue())) { VarintUtils.writeUnsignedShort(entry.getKey(), buf); VarintUtils.writeUnsignedInt(entry.getValue(), buf); } } VarintUtils.writeUnsignedInt(successfullySerializedMergeStrategies, buf); for (int i = 0; i < successfullySerializedMergeStrategies; i++) { final byte[] mergeStrategyBinary = mergeStrategyBinaries.get(i); VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf); buf.put(mergeStrategyBinary); VarintUtils.writeUnsignedInt(mergeStrategyKeys.get(i), buf); } VarintUtils.writeUnsignedInt(successfullySerializedMergeAdapters, buf); for (final Entry entry : adapterIdToChildMergeStrategyKey.entrySet()) { if (successfullySerializedModelIds.contains(entry.getValue())) { VarintUtils.writeUnsignedShort(entry.getKey(), buf); VarintUtils.writeUnsignedInt(entry.getValue(), buf); } } return buf.array(); } @Override public 
void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int sampleModelSize = VarintUtils.readUnsignedInt(buf); sampleModels = new HashMap<>(sampleModelSize); for (int i = 0; i < sampleModelSize; i++) { final byte[] sampleModelBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); if (sampleModelBinary.length > 0) { try { final int sampleModelKey = VarintUtils.readUnsignedInt(buf); final SampleModel sampleModel = SampleModelPersistenceUtils.getSampleModel(sampleModelBinary); sampleModels.put(sampleModelKey, sampleModel); } catch (final Exception e) { LOGGER.warn("Unable to deserialize sample model", e); } } else { LOGGER.warn("Sample model binary is empty, unable to deserialize"); } } final int sampleModelAdapterIdSize = VarintUtils.readUnsignedInt(buf); adapterIdToSampleModelKey = new HashMap<>(sampleModelAdapterIdSize); for (int i = 0; i < sampleModelAdapterIdSize; i++) { adapterIdToSampleModelKey.put( VarintUtils.readUnsignedShort(buf), VarintUtils.readUnsignedInt(buf)); } final int mergeStrategySize = VarintUtils.readUnsignedInt(buf); childMergeStrategies = new HashMap<>(mergeStrategySize); for (int i = 0; i < mergeStrategySize; i++) { final byte[] mergeStrategyBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); if (mergeStrategyBinary.length > 0) { try { final RasterTileMergeStrategy mergeStrategy = (RasterTileMergeStrategy) PersistenceUtils.fromBinary(mergeStrategyBinary); final int mergeStrategyKey = VarintUtils.readUnsignedInt(buf); if (mergeStrategy != null) { childMergeStrategies.put(mergeStrategyKey, mergeStrategy); } } catch (final Exception e) { LOGGER.warn("Unable to deserialize merge strategy", e); } } else { LOGGER.warn("Merge strategy binary is empty, unable to deserialize"); } } final int mergeStrategyAdapterIdSize = VarintUtils.readUnsignedInt(buf); adapterIdToChildMergeStrategyKey = new HashMap<>(mergeStrategyAdapterIdSize); for (int i = 0; i < mergeStrategyAdapterIdSize; 
i++) { adapterIdToChildMergeStrategyKey.put( VarintUtils.readUnsignedShort(buf), VarintUtils.readUnsignedInt(buf)); } } // public T getMetadata( // final GridCoverage tileGridCoverage, // final Map originalCoverageProperties, // final RasterDataAdapter dataAdapter ) { // final RasterTileMergeStrategy childMergeStrategy = // getChildMergeStrategy(dataAdapter.getAdapterId()); // if (childMergeStrategy != null) { // return childMergeStrategy.getMetadata( // tileGridCoverage, // dataAdapter); // } // return null; // } @Override public void merge( final RasterTile thisTile, final RasterTile nextTile, final short internalAdapterId) { final RasterTileMergeStrategy childMergeStrategy = getChildMergeStrategy(internalAdapterId); if (childMergeStrategy != null) { childMergeStrategy.merge(thisTile, nextTile, getSampleModel(internalAdapterId)); } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/RasterTileMergeStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge; import java.awt.image.SampleModel; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.core.index.persist.Persistable; import org.opengis.coverage.grid.GridCoverage; public interface RasterTileMergeStrategy extends Persistable { public void merge(RasterTile thisTile, RasterTile nextTile, SampleModel sampleModel); public T getMetadata(GridCoverage tileGridCoverage, RasterDataAdapter dataAdapter); } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/RasterTileRowTransform.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge; import java.io.IOException; import java.util.Map; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform; /** * This class can be used by both the RasterTileCombiner and the RasterTileVisibilityCombiner to * execute the merge strategy */ public class RasterTileRowTransform implements RowTransform { public static final String TRANSFORM_NAME = "RasterTile"; public static final String MERGE_STRATEGY_KEY = "MERGE_STRATEGY"; private ServerMergeStrategy mergeStrategy; // this priority is fairly arbitrary at the moment private static final int RASTER_TILE_PRIORITY = 4; public Mergeable transform(final short internalAdapterId, final Mergeable mergeable) { if ((mergeable != null) && (mergeable instanceof RasterTile)) { final RasterTile rasterTile = (RasterTile) mergeable; return new ServerMergeableRasterTile<>( rasterTile.getDataBuffer(), rasterTile.getMetadata(), mergeStrategy, internalAdapterId); } return mergeable; } @Override public void initOptions(final Map options) throws IOException { final String mergeStrategyStr = options.get(MERGE_STRATEGY_KEY); if (mergeStrategyStr != null) { final 
byte[] mergeStrategyBytes = ByteArrayUtils.byteArrayFromString(mergeStrategyStr); mergeStrategy = (ServerMergeStrategy) PersistenceUtils.fromBinary(mergeStrategyBytes); } } @Override public Mergeable getRowAsMergeableObject( final short internalAdapterId, final ByteArray fieldId, final byte[] rowValueBinary) { final RasterTile mergeable = new RasterTile(); if (mergeable != null) { mergeable.fromBinary(rowValueBinary); } return transform(internalAdapterId, mergeable); } @Override public byte[] getBinaryFromMergedObject(final Mergeable rowObject) { return rowObject.toBinary(); } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} @Override public String getTransformName() { return TRANSFORM_NAME; } @Override public int getBaseTransformPriority() { return RASTER_TILE_PRIORITY; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/ServerMergeStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.core.index.persist.Persistable; public interface ServerMergeStrategy { public void merge( final RasterTile thisTile, final RasterTile nextTile, final short internalAdapterId); } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/SimpleAbstractMergeStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge; import java.awt.image.Raster; import java.awt.image.SampleModel; import java.awt.image.WritableRaster; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.adapter.raster.adapter.ServerMergeableRasterTile; import org.locationtech.geowave.core.index.persist.Persistable; import org.opengis.coverage.grid.GridCoverage; public abstract class SimpleAbstractMergeStrategy implements RasterTileMergeStrategy { protected SimpleAbstractMergeStrategy() { super(); } private static final long serialVersionUID = 8937483748317L; @Override public void merge( final RasterTile thisTile, final RasterTile nextTile, final SampleModel sampleModel) { // this strategy aims for latest tile // with data values, but where there // is no data in the latest and there is data in the earlier tile, it // fills the data from the earlier tile if ((nextTile != null) && (nextTile instanceof ServerMergeableRasterTile)) { final WritableRaster nextRaster = Raster.createWritableRaster(sampleModel, nextTile.getDataBuffer(), null); final WritableRaster thisRaster = Raster.createWritableRaster(sampleModel, thisTile.getDataBuffer(), null); mergeRasters(thisTile, nextTile, thisRaster, nextRaster); } } protected void mergeRasters( final RasterTile thisTile, final RasterTile nextTile, final WritableRaster thisRaster, final WritableRaster nextRaster) { final int maxX = nextRaster.getMinX() + nextRaster.getWidth(); final int maxY = nextRaster.getMinY() + nextRaster.getHeight(); for (int b = 
0; b < nextRaster.getNumBands(); b++) { for (int x = nextRaster.getMinX(); x < maxX; x++) { for (int y = nextRaster.getMinY(); y < maxY; y++) { final double thisSample = thisRaster.getSampleDouble(x, y, b); final double nextSample = nextRaster.getSampleDouble(x, y, b); thisRaster.setSample(x, y, b, getSample(x, y, b, thisSample, nextSample)); } } } } protected abstract double getSample(int x, int y, int b, double thisSample, double nextSample); @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } return true; } @Override public int hashCode() { return (int) serialVersionUID; // this looks correct based on behaviour of equals?!? should return the // same hash code for all instances } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} @Override public T getMetadata(final GridCoverage tileGridCoverage, final RasterDataAdapter dataAdapter) { return null; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/SingleAdapterServerMergeStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge; import java.awt.image.SampleModel; import java.nio.ByteBuffer; import java.util.Map; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.adapter.raster.util.SampleModelPersistenceUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.opengis.coverage.grid.GridCoverage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; public class SingleAdapterServerMergeStrategy implements ServerMergeStrategy, Persistable { private static final Logger LOGGER = LoggerFactory.getLogger(SingleAdapterServerMergeStrategy.class); // the purpose for these maps instead of a list of samplemodel and adapter // ID pairs is to allow for multiple adapters to share the same sample model protected short internalAdapterId; protected SampleModel sampleModel; protected RasterTileMergeStrategy mergeStrategy; public SingleAdapterServerMergeStrategy() {} public SingleAdapterServerMergeStrategy( final short internalAdapterId, final SampleModel sampleModel, final RasterTileMergeStrategy mergeStrategy) { this.internalAdapterId = internalAdapterId; this.sampleModel = sampleModel; this.mergeStrategy = mergeStrategy; } @SuppressFBWarnings( value = {"DLS_DEAD_LOCAL_STORE"}, justification = "Incorrect warning, 
sampleModelBinary used") @Override public byte[] toBinary() { final byte[] sampleModelBinary = SampleModelPersistenceUtils.getSampleModelBinary(sampleModel); final byte[] mergeStrategyBinary = PersistenceUtils.toBinary(mergeStrategy); final int byteCount = sampleModelBinary.length + VarintUtils.unsignedIntByteLength(sampleModelBinary.length) + VarintUtils.unsignedShortByteLength(internalAdapterId) + mergeStrategyBinary.length + VarintUtils.unsignedIntByteLength(mergeStrategyBinary.length); final ByteBuffer buf = ByteBuffer.allocate(byteCount); VarintUtils.writeUnsignedInt(sampleModelBinary.length, buf); buf.put(sampleModelBinary); VarintUtils.writeUnsignedShort(internalAdapterId, buf); VarintUtils.writeUnsignedInt(mergeStrategyBinary.length, buf); buf.put(mergeStrategyBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] sampleModelBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); if (sampleModelBinary.length > 0) { try { sampleModel = SampleModelPersistenceUtils.getSampleModel(sampleModelBinary); } catch (final Exception e) { LOGGER.warn("Unable to deserialize sample model", e); } } else { LOGGER.warn("Sample model binary is empty, unable to deserialize"); } internalAdapterId = VarintUtils.readUnsignedShort(buf); final byte[] mergeStrategyBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); if (mergeStrategyBinary.length > 0) { try { mergeStrategy = (RasterTileMergeStrategy) PersistenceUtils.fromBinary(mergeStrategyBinary); } catch (final Exception e) { LOGGER.warn("Unable to deserialize merge strategy", e); } } else { LOGGER.warn("Merge strategy binary is empty, unable to deserialize"); } } @Override public void merge( final RasterTile thisTile, final RasterTile nextTile, final short internalAdapterId) { if (mergeStrategy != null) { mergeStrategy.merge(thisTile, nextTile, sampleModel); } } public T getMetadata( final GridCoverage 
tileGridCoverage, final Map originalCoverageProperties, final RasterDataAdapter dataAdapter) { if (mergeStrategy != null) { return mergeStrategy.getMetadata(tileGridCoverage, dataAdapter); } return null; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataByFilter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge.nodata; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.TWKBReader; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.GeoWaveSerializationException; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.io.ParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class NoDataByFilter implements NoDataMetadata { private static final Logger LOGGER = LoggerFactory.getLogger(NoDataByFilter.class); private Geometry shape; private double[][] noDataPerBand; public NoDataByFilter() {} public NoDataByFilter(final Geometry shape, final double[][] noDataPerBand) { this.shape = shape; this.noDataPerBand = noDataPerBand; } public Geometry getShape() { return shape; } public double[][] getNoDataPerBand() { return noDataPerBand; } @Override public boolean isNoData(final SampleIndex index, final double value) { if ((noDataPerBand != null) && (noDataPerBand.length > index.getBand())) { for (final double noDataVal : noDataPerBand[index.getBand()]) { // use object equality to capture NaN, and positive and negative // infinite equality if (new Double(value).equals(new Double(noDataVal))) { return true; } } } if ((shape != null) && !shape.intersects( new 
GeometryFactory().createPoint(new Coordinate(index.getX(), index.getY())))) { return true; } return false; } @Override public byte[] toBinary() { final byte[] noDataBinary; if ((noDataPerBand != null) && (noDataPerBand.length > 0)) { int totalBytes = 0; final List noDataValuesBytes = new ArrayList<>(noDataPerBand.length); for (final double[] noDataValues : noDataPerBand) { final int thisBytes = VarintUtils.unsignedIntByteLength(noDataValues.length) + (noDataValues.length * 8); totalBytes += thisBytes; final ByteBuffer noDataBuf = ByteBuffer.allocate(thisBytes); VarintUtils.writeUnsignedInt(noDataValues.length, noDataBuf); for (final double noDataValue : noDataValues) { noDataBuf.putDouble(noDataValue); } noDataValuesBytes.add(noDataBuf.array()); } totalBytes += VarintUtils.unsignedIntByteLength(noDataPerBand.length); final ByteBuffer noDataBuf = ByteBuffer.allocate(totalBytes); VarintUtils.writeUnsignedInt(noDataPerBand.length, noDataBuf); for (final byte[] noDataValueBytes : noDataValuesBytes) { noDataBuf.put(noDataValueBytes); } noDataBinary = noDataBuf.array(); } else { noDataBinary = new byte[] {}; } final byte[] geometryBinary; if (shape == null) { geometryBinary = new byte[0]; } else { geometryBinary = GeometryUtils.geometryToBinary(shape, GeometryUtils.MAX_GEOMETRY_PRECISION); } final ByteBuffer buf = ByteBuffer.allocate( geometryBinary.length + noDataBinary.length + VarintUtils.unsignedIntByteLength(noDataBinary.length)); VarintUtils.writeUnsignedInt(noDataBinary.length, buf); buf.put(noDataBinary); buf.put(geometryBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int noDataBinaryLength = VarintUtils.readUnsignedInt(buf); final int geometryBinaryLength = bytes.length - noDataBinaryLength - VarintUtils.unsignedIntByteLength(noDataBinaryLength); if (noDataBinaryLength == 0) { noDataPerBand = new double[][] {}; } else { final int numBands = 
VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, numBands); noDataPerBand = new double[numBands][]; for (int b = 0; b < noDataPerBand.length; b++) { final int bandLength = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, bandLength); noDataPerBand[b] = new double[bandLength]; for (int i = 0; i < noDataPerBand[b].length; i++) { noDataPerBand[b][i] = buf.getDouble(); } } } if (geometryBinaryLength > 0) { try { shape = new TWKBReader().read(buf); } catch (final ParseException e) { throw new GeoWaveSerializationException("Unable to deserialize geometry data", e); } } else { shape = null; } } @Override public Set getNoDataIndices() { return null; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataBySampleIndex.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.adapter.merge.nodata;

import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.Set;
import org.locationtech.geowave.core.index.VarintUtils;

/**
 * No-data metadata backed by an explicit set of sample coordinates: a sample is "no data" exactly
 * when its (x, y, band) index is a member of the set.
 */
public class NoDataBySampleIndex implements NoDataMetadata {
  private Set<SampleIndex> noDataIndexSet;

  public NoDataBySampleIndex() {
    super();
  }

  public NoDataBySampleIndex(final Set<SampleIndex> noDataIndexSet) {
    this.noDataIndexSet = noDataIndexSet;
  }

  @Override
  public byte[] toBinary() {
    // Encoding: a leading varint element count followed by one varint per
    // coordinate component (x, y, band) of every index in the set.
    int totalBytes = VarintUtils.unsignedIntByteLength(noDataIndexSet.size());
    for (final SampleIndex sample : noDataIndexSet) {
      totalBytes +=
          VarintUtils.unsignedIntByteLength(sample.getX())
              + VarintUtils.unsignedIntByteLength(sample.getY())
              + VarintUtils.unsignedIntByteLength(sample.getBand());
    }
    final ByteBuffer buffer = ByteBuffer.allocate(totalBytes);
    VarintUtils.writeUnsignedInt(noDataIndexSet.size(), buffer);
    for (final SampleIndex sample : noDataIndexSet) {
      VarintUtils.writeUnsignedInt(sample.getX(), buffer);
      VarintUtils.writeUnsignedInt(sample.getY(), buffer);
      VarintUtils.writeUnsignedInt(sample.getBand(), buffer);
    }
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    final int count = VarintUtils.readUnsignedInt(buffer);
    noDataIndexSet = new HashSet<>(count);
    for (int n = 0; n < count; n++) {
      // Components were written in x, y, band order; read them back the same way.
      final int sampleX = VarintUtils.readUnsignedInt(buffer);
      final int sampleY = VarintUtils.readUnsignedInt(buffer);
      final int sampleBand = VarintUtils.readUnsignedInt(buffer);
      noDataIndexSet.add(new SampleIndex(sampleX, sampleY, sampleBand));
    }
  }

  @Override
  public boolean isNoData(final SampleIndex index, final double sampleValue) {
    // The sample value is irrelevant here - membership alone decides.
    return noDataIndexSet.contains(index);
  }

  @Override
  public Set<SampleIndex> getNoDataIndices() {
    return noDataIndexSet;
  }
}

================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataMergeStrategy.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge.nodata; import java.awt.image.Raster; import java.awt.image.SampleModel; import java.awt.image.WritableRaster; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.RasterTile; import org.locationtech.geowave.adapter.raster.adapter.merge.RasterTileMergeStrategy; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMetadata.SampleIndex; import org.opengis.coverage.grid.GridCoverage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class NoDataMergeStrategy implements RasterTileMergeStrategy { public NoDataMergeStrategy() {} private static final long serialVersionUID = 38473874l; private static final Logger LOGGER = LoggerFactory.getLogger(NoDataMergeStrategy.class); @Override public void merge( final RasterTile thisTile, final RasterTile nextTile, final SampleModel sampleModel) { // this strategy aims for latest tile with data values, but where there // is no data in the latest and there is data in the earlier tile, it // fills the data from the earlier tile // if next tile is null or if this tile does not have metadata, just // keep this tile as is if ((nextTile != null) && (thisTile.getMetadata() != null)) { final NoDataMetadata thisTileMetadata = thisTile.getMetadata(); final NoDataMetadata nextTileMetadata = nextTile.getMetadata(); final WritableRaster thisRaster = Raster.createWritableRaster(sampleModel, thisTile.getDataBuffer(), null); final WritableRaster nextRaster = 
Raster.createWritableRaster(sampleModel, nextTile.getDataBuffer(), null); final int maxX = thisRaster.getMinX() + thisRaster.getWidth(); final int maxY = thisRaster.getMinY() + thisRaster.getHeight(); boolean recalculateMetadata = false; for (int b = 0; b < thisRaster.getNumBands(); b++) { for (int x = thisRaster.getMinX(); x < maxX; x++) { for (int y = thisRaster.getMinY(); y < maxY; y++) { if (thisTileMetadata.isNoData( new SampleIndex(x, y, b), thisRaster.getSampleDouble(x, y, b))) { final double sample = nextRaster.getSampleDouble(x, y, b); if ((nextTileMetadata == null) || !nextTileMetadata.isNoData(new SampleIndex(x, y, b), sample)) { // we only need to recalculate metadata if // the raster is overwritten, // otherwise just use this raster's // metadata recalculateMetadata = true; thisRaster.setSample(x, y, b, sample); } } } } } if (recalculateMetadata) { thisTile.setMetadata( NoDataMetadataFactory.mergeMetadata( thisTileMetadata, thisRaster, nextTileMetadata, nextRaster)); } } } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } return true; } @Override public int hashCode() { return (int) serialVersionUID; // this looks correct based on behaviour of equals?!? 
should return the // same hash code for all instances } @Override public byte[] toBinary() { return new byte[] {}; } @Override public void fromBinary(final byte[] bytes) {} @Override public NoDataMetadata getMetadata( final GridCoverage tileGridCoverage, final RasterDataAdapter dataAdapter) { if (tileGridCoverage instanceof FitToIndexGridCoverage) { return NoDataMetadataFactory.createMetadata( dataAdapter.getNoDataValuesPerBand(), ((FitToIndexGridCoverage) tileGridCoverage).getFootprintScreenGeometry(), tileGridCoverage.getRenderedImage().getData()); } return NoDataMetadataFactory.createMetadata( dataAdapter.getNoDataValuesPerBand(), null, tileGridCoverage.getRenderedImage().getData()); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataMetadata.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge.nodata; import java.util.Set; import org.locationtech.geowave.core.index.persist.Persistable; public interface NoDataMetadata extends Persistable { public static class SampleIndex { private final int x; private final int y; private final int b; public SampleIndex(final int x, final int y, final int b) { this.x = x; this.y = y; this.b = b; } public int getX() { return x; } public int getY() { return y; } public int getBand() { return b; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + b; result = (prime * result) + x; result = (prime * result) + y; return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SampleIndex other = (SampleIndex) obj; if (b != other.b) { return false; } if (x != other.x) { return false; } if (y != other.y) { return false; } return true; } } public boolean isNoData(SampleIndex index, double sampleValue); public Set getNoDataIndices(); } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/merge/nodata/NoDataMetadataFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.adapter.merge.nodata; import java.awt.image.Raster; import java.awt.image.WritableRaster; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMetadata.SampleIndex; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; public class NoDataMetadataFactory { private static class NoDataSummary { private final Set indices; private final double[][] usedNoDataValues; public NoDataSummary(final Set indices, final double[][] usedNoDataValues) { this.indices = indices; this.usedNoDataValues = usedNoDataValues; } } private static final int MAX_LIST_NO_DATA = 20; public static NoDataMetadata createMetadata( final double[][] allNoDataValues, final Geometry shape, final Raster data) { final NoDataSummary noDataSummary = getNoDataSummary(allNoDataValues, shape, data); return createMetadata(noDataSummary, new Geometry[] {shape}, data.getWidth(), data.getHeight()); } public static NoDataMetadata mergeMetadata( final NoDataMetadata noDataMetadata1, final WritableRaster raster1, final NoDataMetadata noDataMetadata2, final WritableRaster raster2) { if ((noDataMetadata1 == null) || (noDataMetadata2 == null)) { // this implies that there is no nodata values in one of the rasters // so there is no nodata values in the merge return null; } final Set noDataIndices1 = noDataMetadata1.getNoDataIndices(); final Set noDataIndices2 = noDataMetadata2.getNoDataIndices(); if 
((noDataIndices1 != null) && (noDataIndices2 != null)) { // simple case, just take the intersection of the sets noDataIndices2.retainAll(noDataIndices1); return new NoDataBySampleIndex(noDataIndices2); } else if (noDataIndices1 != null) { // just determine which of the no data indices are covered by the // second set of metadata and remove them return mergeMetadataBySummary(noDataIndices1, noDataMetadata2, raster2); } else if (noDataIndices2 != null) { // just determine which of the no data indices are covered by the // first set of metadata and remove them return mergeMetadataBySummary(noDataIndices2, noDataMetadata1, raster1); } else if ((noDataMetadata1 instanceof NoDataByFilter) && (noDataMetadata2 instanceof NoDataByFilter)) { final NoDataByFilter noDataByFilter1 = ((NoDataByFilter) noDataMetadata1); final NoDataByFilter noDataByFilter2 = ((NoDataByFilter) noDataMetadata2); final double[][] noDataPerBand1 = noDataByFilter1.getNoDataPerBand(); final double[][] noDataPerBand2 = noDataByFilter2.getNoDataPerBand(); // union the no data values from each filter final int numBands = Math.min(noDataPerBand1.length, noDataPerBand2.length); final double[][] allNoDataValues = new double[numBands][]; for (int b = 0; b < numBands; b++) { final Set noDataValuesInBand = new HashSet<>(); if (noDataPerBand1[b] != null) { for (final double noDataValue : noDataPerBand1[b]) { noDataValuesInBand.add(noDataValue); } } if (noDataPerBand2[b] != null) { for (final double noDataValue : noDataPerBand2[b]) { noDataValuesInBand.add(noDataValue); } } allNoDataValues[b] = new double[noDataValuesInBand.size()]; int i = 0; final Iterator it = noDataValuesInBand.iterator(); while (it.hasNext()) { allNoDataValues[b][i++] = it.next(); } } return mergeMetadataBySummary( allNoDataValues, noDataByFilter1, raster1, noDataByFilter2, raster2); } else { // this should never happen because the only implementations of // metadata are by index or by filter but just in case iteratively // go through every 
sample, determine if its covered by the first or // the second set of metadata and use the indices return exhaustiveMergeMetadata(noDataMetadata1, raster1, noDataMetadata2, raster2); } } private static NoDataMetadata createMetadata( final NoDataSummary noDataSummary, final Geometry[] shapes, final int width, final int height) { if (noDataSummary.indices.size() > MAX_LIST_NO_DATA) { Geometry finalShape; if ((shapes == null) || (shapes.length == 0)) { finalShape = null; } else { finalShape = shapes[0]; if ((shapes.length > 1) && (finalShape != null)) { for (int i = 1; i < shapes.length; i++) { if (shapes[i] == null) { finalShape = null; break; } else { finalShape = finalShape.union(shapes[i]); } } } } if ((finalShape != null) && finalShape.covers( new GeometryFactory().toGeometry(new Envelope(0, width, 0, height)))) { // if the coverage of this geometric union ever gets to the // point that it fully covers the raster, stop storing it and // just set the geometry to null finalShape = null; } return new NoDataByFilter(finalShape, noDataSummary.usedNoDataValues); } else if (!noDataSummary.indices.isEmpty()) { // just go through every raster sample and determine whether it // qualifies as null data return new NoDataBySampleIndex(noDataSummary.indices); } else { // the "no data" samples in the dataset must be 0, so just return // null for the metadata return null; } } private static NoDataMetadata mergeMetadataBySummary( final Set noDataIndices, final NoDataMetadata noDataMetadata, final WritableRaster raster) { final Iterator indices = noDataIndices.iterator(); while (indices.hasNext()) { final SampleIndex index = indices.next(); if (!noDataMetadata.isNoData( index, raster.getSampleDouble(index.getX(), index.getY(), index.getBand()))) { indices.remove(); } } return new NoDataBySampleIndex(noDataIndices); } private static NoDataMetadata exhaustiveMergeMetadata( final NoDataMetadata noDataMetadata1, final WritableRaster raster1, final NoDataMetadata noDataMetadata2, final 
WritableRaster raster2) { final int width = Math.min(raster1.getWidth(), raster2.getWidth()); final int height = Math.min(raster1.getHeight(), raster2.getHeight()); final int numBands = Math.min(raster1.getNumBands(), raster2.getNumBands()); final Set indices = new HashSet<>(); for (int b = 0; b < numBands; b++) { for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { final SampleIndex index = new SampleIndex(x, y, b); if (noDataMetadata1.isNoData(index, raster1.getSampleDouble(x, y, b)) && noDataMetadata2.isNoData(index, raster2.getSampleDouble(x, y, b))) { indices.add(index); } } } } return new NoDataBySampleIndex(indices); } private static NoDataMetadata mergeMetadataBySummary( final double[][] allNoDataValues, final NoDataByFilter noDataMetadata1, final WritableRaster raster1, final NoDataByFilter noDataMetadata2, final WritableRaster raster2) { final NoDataSummary noDataSummary = getNoDataSummary(allNoDataValues, noDataMetadata1, raster1, noDataMetadata2, raster2); return createMetadata( noDataSummary, new Geometry[] {noDataMetadata1.getShape(), noDataMetadata2.getShape()}, raster2.getWidth(), // both // rasters // better // be // the // same // dimensions raster2.getHeight()); } private static NoDataSummary getNoDataSummary( final double[][] allNoDataValues, final NoDataByFilter noDataMetadata1, final WritableRaster raster1, final NoDataByFilter noDataMetadata2, final WritableRaster raster2) { final int width = Math.min(raster1.getWidth(), raster2.getWidth()); final int height = Math.min(raster1.getHeight(), raster2.getHeight()); final int numBands = Math.min(raster1.getNumBands(), raster2.getNumBands()); return getNoDataSummary( allNoDataValues, new MultiShape(new Geometry[] {noDataMetadata1.getShape(), noDataMetadata2.getShape()}), new MultiRaster(new Raster[] {raster1, raster2}), width, height, numBands); } private static NoDataSummary getNoDataSummary( final double[][] allNoDataValues, final Geometry shape, final Raster data) { return 
getNoDataSummary( allNoDataValues, new SingleShape(shape), new SingleRaster(data), data.getWidth(), data.getHeight(), data.getNumBands()); } private static NoDataSummary getNoDataSummary( final double[][] allNoDataValues, final NoDataByCoordinate shape, final NoDataBySample data, final int width, final int height, final int numBands) { final Set[] noDataValuesPerBand; boolean skipNoData; final Set indices = new HashSet<>(); if (allNoDataValues == null) { skipNoData = true; noDataValuesPerBand = null; if (shape == null) { return new NoDataSummary(indices, new double[][] {}); } } else { noDataValuesPerBand = new Set[numBands]; for (int b = 0; b < numBands; b++) { noDataValuesPerBand[b] = new HashSet<>(); } skipNoData = false; } for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { if (shape.isNoData(x, y)) { for (int b = 0; b < numBands; b++) { indices.add(new SampleIndex(x, y, b)); } // this will ignore the no data values for this x,y // which should be fine because the shape will // always classify this x,y as "no data" } else if (!skipNoData) { for (int b = 0; b < numBands; b++) { if (allNoDataValues[b] == null) { continue; } else { final double[] samples = data.getSampleValues(x, y, b); for (int i = 0; i < allNoDataValues[b].length; i++) { // if a single sample is not a "no data" value // then it is valid boolean noData = true; for (final double sample : samples) { // we wrap it with Object equality to make // sure we generically catch special // cases, such as NaN and positive and // negative infinite if (!new Double(sample).equals(allNoDataValues[b][i])) { noData = false; break; } } if (noData) { indices.add(new SampleIndex(x, y, b)); if ((noDataValuesPerBand != null) && (noDataValuesPerBand[b] != null)) { noDataValuesPerBand[b].add(allNoDataValues[b][i]); } } } } } } } } final double[][] usedNoDataValues; if (!skipNoData && (noDataValuesPerBand != null)) { usedNoDataValues = new double[noDataValuesPerBand.length][]; for (int b = 0; b < 
noDataValuesPerBand.length; b++) { usedNoDataValues[b] = new double[noDataValuesPerBand[b].size()]; int i = 0; final Iterator noDataValues = noDataValuesPerBand[b].iterator(); while (noDataValues.hasNext()) { usedNoDataValues[b][i++] = noDataValues.next(); } } } else { usedNoDataValues = new double[][] {}; } return new NoDataSummary(indices, usedNoDataValues); } private static interface NoDataByCoordinate { public boolean isNoData(int x, int y); } private static interface NoDataBySample { public double[] getSampleValues(int x, int y, int b); } private static class SingleShape implements NoDataByCoordinate { private final Geometry shape; public SingleShape(final Geometry shape) { this.shape = shape; } @Override public boolean isNoData(final int x, final int y) { return ((shape != null) && !shape.intersects(new GeometryFactory().createPoint(new Coordinate(x, y)))); } } private static class MultiShape implements NoDataByCoordinate { private final Geometry[] shapes; private boolean acceptNone = false; public MultiShape(final Geometry[] shapes) { this.shapes = shapes; if ((shapes == null) || (shapes.length == 0)) { acceptNone = true; } else { for (final Geometry shape : shapes) { if (shape == null) { acceptNone = true; } } } } @Override public boolean isNoData(final int x, final int y) { if (!acceptNone) { for (final Geometry shape : shapes) { // if any one intersects the point than it is not "no data" // based on shape if (shape.intersects(new GeometryFactory().createPoint(new Coordinate(x, y)))) { return false; } } return true; } return false; } } private static class SingleRaster implements NoDataBySample { private final Raster raster; public SingleRaster(final Raster raster) { this.raster = raster; } @Override public double[] getSampleValues(final int x, final int y, final int b) { return new double[] {raster.getSampleDouble(x, y, b)}; } } private static class MultiRaster implements NoDataBySample { private final Raster[] rasters; public MultiRaster(final Raster[] 
rasters) { this.rasters = rasters; } @Override public double[] getSampleValues(final int x, final int y, final int b) { final double[] samples = new double[rasters.length]; for (int i = 0; i < rasters.length; i++) { samples[i] = rasters[i].getSampleDouble(x, y, b); } return samples; } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/warp/WarpNearestOpImage.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /* * JAI-Ext - OpenSource Java Advanced Image Extensions Library http://www.geo-solutions.it/ * Copyright 2014 GeoSolutions Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You may obtain a copy of the License * at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in * writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package org.locationtech.geowave.adapter.raster.adapter.warp; import java.awt.image.ColorModel; import java.awt.image.DataBuffer; import java.awt.image.IndexColorModel; import java.awt.image.RenderedImage; import java.awt.image.SampleModel; import java.util.Map; import javax.media.jai.ImageLayout; import javax.media.jai.Interpolation; import javax.media.jai.PlanarImage; import javax.media.jai.ROI; import javax.media.jai.RasterAccessor; import javax.media.jai.Warp; import javax.media.jai.iterator.RandomIter; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import it.geosolutions.jaiext.iterators.RandomIterFactory; import it.geosolutions.jaiext.range.Range; /** * This is code entirely intended to get around an issue on line 265 of WarpOpImage in jai-ext. The * following code does not work if the source is significant lower resolution than the destination * and seems unnecessary in general: * *

roiTile = roi.intersect(new ROIShape(srcRectExpanded)); * *

An OpImage implementing the general "Warp" operation as described in * javax.media.jai.operator.WarpDescriptor. It supports the nearest-neighbor interpolation. * *

The layout for the destination image may be specified via the ImageLayout * parameter. However, only those settings suitable for this operation will be used. The unsuitable * settings will be replaced by default suitable values. An optional ROI object and a NoData Range * can be used. If a backward mapped pixel lies outside ROI or it is a NoData, then the destination * pixel value is a background value. * * @since EA2 * @see javax.media.jai.Warp * @see javax.media.jai.WarpOpImage * @see javax.media.jai.operator.WarpDescriptor * @see WarpRIF */ @SuppressWarnings("unchecked") @SuppressFBWarnings final class WarpNearestOpImage extends WarpOpImage { /** LookupTable used for a faster NoData check */ private byte[][] byteLookupTable; /** * Constructs a WarpNearestOpImage. * * @param source The source image. * @param config RenderingHints used in calculations. * @param layout The destination image layout. * @param warp An object defining the warp algorithm. * @param interp An object describing the interpolation method. * @param roi input ROI object used. * @param noData NoData Range object used for checking if NoData are present. */ public WarpNearestOpImage( final RenderedImage source, final Map config, final ImageLayout layout, final Warp warp, final Interpolation interp, final ROI sourceROI, final Range noData, final double[] bkg) { super( source, layout, config, false, null, // extender not needed in // nearest-neighbor // interpolation interp, warp, bkg, sourceROI, noData); /* * If the source has IndexColorModel, override the default setting in OpImage. The dest shall * have exactly the same SampleModel and ColorModel as the source. Note, in this case, the * source should have an integral data type. 
*/ final ColorModel srcColorModel = source.getColorModel(); if (srcColorModel instanceof IndexColorModel) { sampleModel = source.getSampleModel().createCompatibleSampleModel(tileWidth, tileHeight); colorModel = srcColorModel; } /* * Selection of a destinationNoData value for each datatype */ final SampleModel sm = source.getSampleModel(); // Source image data Type final int srcDataType = sm.getDataType(); // Creation of a lookuptable containing the values to use for no data if ((srcDataType == DataBuffer.TYPE_BYTE) && hasNoData) { final int numBands = getNumBands(); byteLookupTable = new byte[numBands][256]; for (int b = 0; b < numBands; b++) { for (int i = 0; i < byteLookupTable[0].length; i++) { final byte value = (byte) i; if (noDataRange.contains(value)) { byteLookupTable[b][i] = (byte) backgroundValues[b]; } else { byteLookupTable[b][i] = value; } } } } } @Override protected void computeRectByte( final PlanarImage src, final RasterAccessor dst, final RandomIter roiIter, final boolean roiContainsTile) { // Random Iterator on the source image bounds final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC); // Initial settings final int minX = src.getMinX(); final int maxX = src.getMaxX(); final int minY = src.getMinY(); final int maxY = src.getMaxY(); final int dstWidth = dst.getWidth(); final int dstHeight = dst.getHeight(); final int dstBands = dst.getNumBands(); final int lineStride = dst.getScanlineStride(); final int pixelStride = dst.getPixelStride(); final int[] bandOffsets = dst.getBandOffsets(); final byte[][] data = dst.getByteDataArrays(); final float[] warpData = new float[2 * dstWidth]; int lineOffset = 0; // NO ROI AND NODATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; // Calculation of the warp for the selected row warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < 
dstWidth; w++) { /* * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round * to get the nearest neighbor. This is different from the standard nearest * implementation. */ final int sx = round(warpData[count++]); final int sy = round(warpData[count++]); // If the pixel is outside the input image bounds if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // Nearest interpolation for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) (iter.getSample(sx, sy, b) & 0xFF); } } pixelOffset += pixelStride; } } // ONLY ROI } else if (caseB) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; // Calculation of the warp for the selected row warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { /* * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round * to get the nearest neighbor. This is different from the standard nearest * implementation. */ final int sx = round(warpData[count++]); final int sy = round(warpData[count++]); if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // SG if we falls outside the roi we use the background // value if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) { /* Fill with a background color. 
*/ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // Else the related source pixel is set for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) (iter.getSample(sx, sy, b) & 0xFF); } } } pixelOffset += pixelStride; } } // ONLY NODATA } else if (caseC || (hasROI && hasNoData && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; // Calculation of the warp for the selected row warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { /* * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round * to get the nearest neighbor. This is different from the standard nearest * implementation. */ final int sx = round(warpData[count++]); final int sy = round(warpData[count++]); if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // The related source pixel is set if it isn't a nodata for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = byteLookupTable[b][iter.getSample(sx, sy, b)]; } } pixelOffset += pixelStride; } } // BOTH ROI AND NODATA } else { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; // Calculation of the warp for the selected row warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData); int count = 0; for (int w = 0; w < dstWidth; w++) { /* * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round * to get the nearest neighbor. This is different from the standard nearest * implementation. 
*/ final int sx = round(warpData[count++]); final int sy = round(warpData[count++]); if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // SG if we falls outside the roi we use the background // value if (!(roiBounds.contains(sx, sy) && roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = (byte) backgroundValues[b]; } } } else { // The related source pixel is set if it isn't a // nodata for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = byteLookupTable[b][iter.getSample(sx, sy, b)]; } } } pixelOffset += pixelStride; } } } iter.done(); } @Override protected void computeRectUShort( final PlanarImage src, final RasterAccessor dst, final RandomIter roiIter, final boolean roiContainsTile) { // Random Iterator on the source image bounds final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC); // Initial settings final int minX = src.getMinX(); final int maxX = src.getMaxX(); final int minY = src.getMinY(); final int maxY = src.getMaxY(); final int dstWidth = dst.getWidth(); final int dstHeight = dst.getHeight(); final int dstBands = dst.getNumBands(); final int lineStride = dst.getScanlineStride(); final int pixelStride = dst.getPixelStride(); final int[] bandOffsets = dst.getBandOffsets(); final short[][] data = dst.getShortDataArrays(); final float[] warpData = new float[2 * dstWidth]; int lineOffset = 0; // NO ROI AND NODATA if (caseA || (caseB && roiContainsTile)) { for (int h = 0; h < dstHeight; h++) { int pixelOffset = lineOffset; lineOffset += lineStride; // Calculation of the warp for the selected row warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, 
warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        // If the pixel is outside the input image bounds
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // Nearest interpolation
          for (int b = 0; b < dstBands; b++) {
            data[b][pixelOffset + bandOffsets[b]] = (short) (iter.getSample(sx, sy, b) & 0xFFFF);
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY ROI
  } else if (caseB) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value.
          // NOTE(review): the original code tested roiBounds.contains(sx, sy) twice here
          // (copy-paste duplication); the redundant second call has been removed.
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
              }
            }
          } else {
            // Else the related source pixel is set
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) (iter.getSample(sx, sy, b) & 0xFFFF);
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY NODATA
  } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {
    short inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // The related source pixel is set if it isn't a nodata
          for (int b = 0; b < dstBands; b++) {
            // Input value selected
            inputValue = (short) (iter.getSample(sx, sy, b) & 0xFFFF);
            if (noDataRange.contains(inputValue)) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            } else {
              data[b][pixelOffset + bandOffsets[b]] = inputValue;
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // BOTH ROI AND NODATA
  } else {
    short inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
              }
            }
          } else {
            // The related source pixel is set if it isn't a nodata
            for (int b = 0; b < dstBands; b++) {
              // Input value selected
              inputValue = (short) (iter.getSample(sx, sy, b) & 0xFFFF);
              if (noDataRange.contains(inputValue)) {
                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
              } else {
                data[b][pixelOffset + bandOffsets[b]] = inputValue;
              }
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
  }
  iter.done();
}

/**
 * Computes one destination rectangle of signed-short samples via nearest-neighbor warping; same
 * structure as the unsigned variant but without the 0xFFFF mask.
 */
@Override
protected void computeRectShort(
    final PlanarImage src,
    final RasterAccessor dst,
    final RandomIter roiIter,
    final boolean roiContainsTile) {
  // Random Iterator on the source image bounds
  final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
  // Initial settings
  final int minX = src.getMinX();
  final int maxX = src.getMaxX();
  final int minY = src.getMinY();
  final int maxY = src.getMaxY();
  final int dstWidth = dst.getWidth();
  final int dstHeight = dst.getHeight();
  final int dstBands = dst.getNumBands();
  final int lineStride = dst.getScanlineStride();
  final int pixelStride = dst.getPixelStride();
  final int[] bandOffsets = dst.getBandOffsets();
  final short[][] data = dst.getShortDataArrays();
  final float[] warpData = new float[2 * dstWidth];
  int lineOffset = 0;
  // NO ROI AND NODATA
  if (caseA || (caseB && roiContainsTile)) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        // If the pixel is outside the input image bounds
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // Nearest interpolation
          for (int b = 0; b < dstBands; b++) {
            data[b][pixelOffset + bandOffsets[b]] = (short) iter.getSample(sx, sy, b);
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY ROI
  } else if (caseB) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color.
*/
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
              }
            }
          } else {
            // Else the related source pixel is set
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) iter.getSample(sx, sy, b);
            }
          }
        }
        // Advance to the next destination pixel
        pixelOffset += pixelStride;
      }
    }
    // ONLY NODATA
  } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {
    short inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // The related source pixel is set if it isn't a nodata
          for (int b = 0; b < dstBands; b++) {
            // Input value selected
            inputValue = (short) iter.getSample(sx, sy, b);
            if (noDataRange.contains(inputValue)) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            } else {
              data[b][pixelOffset + bandOffsets[b]] = inputValue;
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // BOTH ROI AND NODATA
  } else {
    short inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
              }
            }
          } else {
            // The related source pixel is set if it isn't a nodata
            for (int b = 0; b < dstBands; b++) {
              // Input value selected
              inputValue = (short) iter.getSample(sx, sy, b);
              if (noDataRange.contains(inputValue)) {
                data[b][pixelOffset + bandOffsets[b]] = (short) backgroundValues[b];
              } else {
                data[b][pixelOffset + bandOffsets[b]] = inputValue;
              }
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
  }
  iter.done();
}

/**
 * Computes one destination rectangle of int samples via nearest-neighbor warping, with the same
 * ROI/nodata handling as the other integral variants.
 */
@Override
protected void computeRectInt(
    final PlanarImage src,
    final RasterAccessor dst,
    final RandomIter roiIter,
    final boolean roiContainsTile) {
  // Random Iterator on the source image bounds
  final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
  // Initial settings
  final int minX = src.getMinX();
  final int maxX = src.getMaxX();
  final int minY = src.getMinY();
  final int maxY = src.getMaxY();
  final int dstWidth = dst.getWidth();
  final int dstHeight = dst.getHeight();
  final int dstBands = dst.getNumBands();
  final int lineStride = dst.getScanlineStride();
  final int pixelStride = dst.getPixelStride();
  final int[] bandOffsets = dst.getBandOffsets();
  final int[][] data = dst.getIntDataArrays();
  final float[] warpData = new float[2 * dstWidth];
  int lineOffset = 0;
  // NO ROI AND NODATA
  if (caseA || (caseB && roiContainsTile)) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
*/
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        // If the pixel is outside the input image bounds
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
            }
          }
        } else {
          // Nearest interpolation
          for (int b = 0; b < dstBands; b++) {
            data[b][pixelOffset + bandOffsets[b]] = iter.getSample(sx, sy, b);
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY ROI
  } else if (caseB) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
              }
            }
          } else {
            // Else the related source pixel is set
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = iter.getSample(sx, sy, b);
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY NODATA
  } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {
    int inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
            }
          }
        } else {
          // The related source pixel is set if it isn't a nodata
          for (int b = 0; b < dstBands; b++) {
            // Input value selected
            inputValue = iter.getSample(sx, sy, b);
            if (noDataRange.contains(inputValue)) {
              data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
            } else {
              data[b][pixelOffset + bandOffsets[b]] = inputValue;
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // BOTH ROI AND NODATA
  } else {
    int inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
              }
            }
          } else {
            // The related source pixel is set if it isn't a nodata
            for (int b = 0; b < dstBands; b++) {
              // Input value selected
              inputValue = iter.getSample(sx, sy, b);
              if (noDataRange.contains(inputValue)) {
                data[b][pixelOffset + bandOffsets[b]] = (int) backgroundValues[b];
              } else {
                data[b][pixelOffset + bandOffsets[b]] = inputValue;
              }
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
  }
  iter.done();
}

/**
 * Computes one destination rectangle of float samples via nearest-neighbor warping, with the same
 * ROI/nodata handling as the integral variants but sampling through getSampleFloat.
 */
@Override
protected void computeRectFloat(
    final PlanarImage src,
    final RasterAccessor dst,
    final RandomIter roiIter,
    final boolean roiContainsTile) {
  // Random Iterator on the source image bounds
  final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
  // Initial settings
  final int minX = src.getMinX();
  final int maxX = src.getMaxX();
  final int minY = src.getMinY();
  final int maxY = src.getMaxY();
  final int dstWidth = dst.getWidth();
  final int dstHeight = dst.getHeight();
  final int dstBands = dst.getNumBands();
  final int lineStride = dst.getScanlineStride();
  final int pixelStride = dst.getPixelStride();
  final int[] bandOffsets = dst.getBandOffsets();
  final float[][] data = dst.getFloatDataArrays();
  final float[] warpData = new float[2 * dstWidth];
  int lineOffset = 0;
  // NO ROI AND NODATA
  if (caseA || (caseB && roiContainsTile)) {
    for (int h = 0; h < dstHeight;
h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        // If the pixel is outside the input image bounds
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
            }
          }
        } else {
          // Nearest interpolation
          for (int b = 0; b < dstBands; b++) {
            data[b][pixelOffset + bandOffsets[b]] = iter.getSampleFloat(sx, sy, b);
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY ROI
  } else if (caseB) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
              }
            }
          } else {
            // Else the related source pixel is set
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = iter.getSampleFloat(sx, sy, b);
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY NODATA
  } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {
    float inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
            }
          }
        } else {
          // The related source pixel is set if it isn't a nodata
          for (int b = 0; b < dstBands; b++) {
            // Input value selected
            inputValue = iter.getSampleFloat(sx, sy, b);
            if (noDataRange.contains(inputValue)) {
              data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
            } else {
              data[b][pixelOffset + bandOffsets[b]] = inputValue;
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // BOTH ROI AND NODATA
  } else {
    float inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
              }
            }
          } else {
            // The related source pixel is set if it isn't a nodata
            for (int b = 0; b < dstBands; b++) {
              // Input value selected
              inputValue = iter.getSampleFloat(sx, sy, b);
              if (noDataRange.contains(inputValue)) {
                data[b][pixelOffset + bandOffsets[b]] = (float) backgroundValues[b];
              } else {
                data[b][pixelOffset + bandOffsets[b]] = inputValue;
              }
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
  }
  iter.done();
}

/**
 * Computes one destination rectangle of double samples via nearest-neighbor warping; background
 * values are written without a cast since backgroundValues is already double-typed.
 */
@Override
protected void computeRectDouble(
    final PlanarImage src,
    final RasterAccessor dst,
    final RandomIter roiIter,
    final boolean roiContainsTile) {
  // Random Iterator on the source image bounds
  final RandomIter iter = RandomIterFactory.create(src, src.getBounds(), TILE_CACHED, ARRAY_CALC);
  // Initial settings
  final int minX = src.getMinX();
  final int maxX = src.getMaxX();
  final int minY = src.getMinY();
  final int maxY = src.getMaxY();
  final int dstWidth = dst.getWidth();
  final int dstHeight = dst.getHeight();
  final int dstBands = dst.getNumBands();
  final int lineStride = dst.getScanlineStride();
  final int pixelStride = dst.getPixelStride();
  final int[] bandOffsets = dst.getBandOffsets();
  final double[][] data = dst.getDoubleDataArrays();
  final float[] warpData = new float[2 * dstWidth];
  int lineOffset = 0;
  // NO ROI AND NODATA
  if (caseA
|| (caseB && roiContainsTile)) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        // If the pixel is outside the input image bounds
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];
            }
          }
        } else {
          // Nearest interpolation
          for (int b = 0; b < dstBands; b++) {
            data[b][pixelOffset + bandOffsets[b]] = iter.getSampleDouble(sx, sy, b);
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY ROI
  } else if (caseB) {
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];
            }
          }
        } else {
          // SG if we falls outside the roi we use the background value
          if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) {
            /* Fill with a background color. */
            if (setBackground) {
              for (int b = 0; b < dstBands; b++) {
                data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];
              }
            }
          } else {
            // Else the related source pixel is set
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = iter.getSampleDouble(sx, sy, b);
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // ONLY NODATA
  } else if (caseC || (hasROI && hasNoData && roiContainsTile)) {
    double inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
         * to get the nearest neighbor. This is different from the standard nearest
         * implementation.
         */
        final int sx = round(warpData[count++]);
        final int sy = round(warpData[count++]);
        if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) {
          /* Fill with a background color. */
          if (setBackground) {
            for (int b = 0; b < dstBands; b++) {
              data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];
            }
          }
        } else {
          // The related source pixel is set if it isn't a nodata
          for (int b = 0; b < dstBands; b++) {
            // Input value selected
            inputValue = iter.getSampleDouble(sx, sy, b);
            if (noDataRange.contains(inputValue)) {
              data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b];
            } else {
              data[b][pixelOffset + bandOffsets[b]] = inputValue;
            }
          }
        }
        pixelOffset += pixelStride;
      }
    }
    // BOTH ROI AND NODATA
  } else {
    double inputValue = 0;
    for (int h = 0; h < dstHeight; h++) {
      int pixelOffset = lineOffset;
      lineOffset += lineStride;
      // Calculation of the warp for the selected row
      warp.warpRect(dst.getX(), dst.getY() + h, dstWidth, 1, warpData);
      int count = 0;
      for (int w = 0; w < dstWidth; w++) {
        /*
         * The warp object subtract 0.5 from backward mapped source coordinate. Need to do a round
This is different from the standard nearest * implementation. */ final int sx = round(warpData[count++]); final int sy = round(warpData[count++]); if ((sx < minX) || (sx >= maxX) || (sy < minY) || (sy >= maxY)) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } else { // SG if we falls outside the roi we use the background // value if (!(roiBounds.contains(sx, sy) && (roiIter.getSample(sx, sy, 0) > 0))) { /* Fill with a background color. */ if (setBackground) { for (int b = 0; b < dstBands; b++) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } } } else { // The related source pixel is set if it isn't a // nodata for (int b = 0; b < dstBands; b++) { // Input value selected inputValue = iter.getSampleDouble(sx, sy, b); if (noDataRange.contains(inputValue)) { data[b][pixelOffset + bandOffsets[b]] = backgroundValues[b]; } else { data[b][pixelOffset + bandOffsets[b]] = inputValue; } } } } pixelOffset += pixelStride; } } } iter.done(); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/warp/WarpOpImage.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /* * JAI-Ext - OpenSource Java Advanced Image Extensions Library http://www.geo-solutions.it/ * Copyright 2014 GeoSolutions Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You may obtain a copy of the License * at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in * writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package org.locationtech.geowave.adapter.raster.adapter.warp; import java.awt.Rectangle; import java.awt.image.DataBuffer; import java.awt.image.RenderedImage; import java.awt.image.WritableRaster; import java.util.Map; import javax.media.jai.BorderExtender; import javax.media.jai.ImageLayout; import javax.media.jai.Interpolation; import javax.media.jai.PlanarImage; import javax.media.jai.ROI; import javax.media.jai.RasterAccessor; import javax.media.jai.RasterFormatTag; import javax.media.jai.Warp; import javax.media.jai.iterator.RandomIter; import com.sun.media.jai.util.ImageUtil; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import it.geosolutions.jaiext.iterators.RandomIterFactory; import it.geosolutions.jaiext.range.Range; /** * This is code entirely intended to get around an issue on line 265 of WarpOpImage in jai-ext. The * following code does not work if the source is significant lower resolution than the destination * and seems unnecessary in general: * *

roiTile = roi.intersect(new ROIShape(srcRectExpanded)); */ @SuppressFBWarnings public abstract class WarpOpImage extends it.geosolutions.jaiext.warp.WarpOpImage { public WarpOpImage( final RenderedImage source, final ImageLayout layout, final Map configuration, final boolean cobbleSources, final BorderExtender extender, final Interpolation interp, final Warp warp, final double[] backgroundValues, final ROI roi, final Range noData) { super( source, layout, configuration, cobbleSources, extender, interp, warp, backgroundValues, roi, noData); } /** * Warps a rectangle. If ROI is present, the intersection between ROI and tile bounds is * calculated; The result ROI will be used for calculations inside the computeRect() method. */ @Override protected void computeRect( final PlanarImage[] sources, final WritableRaster dest, final Rectangle destRect) { // Retrieve format tags. final RasterFormatTag[] formatTags = getFormatTags(); final RasterAccessor dst = new RasterAccessor(dest, destRect, formatTags[1], getColorModel()); RandomIter roiIter = null; boolean roiContainsTile = false; boolean roiDisjointTile = false; // If a ROI is present, then only the part contained inside the current // tile bounds is taken. 
if (hasROI) { final Rectangle srcRectExpanded = mapDestRect(destRect, 0); // The tile dimension is extended for avoiding border errors srcRectExpanded.setRect( srcRectExpanded.getMinX() - leftPad, srcRectExpanded.getMinY() - topPad, srcRectExpanded.getWidth() + rightPad + leftPad, srcRectExpanded.getHeight() + bottomPad + topPad); if (!roiBounds.intersects(srcRectExpanded)) { roiDisjointTile = true; } else { roiContainsTile = roi.contains(srcRectExpanded); if (!roiContainsTile) { if (!roi.intersects(srcRectExpanded)) { roiDisjointTile = true; } else { final PlanarImage roiIMG = getImage(); roiIter = RandomIterFactory.create(roiIMG, null, TILE_CACHED, ARRAY_CALC); } } } } if (!hasROI || !roiDisjointTile) { switch (dst.getDataType()) { case DataBuffer.TYPE_BYTE: computeRectByte(sources[0], dst, roiIter, roiContainsTile); break; case DataBuffer.TYPE_USHORT: computeRectUShort(sources[0], dst, roiIter, roiContainsTile); break; case DataBuffer.TYPE_SHORT: computeRectShort(sources[0], dst, roiIter, roiContainsTile); break; case DataBuffer.TYPE_INT: computeRectInt(sources[0], dst, roiIter, roiContainsTile); break; case DataBuffer.TYPE_FLOAT: computeRectFloat(sources[0], dst, roiIter, roiContainsTile); break; case DataBuffer.TYPE_DOUBLE: computeRectDouble(sources[0], dst, roiIter, roiContainsTile); break; } // After the calculations, the output data are copied into the // WritableRaster if (dst.isDataCopy()) { dst.clampDataArrays(); dst.copyDataToRaster(); } } else { // If the tile is outside the ROI, then the destination Raster is // set to backgroundValues if (setBackground) { ImageUtil.fillBackground(dest, destRect, backgroundValues); } } } /** * This method provides a lazy initialization of the image associated to the ROI. The method uses * the Double-checked locking in order to maintain thread-safety * * @return */ private PlanarImage getImage() { PlanarImage img = roiImage; // HP Fortify "Double-Checked Locking" false positive // This is not a security issue. 
We are aware of the extremely small // potential for this to be called twice, but that is not an // inconsistency and is more than worth the performance gains if (img == null) { synchronized (this) { img = roiImage; if (img == null) { roiImage = img = roi.getAsImage(); } } } return img; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/adapter/warp/WarpRIF.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ /* * JAI-Ext - OpenSource Java Advanced Image Extensions Library http://www.geo-solutions.it/ * Copyright 2014 GeoSolutions Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You may obtain a copy of the License * at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in * writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package org.locationtech.geowave.adapter.raster.adapter.warp; import java.awt.RenderingHints; import java.awt.image.RenderedImage; import java.awt.image.renderable.ParameterBlock; import java.awt.image.renderable.RenderedImageFactory; import javax.media.jai.ImageLayout; import javax.media.jai.Interpolation; import javax.media.jai.JAI; import javax.media.jai.OperationRegistry; import javax.media.jai.PlanarImage; import javax.media.jai.ROI; import javax.media.jai.Warp; import javax.media.jai.registry.RenderedRegistryMode; import com.sun.media.jai.opimage.RIFUtil; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import it.geosolutions.jaiext.interpolators.InterpolationNearest; import it.geosolutions.jaiext.range.Range; import it.geosolutions.jaiext.range.RangeFactory; /** * This is code entirely intended to get around an issue on line 265 of WarpOpImage in jai-ext. 
The * following code does not work if the source is significant lower resolution than the destination * and seems unnecessary in general: * *

roiTile = roi.intersect(new ROIShape(srcRectExpanded)); */ @SuppressFBWarnings public class WarpRIF extends it.geosolutions.jaiext.warp.WarpRIF { static boolean registered = false; public static synchronized void register(final boolean force) { if (!registered || force) { final OperationRegistry registry = JAI.getDefaultInstance().getOperationRegistry(); final RenderedImageFactory rif = new WarpRIF(); registry.registerFactory( RenderedRegistryMode.MODE_NAME, "Warp", "it.geosolutions.jaiext", rif); registered = true; } } /** Constructor. */ public WarpRIF() {} /** * Creates a new instance of warp operator according to the warp object and interpolation method. * * @param paramBlock The warp and interpolation objects. */ @Override public RenderedImage create(final ParameterBlock paramBlock, final RenderingHints renderHints) { final Interpolation interp = (Interpolation) paramBlock.getObjectParameter(1); if ((interp instanceof InterpolationNearest) || (interp instanceof javax.media.jai.InterpolationNearest)) { // Get ImageLayout from renderHints if any. 
final ImageLayout layout = RIFUtil.getImageLayoutHint(renderHints); RenderedImage source = paramBlock.getRenderedSource(0); final Warp warp = (Warp) paramBlock.getObjectParameter(0); final double[] backgroundValues = (double[]) paramBlock.getObjectParameter(2); ROI roi = null; final Object roi_ = paramBlock.getObjectParameter(3); if (roi_ instanceof ROI) { roi = (ROI) roi_; final PlanarImage temp = PlanarImage.wrapRenderedImage(source); temp.setProperty("ROI", roi); source = temp; } Range noData = (Range) paramBlock.getObjectParameter(4); noData = RangeFactory.convert(noData, source.getSampleModel().getDataType()); return new WarpNearestOpImage( source, renderHints, layout, warp, interp, roi, noData, backgroundValues); } return super.create(paramBlock, renderHints); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/DeletePyramidLevelCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.operations; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.ArrayUtils; import org.locationtech.geowave.adapter.raster.Resolution; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic; import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy; import org.locationtech.geowave.core.index.HierarchicalNumericIndexStrategy.SubStrategy; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import 
org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic; import org.locationtech.geowave.core.store.statistics.index.PartitionsStatistic.PartitionsValue; import org.locationtech.geowave.core.store.util.CompoundHierarchicalIndexStrategyWrapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "deletelevel", parentOperation = RasterSection.class) @Parameters(commandDescription = "Delete a pyramid level of a raster layer") public class DeletePyramidLevelCommand extends DefaultOperation implements Command { private static final Logger LOGGER = LoggerFactory.getLogger(DeletePyramidLevelCommand.class); @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter(names = "--level", description = "The raster pyramid level to delete", required = true) private Integer level = null; @Parameter( names = "--coverage", description = "The raster coverage name (required if store has multiple coverages)") private String coverageName = null; private DataStorePluginOptions inputStoreOptions = null; @Override public void execute(final OperationParams params) throws Exception { run(params); } public void setLevel(final Integer level) { this.level = level; } public void setCoverageName(final String coverageName) { this.coverageName = coverageName; } public void run(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } final String inputStoreName = parameters.get(0); // Attempt to load store. 
inputStoreOptions = CLIUtils.loadStore(inputStoreName, getGeoWaveConfigFile(params), params.getConsole()); final DataStore store = inputStoreOptions.createDataStore(); RasterDataAdapter adapter = null; for (final DataTypeAdapter type : store.getTypes()) { if (isRaster(type) && ((coverageName == null) || coverageName.equals(adapter.getTypeName()))) { if (adapter != null) { LOGGER.error( "Store has multiple coverages. Must explicitly choose one with --coverage option."); return; } adapter = (RasterDataAdapter) type; } } if (adapter == null) { LOGGER.error("Store has no coverages or coverage name not found."); return; } boolean found = false; Resolution res = null; Index i = null; for (final Index index : store.getIndices(adapter.getTypeName())) { final HierarchicalNumericIndexStrategy indexStrategy = CompoundHierarchicalIndexStrategyWrapper.findHierarchicalStrategy( index.getIndexStrategy()); if (indexStrategy != null) { for (final SubStrategy s : indexStrategy.getSubStrategies()) { if ((s.getPrefix().length == 1) && (s.getPrefix()[0] == level)) { LOGGER.info("Deleting from index " + index.getName()); final double[] tileRes = s.getIndexStrategy().getHighestPrecisionIdRangePerDimension(); final double[] pixelRes = new double[tileRes.length]; for (int d = 0; d < tileRes.length; d++) { pixelRes[d] = tileRes[d] / adapter.getTileSize(); } found = true; i = index; res = new Resolution(pixelRes); break; } } } if (found) { break; } } if (!found) { LOGGER.error("Store has no indices supporting pyramids."); return; } final byte[][] predefinedSplits = i.getIndexStrategy().getPredefinedSplits(); // this should account for hash partitioning if used final List partitions = new ArrayList<>(); if ((predefinedSplits != null) && (predefinedSplits.length > 0)) { for (final byte[] split : predefinedSplits) { partitions.add(new ByteArray(ArrayUtils.add(split, level.byteValue()))); } } else { partitions.add(new ByteArray(new byte[] {level.byteValue()})); } // delete the resolution from 
the overview, delete the partitions, and delete the data if (inputStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) { final DataStatisticsStore statsStore = inputStoreOptions.createDataStatisticsStore(); boolean overviewStatsFound = false; boolean partitionStatsFound = false; try (CloseableIterator>> it = statsStore.getDataTypeStatistics(adapter, RasterOverviewStatistic.STATS_TYPE, null)) { while (it.hasNext()) { final Statistic> next = it.next(); if ((next instanceof RasterOverviewStatistic) && (next.getBinningStrategy() == null)) { final RasterOverviewStatistic statistic = (RasterOverviewStatistic) next; final RasterOverviewValue value = statsStore.getStatisticValue(statistic); if (!value.removeResolution(res)) { LOGGER.error("Unable to remove resolution for pyramid level " + level); return; } statsStore.setStatisticValue(statistic, value); overviewStatsFound = true; } } } if (!overviewStatsFound) { LOGGER.error("Unable to find overview stats for coverage " + adapter.getTypeName()); return; } try (CloseableIterator>> it = statsStore.getIndexStatistics(i, PartitionsStatistic.STATS_TYPE, null)) { while (it.hasNext()) { final Statistic> next = it.next(); if (next instanceof PartitionsStatistic) { if ((next.getBinningStrategy() != null) && (next.getBinningStrategy() instanceof DataTypeBinningStrategy)) { final PartitionsStatistic statistic = (PartitionsStatistic) next; final PartitionsValue value = statsStore.getStatisticValue( (PartitionsStatistic) next, DataTypeBinningStrategy.getBin(adapter)); for (final ByteArray p : partitions) { if (!value.getValue().remove(p)) { LOGGER.error( "Unable to remove partition " + p.getHexString() + " for pyramid level " + level); return; } } statsStore.setStatisticValue( statistic, value, DataTypeBinningStrategy.getBin(adapter)); partitionStatsFound = true; } } } } if (!partitionStatsFound) { LOGGER.error( "Unable to find partition stats for coverage " + adapter.getTypeName() + " and index " + 
i.getName()); return; } } for (final ByteArray p : partitions) { store.delete( QueryBuilder.newBuilder().constraints( QueryBuilder.newBuilder().constraintsFactory().prefix( p.getBytes(), null)).addTypeName(adapter.getTypeName()).indexName(i.getName()).build()); } } private static boolean isRaster(final DataTypeAdapter adapter) { if (adapter instanceof InternalDataAdapter) { return isRaster(((InternalDataAdapter) adapter).getAdapter()); } return adapter instanceof RasterDataAdapter; } public List getParameters() { return parameters; } public void setParameters(final String inputStore) { parameters = new ArrayList<>(); parameters.add(inputStore); } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/InstallGdalCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.operations; import java.nio.file.Path; import java.nio.file.Paths; import org.locationtech.geowave.adapter.raster.plugin.gdal.InstallGdal; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.store.util.DataStoreUtils; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "installgdal", parentOperation = RasterSection.class) @Parameters(commandDescription = "Install GDAL by downloading native libraries") public class InstallGdalCommand extends DefaultOperation implements Command { private static final String DEFAULT_DOWNLOAD_DIR = "lib/utilities/gdal"; @Parameter(names = "--dir", description = "The download directory", required = false) private String downloadDirectory = null; @Override public void execute(final OperationParams params) throws Exception { if (downloadDirectory == null) { final String homeDirectory = System.getProperty("geowave.home", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY); final Path path = Paths.get(homeDirectory, DEFAULT_DOWNLOAD_DIR); downloadDirectory = path.toString(); } InstallGdal.main(new String[] {downloadDirectory}); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/RasterOperationCLIProvider.java ================================================ /** * Copyright (c) 
2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.operations; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class RasterOperationCLIProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { RasterSection.class, ResizeMRCommand.class, InstallGdalCommand.class, DeletePyramidLevelCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/RasterSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.operations; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "raster", parentOperation = GeoWaveTopLevelSection.class) @Parameters(commandDescription = "Operations to perform transformations on raster data in GeoWave") public class RasterSection extends DefaultOperation { } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/ResizeMRCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.operations; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions; import org.locationtech.geowave.adapter.raster.resize.RasterTileResizeJobRunner; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand; import org.locationtech.geowave.mapreduce.operations.HdfsHostPortConverter; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "resizemr", parentOperation = RasterSection.class) @Parameters(commandDescription = "Use MapReduce to resize raster tiles") public class ResizeMRCommand extends DefaultOperation implements Command { @Parameter(description = " ") private List parameters = new ArrayList<>(); @ParametersDelegate private RasterTileResizeCommandLineOptions options = new RasterTileResizeCommandLineOptions(); @Parameter( names = "--hdfsHostPort", description = "he hdfs host port", converter = 
HdfsHostPortConverter.class) private String hdfsHostPort; @Parameter( names = "--jobSubmissionHostPort", description = "The job submission tracker", required = true) private String jobTrackerOrResourceManHostPort; private DataStorePluginOptions inputStoreOptions = null; private DataStorePluginOptions outputStoreOptions = null; @Override public void execute(final OperationParams params) throws Exception { createRunner(params).runJob(); } public RasterTileResizeJobRunner createRunner(final OperationParams params) { // Ensure we have all the required arguments if (parameters.size() != 2) { throw new ParameterException("Requires arguments: "); } final String inputStoreName = parameters.get(0); final String outputStoreName = parameters.get(1); // Config file final File configFile = getGeoWaveConfigFile(params); // Attempt to load input store. inputStoreOptions = CLIUtils.loadStore(inputStoreName, configFile, params.getConsole()); // Attempt to load output store. outputStoreOptions = CLIUtils.loadStore(outputStoreName, configFile, params.getConsole()); if (hdfsHostPort == null) { final Properties configProperties = ConfigOptions.loadProperties(configFile); final String hdfsFSUrl = ConfigHDFSCommand.getHdfsUrl(configProperties); hdfsHostPort = hdfsFSUrl; } final RasterTileResizeJobRunner runner = new RasterTileResizeJobRunner( inputStoreOptions, outputStoreOptions, options, hdfsHostPort, jobTrackerOrResourceManHostPort); return runner; } public List getParameters() { return parameters; } public void setParameters(final String inputStore, final String outputStore) { parameters = new ArrayList<>(); parameters.add(inputStore); parameters.add(outputStore); } public RasterTileResizeCommandLineOptions getOptions() { return options; } public void setOptions(final RasterTileResizeCommandLineOptions options) { this.options = options; } public DataStorePluginOptions getInputStoreOptions() { return inputStoreOptions; } public DataStorePluginOptions getOutputStoreOptions() { return 
outputStoreOptions; } public String getHdfsHostPort() { return hdfsHostPort; } public void setHdfsHostPort(final String hdfsHostPort) { this.hdfsHostPort = hdfsHostPort; } public String getJobTrackerOrResourceManHostPort() { return jobTrackerOrResourceManHostPort; } public void setJobTrackerOrResourceManHostPort(final String jobTrackerOrResourceManHostPort) { this.jobTrackerOrResourceManHostPort = jobTrackerOrResourceManHostPort; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/operations/options/RasterTileResizeCommandLineOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.operations.options; import com.beust.jcommander.Parameter; public class RasterTileResizeCommandLineOptions { @Parameter( names = "--inputCoverageName", description = "The name of the input raster coverage", required = true) private String inputCoverageName; @Parameter( names = "--outputCoverageName", description = "The out output raster coverage name", required = true) private String outputCoverageName; @Parameter(names = "--minSplits", description = "The min partitions for the input data") private Integer minSplits; @Parameter(names = "--maxSplits", description = "The max partitions for the input data") private Integer maxSplits; @Parameter(names = "--outputTileSize", description = "The tile size to output", required = true) private Integer outputTileSize; @Parameter(names = "--indexName", description = "The index that the input raster is stored in") private String indexName; // Default constructor public RasterTileResizeCommandLineOptions() {} public RasterTileResizeCommandLineOptions( final String inputCoverageName, final String outputCoverageName, final Integer minSplits, final Integer maxSplits, final Integer outputTileSize, final String indexName) { this.inputCoverageName = inputCoverageName; this.outputCoverageName = outputCoverageName; this.minSplits = minSplits; this.maxSplits = maxSplits; this.outputTileSize = outputTileSize; this.indexName = indexName; } public String getInputCoverageName() { return inputCoverageName; } public String getOutputCoverageName() { return outputCoverageName; } public Integer getMinSplits() { return minSplits; } public Integer 
getMaxSplits() { return maxSplits; } public Integer getOutputTileSize() { return outputTileSize; } public String getIndexName() { return indexName; } public void setInputCoverageName(final String inputCoverageName) { this.inputCoverageName = inputCoverageName; } public void setOutputCoverageName(final String outputCoverageName) { this.outputCoverageName = outputCoverageName; } public void setMinSplits(final Integer minSplits) { this.minSplits = minSplits; } public void setMaxSplits(final Integer maxSplits) { this.maxSplits = maxSplits; } public void setOutputTileSize(final Integer outputTileSize) { this.outputTileSize = outputTileSize; } public void setIndexName(final String indexName) { this.indexName = indexName; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveGTRasterFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin; import java.awt.Color; import java.io.File; import java.net.URL; import java.net.URLDecoder; import java.util.HashMap; import java.util.Locale; import org.geotools.coverage.grid.io.AbstractGridCoverage2DReader; import org.geotools.coverage.grid.io.AbstractGridFormat; import org.geotools.coverage.grid.io.imageio.GeoToolsWriteParams; import org.geotools.parameter.DefaultParameterDescriptor; import org.geotools.parameter.DefaultParameterDescriptorGroup; import org.geotools.parameter.ParameterGroup; import org.geotools.referencing.CRS; import org.geotools.util.factory.Hints; import org.locationtech.geowave.core.cli.VersionUtils; import org.opengis.coverage.grid.Format; import org.opengis.coverage.grid.GridCoverageWriter; import org.opengis.parameter.GeneralParameterDescriptor; import org.opengis.parameter.ParameterDescriptor; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GeoWaveGTRasterFormat extends AbstractGridFormat implements Format { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGTRasterFormat.class); public static final ParameterDescriptor OUTPUT_TRANSPARENT_COLOR = new DefaultParameterDescriptor<>("OutputTransparentColor", Color.class, null, null); public static final CoordinateReferenceSystem DEFAULT_CRS; static { try { DEFAULT_CRS = CRS.decode("EPSG:4326", true); } catch (final FactoryException e) { LOGGER.error("Unable to decode EPSG:4326 CRS", e); throw new RuntimeException("Unable to initialize 
EPSG:4326 CRS"); } } public GeoWaveGTRasterFormat() { super(); setInfo(); } /** Sets the metadata information. */ private void setInfo() { final HashMap info = new HashMap<>(); info.put("name", "GeoWaveRasterFormat"); info.put("description", "Image mosaicking and pyramiding in GeoWave"); info.put("vendor", "GeoWave"); info.put("docURL", "https://github.com/locationtech/geowave"); info.put("version", VersionUtils.getVersion()); mInfo = info; // reading parameters readParameters = new ParameterGroup( new DefaultParameterDescriptorGroup( mInfo, new GeneralParameterDescriptor[] { READ_GRIDGEOMETRY2D, OUTPUT_TRANSPARENT_COLOR, BACKGROUND_COLOR})); // reading parameters writeParameters = null; } @Override public AbstractGridCoverage2DReader getReader(final Object source) { return getReader(source, null); } @Override public AbstractGridCoverage2DReader getReader(final Object source, final Hints hints) { try { return new GeoWaveRasterReader(source, hints); } catch (final Exception e) { LOGGER.warn("Cannot create geowave raster reader", e); return null; } } @Override public GridCoverageWriter getWriter(final Object destination) { throw new UnsupportedOperationException("This plugin does not support writing."); } @Override public boolean accepts(final Object source, final Hints hints) { if (source == null) { return false; } if (isParamList(source)) { return true; } return validateURL(source); } @Override public GeoToolsWriteParams getDefaultImageIOWriteParameters() { throw new UnsupportedOperationException("This plugin does not support writing."); } @Override public GridCoverageWriter getWriter(final Object destination, final Hints hints) { throw new UnsupportedOperationException("This plugin does not support writing."); } public static boolean isParamList(final Object source) { return ((source instanceof String) && source.toString().contains("=") && source.toString().contains(";")); } public static URL getURLFromSource(final Object source) { if (source == null) { return 
null; } URL sourceURL = null; try { if (source instanceof File) { sourceURL = ((File) source).toURI().toURL(); } else if (source instanceof URL) { sourceURL = (URL) source; } else if (source instanceof String) { final File tempFile = new File((String) source); if (tempFile.exists()) { sourceURL = tempFile.toURI().toURL(); } else { sourceURL = new URL(URLDecoder.decode((String) source, "UTF8")); } } } catch (final Exception e) { LOGGER.warn("Unable to read source URL", e); return null; } return sourceURL; } public static boolean validateURL(final Object source) { final URL sourceUrl = getURLFromSource(source); if (sourceUrl == null) { return false; } if (!sourceUrl.getPath().toLowerCase(Locale.ENGLISH).endsWith(".xml")) { return false; } // TODO figure out additional ways to validate return true; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveGTRasterFormatFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin; import java.awt.RenderingHints.Key; import java.util.Map; import org.geotools.coverage.grid.io.AbstractGridFormat; import org.geotools.coverage.grid.io.GridFormatFactorySpi; public class GeoWaveGTRasterFormatFactory implements GridFormatFactorySpi { @Override public boolean isAvailable() { return true; } @Override public Map getImplementationHints() { return null; } @Override public AbstractGridFormat createFormat() { return new GeoWaveGTRasterFormat(); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveRasterConfig.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.Locale; import java.util.Map; import javax.media.jai.Interpolation; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI; import org.locationtech.geowave.adapter.auth.EmptyAuthorizationFactory; import org.locationtech.geowave.core.index.SPIServiceRegistry; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.config.ConfigUtils; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; public class GeoWaveRasterConfig { private static final 
Logger LOGGER = LoggerFactory.getLogger(GeoWaveRasterConfig.class); private static final Map CONFIG_CACHE = new Hashtable<>(); protected static enum ConfigParameter { // the following two are optional parameters that will override the // behavior of tile mosaicing that is already set within each adapter INTERPOLATION("interpolationOverride"), SCALE_TO_8BIT("scaleTo8Bit"), EQUALIZE_HISTOGRAM("equalizeHistogramOverride"), AUTHORIZATION_PROVIDER("authorizationProvider"), AUTHORIZATION_URL("authorizationUrl"); private String configName; private ConfigParameter(final String configName) { this.configName = configName; } public String getConfigName() { return configName; } } private Map storeConfigObj; private StoreFactoryFamilySpi factoryFamily; private DataStore dataStore; private IndexStore indexStore; private PersistentAdapterStore adapterStore; private InternalAdapterStore internalAdapterStore; private DataStatisticsStore dataStatisticsStore; private AdapterIndexMappingStore adapterIndexMappingStore; private AuthorizationFactorySPI authorizationFactory; private URL authorizationURL; private Boolean equalizeHistogramOverride = null; private Boolean scaleTo8Bit = null; private Integer interpolationOverride = null; protected GeoWaveRasterConfig() {} public static GeoWaveRasterConfig createConfig( final Map dataStoreConfig, final String geowaveNamespace) { return createConfig(dataStoreConfig, geowaveNamespace, null, null, null, null, null); } public static GeoWaveRasterConfig createConfig( final Map dataStoreConfig, final String geowaveNamespace, final Boolean equalizeHistogramOverride, final Boolean scaleTo8Bit, final Integer interpolationOverride, final String authorizationProvider, final URL authorizationURL) { final GeoWaveRasterConfig result = new GeoWaveRasterConfig(); result.equalizeHistogramOverride = equalizeHistogramOverride; result.interpolationOverride = interpolationOverride; result.scaleTo8Bit = scaleTo8Bit; synchronized (result) { result.storeConfigObj = 
dataStoreConfig; result.factoryFamily = GeoWaveStoreFinder.findStoreFamily(result.storeConfigObj); } result.authorizationFactory = getAuthorizationFactory(authorizationProvider); result.authorizationURL = authorizationURL; return result; } public static AuthorizationFactorySPI getAuthorizationFactory(final String authProviderName) { if (authProviderName != null) { final Iterator authIt = getAuthorizationFactoryList(); while (authIt.hasNext()) { final AuthorizationFactorySPI authFactory = authIt.next(); if (authProviderName.equals(authFactory.toString())) { return authFactory; } } } return new EmptyAuthorizationFactory(); } private static Iterator getAuthorizationFactoryList() { return new SPIServiceRegistry(GeoWaveRasterConfig.class).load(AuthorizationFactorySPI.class); } public static URL getAuthorizationURL(final String authorizationURL) { if (authorizationURL != null) { try { return new URL(authorizationURL.toString()); } catch (final MalformedURLException e) { LOGGER.warn("Accumulo Plugin: malformed Authorization Service URL " + authorizationURL, e); } } return null; } public static GeoWaveRasterConfig readFromConfigParams(final String configParams) throws NullPointerException { GeoWaveRasterConfig result = CONFIG_CACHE.get(configParams); if (result != null) { return result; } result = new GeoWaveRasterConfig(); CONFIG_CACHE.put(configParams, result); final Map params = StringUtils.parseParams(configParams); parseParamsIntoRasterConfig(result, params); return result; } public static GeoWaveRasterConfig readFromURL(final URL xmlURL) throws IOException, ParserConfigurationException, SAXException { GeoWaveRasterConfig result = CONFIG_CACHE.get(xmlURL.toString()); if (result != null) { return result; } result = new GeoWaveRasterConfig(); CONFIG_CACHE.put(xmlURL.toString(), result); final Map params = getParamsFromURL(xmlURL); parseParamsIntoRasterConfig(result, params); return result; } private static Map getParamsFromURL(final URL xmlURL) throws IOException, 
ParserConfigurationException, SAXException { try (final InputStream in = xmlURL.openStream()) { final InputSource input = new InputSource(xmlURL.toString()); final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setIgnoringElementContentWhitespace(true); dbf.setIgnoringComments(true); dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); // HP Fortify "XML External Entity Injection" fix. // These lines are the recommended fix for // protecting a Java DocumentBuilderFactory from XXE. final String DISALLOW_DOCTYPE_DECL = "http://apache.org/xml/features/disallow-doctype-decl"; dbf.setFeature(DISALLOW_DOCTYPE_DECL, true); final DocumentBuilder db = dbf.newDocumentBuilder(); // db.setEntityResolver(new ConfigEntityResolver(xmlURL)); final Document dom = db.parse(input); in.close(); final NodeList children = dom.getChildNodes().item(0).getChildNodes(); final Map configParams = new HashMap<>(); for (int i = 0; i < children.getLength(); i++) { final Node child = children.item(i); configParams.put(child.getNodeName(), child.getTextContent()); } return configParams; } } private static void parseParamsIntoRasterConfig( final GeoWaveRasterConfig result, final Map params) { final Map storeParams = new HashMap<>(params); // isolate just the dynamic store params for (final ConfigParameter param : ConfigParameter.values()) { storeParams.remove(param.getConfigName()); } // findbugs complaint requires this synchronization synchronized (result) { result.storeConfigObj = storeParams; result.factoryFamily = GeoWaveStoreFinder.findStoreFamily(result.storeConfigObj); } final String equalizeHistogram = params.get(ConfigParameter.EQUALIZE_HISTOGRAM.getConfigName()); if (equalizeHistogram != null) { if (equalizeHistogram.trim().toLowerCase(Locale.ENGLISH).equals("true")) { result.equalizeHistogramOverride = true; } else { result.equalizeHistogramOverride = false; } } final String scaleTo8Bit = params.get(ConfigParameter.SCALE_TO_8BIT.getConfigName()); if 
(scaleTo8Bit != null) { if (scaleTo8Bit.trim().toLowerCase(Locale.ENGLISH).equals("true")) { result.scaleTo8Bit = true; } else { result.scaleTo8Bit = false; } } if (params.containsKey(ConfigParameter.INTERPOLATION.getConfigName())) { result.interpolationOverride = Integer.parseInt(params.get(ConfigParameter.INTERPOLATION.getConfigName())); } result.authorizationFactory = getAuthorizationFactory(params.get(ConfigParameter.AUTHORIZATION_PROVIDER.getConfigName())); result.authorizationURL = getAuthorizationURL(params.get(ConfigParameter.AUTHORIZATION_URL.getConfigName())); } protected AuthorizationFactorySPI getAuthorizationFactory() { return authorizationFactory; } protected URL getAuthorizationURL() { return authorizationURL; } public synchronized DataStore getDataStore() { if (dataStore == null) { dataStore = factoryFamily.getDataStoreFactory().createStore( ConfigUtils.populateOptionsFromList( factoryFamily.getDataStoreFactory().createOptionsInstance(), storeConfigObj)); } return dataStore; } public synchronized PersistentAdapterStore getAdapterStore() { if (adapterStore == null) { adapterStore = factoryFamily.getAdapterStoreFactory().createStore( ConfigUtils.populateOptionsFromList( factoryFamily.getAdapterStoreFactory().createOptionsInstance(), storeConfigObj)); } return adapterStore; } public synchronized InternalAdapterStore getInternalAdapterStore() { if (internalAdapterStore == null) { internalAdapterStore = factoryFamily.getInternalAdapterStoreFactory().createStore( ConfigUtils.populateOptionsFromList( factoryFamily.getInternalAdapterStoreFactory().createOptionsInstance(), storeConfigObj)); } return internalAdapterStore; } public synchronized IndexStore getIndexStore() { if (indexStore == null) { indexStore = factoryFamily.getIndexStoreFactory().createStore( ConfigUtils.populateOptionsFromList( factoryFamily.getIndexStoreFactory().createOptionsInstance(), storeConfigObj)); } return indexStore; } public synchronized DataStatisticsStore 
getDataStatisticsStore() { if (dataStatisticsStore == null) { dataStatisticsStore = factoryFamily.getDataStatisticsStoreFactory().createStore( ConfigUtils.populateOptionsFromList( factoryFamily.getDataStatisticsStoreFactory().createOptionsInstance(), storeConfigObj)); } return dataStatisticsStore; } public synchronized AdapterIndexMappingStore getAdapterIndexMappingStore() { if (adapterIndexMappingStore == null) { adapterIndexMappingStore = factoryFamily.getAdapterIndexMappingStoreFactory().createStore( ConfigUtils.populateOptionsFromList( factoryFamily.getDataStatisticsStoreFactory().createOptionsInstance(), storeConfigObj)); } return adapterIndexMappingStore; } public boolean isInterpolationOverrideSet() { return (interpolationOverride != null); } public Interpolation getInterpolationOverride() { if (!isInterpolationOverrideSet()) { throw new IllegalStateException("Interpolation Override is not set for this config"); } return Interpolation.getInstance(interpolationOverride); } public boolean isScaleTo8BitSet() { return (scaleTo8Bit != null); } public boolean isScaleTo8Bit() { if (!isScaleTo8BitSet()) { throw new IllegalStateException("Scale To 8-bit is not set for this config"); } return scaleTo8Bit; } public boolean isEqualizeHistogramOverrideSet() { return (equalizeHistogramOverride != null); } public boolean isEqualizeHistogramOverride() { if (!isEqualizeHistogramOverrideSet()) { throw new IllegalStateException("Equalize Histogram is not set for this config"); } return equalizeHistogramOverride; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveRasterReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin; import java.awt.Color; import java.awt.Rectangle; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.imageio.ImageReadParam; import javax.media.jai.Histogram; import javax.media.jai.ImageLayout; import javax.media.jai.Interpolation; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.coverage.grid.GridEnvelope2D; import org.geotools.coverage.grid.GridGeometry2D; import org.geotools.coverage.grid.io.AbstractGridCoverage2DReader; import org.geotools.coverage.grid.io.AbstractGridFormat; import org.geotools.coverage.grid.io.GridCoverage2DReader; import org.geotools.coverage.grid.io.OverviewPolicy; import org.geotools.data.DataSourceException; import org.geotools.geometry.GeneralEnvelope; import org.geotools.parameter.Parameter; import org.geotools.referencing.CRS; import org.geotools.referencing.operation.BufferedCoordinateOperationFactory; import org.geotools.util.Utilities; import org.geotools.util.factory.Hints; import org.locationtech.geowave.adapter.auth.AuthorizationSPI; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.adapter.raster.Resolution; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import 
org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic; import org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic.RasterBoundingBoxValue; import org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic; import org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic.RasterHistogramValue; import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic; import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIterator.Wrapper; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.coverage.grid.Format; import org.opengis.coverage.grid.GridCoverage; import 
org.opengis.coverage.grid.GridEnvelope; import org.opengis.parameter.GeneralParameterValue; import org.opengis.parameter.ParameterDescriptor; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.AxisDirection; import org.opengis.referencing.cs.CoordinateSystem; import org.opengis.referencing.cs.CoordinateSystemAxis; import org.opengis.referencing.datum.PixelInCell; import org.opengis.referencing.operation.CoordinateOperationFactory; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** the reader gets the connection info and returns a grid coverage for every data adapter */ public class GeoWaveRasterReader extends AbstractGridCoverage2DReader implements GridCoverage2DReader { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRasterReader.class); private GeoWaveRasterConfig config; private PersistentAdapterStore geowaveAdapterStore; private InternalAdapterStore geowaveInternalAdapterStore; private DataStatisticsStore geowaveStatisticsStore; private DataStore geowaveDataStore; private IndexStore geowaveIndexStore; private AdapterIndexMappingStore geowaveAdapterIndexMappingStore; protected Map crsCache = new HashMap<>(); protected CoordinateReferenceSystem defaultCrs; private AuthorizationSPI authorizationSPI; protected static final CoordinateOperationFactory OPERATION_FACTORY = new BufferedCoordinateOperationFactory(new Hints(Hints.LENIENT_DATUM_SHIFT, Boolean.TRUE)); private static Set UPDirections; private static Set LEFTDirections; // class initializer static { LEFTDirections = new HashSet<>(); LEFTDirections.add(AxisDirection.DISPLAY_LEFT); LEFTDirections.add(AxisDirection.EAST); LEFTDirections.add(AxisDirection.GEOCENTRIC_X); LEFTDirections.add(AxisDirection.COLUMN_POSITIVE); UPDirections = new HashSet<>(); UPDirections.add(AxisDirection.DISPLAY_UP); 
UPDirections.add(AxisDirection.NORTH); UPDirections.add(AxisDirection.GEOCENTRIC_Y); UPDirections.add(AxisDirection.ROW_POSITIVE); } /** * @param source The source object. * @param uHints * @throws IOException * @throws MalformedURLException * @throws AccumuloSecurityException * @throws AccumuloException */ public GeoWaveRasterReader(final Object source, final Hints uHints) throws IOException { super(source, uHints); this.source = source; if (GeoWaveGTRasterFormat.isParamList(source)) { try { config = GeoWaveRasterConfig.readFromConfigParams(source.toString()); } catch (final Exception e) { throw new MalformedURLException(source.toString()); } } else { final URL url = GeoWaveGTRasterFormat.getURLFromSource(source); if (url == null) { throw new MalformedURLException(source.toString()); } try { config = GeoWaveRasterConfig.readFromURL(url); } catch (final Exception e) { LOGGER.error("Cannot read config", e); throw new IOException(e); } } init(config); } public GeoWaveRasterReader(final GeoWaveRasterConfig config) throws DataSourceException { super(new Object(), new Hints()); this.config = config; init(config); } private void init(final GeoWaveRasterConfig config) { geowaveDataStore = config.getDataStore(); geowaveAdapterStore = config.getAdapterStore(); geowaveStatisticsStore = config.getDataStatisticsStore(); geowaveIndexStore = config.getIndexStore(); geowaveAdapterIndexMappingStore = config.getAdapterIndexMappingStore(); geowaveInternalAdapterStore = config.getInternalAdapterStore(); authorizationSPI = config.getAuthorizationFactory().create(config.getAuthorizationURL()); } /** * Constructor. * * @param source The source object. 
* @throws IOException * @throws AccumuloSecurityException * @throws AccumuloException * @throws UnsupportedEncodingException */ public GeoWaveRasterReader(final Object source) throws IOException { this(source, null); } protected CoordinateReferenceSystem getDefaultCrs() { if (defaultCrs != null) { return defaultCrs; } if (!crsCache.isEmpty()) { defaultCrs = crsCache.values().iterator().next(); } else { final String[] coverageNames = getGridCoverageNames(); for (final String coverageName : coverageNames) { final CoordinateReferenceSystem crs = getCrsForCoverage(coverageName); if (crs != null) { defaultCrs = crs; break; } } } if (defaultCrs != null) { return defaultCrs; } // if no data has been ingested yet with a CRS, this is the best guess // we can make return GeometryUtils.getDefaultCRS(); } protected CoordinateReferenceSystem getCrsForCoverage(final String coverageName) { CoordinateReferenceSystem crs = crsCache.get(coverageName); if (crs != null) { return crs; } final AdapterToIndexMapping[] adapterMappings = geowaveAdapterIndexMappingStore.getIndicesForAdapter(getAdapterId(coverageName)); if ((adapterMappings != null) && (adapterMappings.length > 0)) { crs = GeometryUtils.getIndexCrs(adapterMappings[0].getIndex(geowaveIndexStore)); crsCache.put(coverageName, crs); } return crs; } @Override public Format getFormat() { return new GeoWaveGTRasterFormat(); } @Override public String[] getGridCoverageNames() { final InternalDataAdapter[] adapters = geowaveAdapterStore.getAdapters(); final List coverageNames = new ArrayList<>(); for (final InternalDataAdapter internalAdapter : adapters) { final DataTypeAdapter adapter = internalAdapter.getAdapter(); if (adapter instanceof RasterDataAdapter) { coverageNames.add(((RasterDataAdapter) adapter).getCoverageName()); } } return coverageNames.toArray(new String[coverageNames.size()]); } @Override public int getGridCoverageCount() { final InternalDataAdapter[] adapters = geowaveAdapterStore.getAdapters(); int coverageCount = 
0; for (final InternalDataAdapter internalAdapter : adapters) { final DataTypeAdapter adapter = internalAdapter.getAdapter(); if (adapter instanceof RasterDataAdapter) { coverageCount++; } } return coverageCount; } @Override public String[] getMetadataNames() { throw new UnsupportedOperationException( "A coverage name must be provided, there is no support for a default coverage"); } @Override public String[] getMetadataNames(final String coverageName) { if (!checkName(coverageName)) { LOGGER.warn("Unable to find data adapter for '" + coverageName + "'"); return null; } final DataTypeAdapter adapter = geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter(); final Set var = ((RasterDataAdapter) adapter).getMetadata().keySet(); return var.toArray(new String[var.size()]); } @Override public String getMetadataValue(final String name) { throw new UnsupportedOperationException( "A coverage name must be provided, there is no support for a default coverage"); } @Override public String getMetadataValue(final String coverageName, final String name) { if (!checkName(coverageName)) { LOGGER.warn("Unable to find data adapter for '" + coverageName + "'"); return null; } final DataTypeAdapter adapter = geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter(); return ((RasterDataAdapter) adapter).getMetadata().get(name); } @Override protected boolean checkName(final String coverageName) { Utilities.ensureNonNull("coverageName", coverageName); final DataTypeAdapter adapter = geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter(); return (adapter != null) && (adapter instanceof RasterDataAdapter); } @Override public GeneralEnvelope getOriginalEnvelope() { throw new UnsupportedOperationException( "A coverage name must be provided, there is no support for a default coverage"); } @Override public GeneralEnvelope getOriginalEnvelope(final String coverageName) { final RasterBoundingBoxValue rasterBbox = 
InternalStatisticsHelper.getDataTypeStatistic( geowaveStatisticsStore, RasterBoundingBoxStatistic.STATS_TYPE, coverageName, authorizationSPI.getAuthorizations()); if (rasterBbox == null) { final CoordinateReferenceSystem crs = getCoordinateReferenceSystem(coverageName); final double minX = crs.getCoordinateSystem().getAxis(0).getMinimumValue(); final double maxX = crs.getCoordinateSystem().getAxis(0).getMaximumValue(); final double minY = crs.getCoordinateSystem().getAxis(1).getMinimumValue(); final double maxY = crs.getCoordinateSystem().getAxis(1).getMaximumValue(); final GeneralEnvelope env = new GeneralEnvelope(new Rectangle2D.Double(minX, minY, maxX - minX, maxY - minY)); env.setCoordinateReferenceSystem(crs); return env; } // try to use both the bounding box and the overview statistics to // determine the width and height at the highest resolution final GeneralEnvelope env = new GeneralEnvelope( new Rectangle2D.Double( rasterBbox.getMinX(), rasterBbox.getMinY(), rasterBbox.getWidth(), rasterBbox.getHeight())); env.setCoordinateReferenceSystem(getCoordinateReferenceSystem(coverageName)); return env; } @Override public CoordinateReferenceSystem getCoordinateReferenceSystem() { return getDefaultCrs(); } @Override public CoordinateReferenceSystem getCoordinateReferenceSystem(final String coverageName) { return getCrsForCoverage(coverageName); } @Override public GridEnvelope getOriginalGridRange() { throw new UnsupportedOperationException( "A coverage name must be provided, there is no support for a default coverage"); } @Override public GridEnvelope getOriginalGridRange(final String coverageName) { int width = 0; int height = 0; final RasterBoundingBoxValue bbox = InternalStatisticsHelper.getDataTypeStatistic( geowaveStatisticsStore, RasterBoundingBoxStatistic.STATS_TYPE, coverageName, authorizationSPI.getAuthorizations()); if (bbox != null) { final RasterOverviewValue overview = InternalStatisticsHelper.getDataTypeStatistic( geowaveStatisticsStore, 
          RasterOverviewStatistic.STATS_TYPE,
          coverageName,
          authorizationSPI.getAuthorizations());
      if (overview != null) {
        // Derive full-resolution pixel dimensions from the coverage bounding box and the
        // finest resolution (index 0) recorded in the overview statistic.
        width =
            (int) Math.ceil(
                ((bbox.getMaxX() - bbox.getMinX()) / overview.getValue()[0].getResolution(0)));
        height =
            (int) Math.ceil(
                ((bbox.getMaxY() - bbox.getMinY()) / overview.getValue()[0].getResolution(1)));
      }
    }
    return new GridEnvelope2D(0, 0, width, height);
  }

  /** Unsupported: this reader requires an explicit coverage name; there is no default coverage. */
  @Override
  public MathTransform getOriginalGridToWorld(final PixelInCell pixInCell) {
    throw new UnsupportedOperationException(
        "A coverage name must be provided, there is no support for a default coverage");
  }

  /**
   * Returns the grid-to-world transform for the named coverage, clearing the superclass's cached
   * raster2Model first so the transform is always recomputed for the requested coverage.
   */
  @Override
  public MathTransform getOriginalGridToWorld(
      final String coverageName,
      final PixelInCell pixInCell) {
    // just reuse super class implementation but ensure that we do not use a
    // cached raster2model
    synchronized (this) {
      raster2Model = null;
      return super.getOriginalGridToWorld(coverageName, pixInCell);
    }
  }

  /** Unsupported: this reader requires an explicit coverage name; there is no default coverage. */
  @Override
  public GridCoverage2D read(final GeneralParameterValue[] parameters)
      throws IllegalArgumentException, IOException {
    throw new UnsupportedOperationException(
        "A coverage name must be provided, there is no support for a default coverage");
  }

  /*
   * (non-Javadoc)
   *
   * @see org.opengis.coverage.grid.GridCoverageReader#read(org.opengis.parameter
   * .GeneralParameterValue [])
   */
  @Override
  public GridCoverage2D read(final String coverageName, final GeneralParameterValue[] params)
      throws IOException {
    if (!checkName(coverageName)) {
      LOGGER.warn("Unable to find data adapter for '" + coverageName + "'");
      return null;
    }
    final Date start = new Date();
    // /////////////////////////////////////////////////////////////////////
    //
    // Checking params
    //
    // /////////////////////////////////////////////////////////////////////
    // Extract the recognized read parameters (grid geometry, colors, interpolation) from the
    // generic GeoTools parameter array; unrecognized parameters are ignored.
    Color outputTransparentColor = null;
    Color backgroundColor = null;
    Interpolation interpolation = null;
    Rectangle dim = null;
    GeneralEnvelope requestedEnvelope = null;
    if (params != null) {
      for (final GeneralParameterValue generalParameterValue : params) {
        final Parameter param = (Parameter) generalParameterValue;
        if (param.getDescriptor().getName().getCode().equals(
            AbstractGridFormat.READ_GRIDGEOMETRY2D.getName().toString())) {
          final GridGeometry2D gg = (GridGeometry2D) param.getValue();
          requestedEnvelope = (GeneralEnvelope) gg.getEnvelope();
          dim = gg.getGridRange2D().getBounds();
        } else if (param.getDescriptor().getName().getCode().equals(
            GeoWaveGTRasterFormat.OUTPUT_TRANSPARENT_COLOR.getName().toString())) {
          outputTransparentColor = (Color) param.getValue();
        } else if (param.getDescriptor().getName().getCode().equals(
            AbstractGridFormat.BACKGROUND_COLOR.getName().toString())) {
          backgroundColor = (Color) param.getValue();
        } else if (param.getDescriptor().getName().getCode().equals(
            AbstractGridFormat.INTERPOLATION.getName().toString())) {
          interpolation = (Interpolation) param.getValue();
        }
      }
    }
    final GridCoverage2D coverage = renderGridCoverage(
        coverageName,
        dim,
        requestedEnvelope,
        backgroundColor,
        outputTransparentColor,
        interpolation);
    LOGGER.info(
        "GeoWave Raster Reader needs : "
            + ((new Date()).getTime() - start.getTime())
            + " millisecs");
    return coverage;
  }

  /**
   * Renders the named coverage for the requested envelope and output dimensions, filling in
   * default background/transparent colors when none were supplied by the caller.
   *
   * @param coverageName the coverage (type) name to render
   * @param dim requested output dimensions in pixels (may be null)
   * @param generalEnvelope requested envelope (in the request CRS)
   * @param backgroundColor background fill color; defaults to the format's default when null
   * @param outputTransparentColor transparent color; defaults to the format's default when null
   * @param interpolation interpolation requested by the caller (may be null)
   * @return the rendered coverage, or null if nothing intersects the request
   * @throws IOException on data store access failure
   */
  public GridCoverage2D renderGridCoverage(
      final String coverageName,
      final Rectangle dim,
      final GeneralEnvelope generalEnvelope,
      Color backgroundColor,
      Color outputTransparentColor,
      final Interpolation interpolation) throws IOException {
    if (backgroundColor == null) {
      backgroundColor = AbstractGridFormat.BACKGROUND_COLOR.getDefaultValue();
    }
    if (outputTransparentColor == null) {
      outputTransparentColor = GeoWaveGTRasterFormat.OUTPUT_TRANSPARENT_COLOR.getDefaultValue();
    }
    // Per-request state keeps this reader thread safe (see GeoWaveRasterReaderState).
    final GeoWaveRasterReaderState state = new GeoWaveRasterReaderState(coverageName);
    state.setRequestedEnvelope(generalEnvelope);
    // /////////////////////////////////////////////////////////////////////
    //
    // Loading tiles trying to optimize as much as possible
    //
    // /////////////////////////////////////////////////////////////////////
    final GridCoverage2D coverage = loadTiles(
        coverageName,
        backgroundColor,
        outputTransparentColor,
        interpolation,
        dim,
        state,
        getCoordinateReferenceSystem(coverageName),
        getOriginalEnvelope(coverageName));
    return coverage;
  }

  /**
   * Queries for the tiles intersecting the request and mosaics them into a single coverage.
   *
   * @param coverageName the coverage (type) name to load
   * @param backgroundColor the background color
   * @param outputTransparentColor the transparent color
   * @param interpolation requested interpolation; may be overridden by config or adapter defaults
   * @param pixelDimension requested output size in pixels; null means native resolution
   * @param state per-request state (requested/transformed envelopes, axis swap flag)
   * @param crs the CRS of the stored data
   * @param originalEnvelope the full envelope of the stored coverage
   * @return the gridcoverage as the final result, or null when the request does not intersect
   * @throws IOException on data store access failure
   */
  private GridCoverage2D loadTiles(
      final String coverageName,
      final Color backgroundColor,
      final Color outputTransparentColor,
      Interpolation interpolation,
      final Rectangle pixelDimension,
      final GeoWaveRasterReaderState state,
      final CoordinateReferenceSystem crs,
      final GeneralEnvelope originalEnvelope) throws IOException {
    transformRequestEnvelope(state, crs);
    // /////////////////////////////////////////////////////////////////////
    //
    // Check if we have something to load by intersecting the requested
    // envelope with the bounds of the data set. If not, give warning
    //
    // /////////////////////////////////////////////////////////////////////
    if (!state.getRequestEnvelopeXformed().intersects(originalEnvelope, true)) {
      LOGGER.warn("The requested envelope does not intersect the envelope of this mosaic");
      LOGGER.warn(state.getRequestEnvelopeXformed().toString());
      LOGGER.warn(originalEnvelope.toString());
      return null;
    }
    final ImageReadParam readP = new ImageReadParam();
    final Integer imageChoice;
    final RasterDataAdapter adapter =
        (RasterDataAdapter) geowaveAdapterStore.getAdapter(
            getAdapterId(coverageName)).getAdapter();
    if (pixelDimension != null) {
      try {
        synchronized (this) {
          // setupResolutions mutates the inherited overview fields, hence the lock.
          if (!setupResolutions(coverageName)) {
            LOGGER.warn("Cannot find the overview statistics for the requested coverage name");
            // No overview stats: return an empty (background-filled) image of the
            // requested size rather than failing the read.
            return coverageFactory.create(
                coverageName,
                RasterUtils.getEmptyImage(
                    (int) pixelDimension.getWidth(),
                    (int) pixelDimension.getHeight(),
                    backgroundColor,
                    outputTransparentColor,
                    adapter.getColorModel()),
                state.getRequestedEnvelope());
          }
          imageChoice = setReadParams(
              state.getCoverageName(),
              OverviewPolicy.getDefaultPolicy(),
              readP,
              state.getRequestEnvelopeXformed(),
              pixelDimension);
        }
        readP.setSourceSubsampling(1, 1, 0, 0);
      } catch (final TransformException e) {
        LOGGER.error(e.getLocalizedMessage(), e);
        return coverageFactory.create(
            coverageName,
            RasterUtils.getEmptyImage(
                (int) pixelDimension.getWidth(),
                (int) pixelDimension.getHeight(),
                backgroundColor,
                outputTransparentColor,
                adapter.getColorModel()),
            state.getRequestedEnvelope());
      }
    } else {
      // No target pixel dimension: use the finest resolution level.
      imageChoice = Integer.valueOf(0);
    }
    final double[][] resolutionLevels = getResolutionLevels(coverageName);
    final Histogram histogram;
    boolean equalizeHistogram;
    // Config override wins over the adapter's own histogram-equalization setting.
    if (config.isEqualizeHistogramOverrideSet()) {
      equalizeHistogram = config.isEqualizeHistogramOverride();
    } else {
      equalizeHistogram = adapter.isEqualizeHistogram();
    }
    if (equalizeHistogram) {
      histogram = getHistogram(
          coverageName,
          resolutionLevels[imageChoice.intValue()][0],
          resolutionLevels[imageChoice.intValue()][1]);
    } else {
      histogram = null;
    }
    boolean scaleTo8Bit = true; // default to always scale to 8-bit
    final boolean scaleTo8BitSet = config.isScaleTo8BitSet();
    if (scaleTo8BitSet) {
      scaleTo8Bit = config.isScaleTo8Bit();
    }
    try (final CloseableIterator gridCoverageIt = queryForTiles(
        pixelDimension,
        state.getRequestEnvelopeXformed(),
        resolutionLevels[imageChoice.intValue()][0],
        resolutionLevels[imageChoice.intValue()][1],
        adapter)) {
      // allow the config to override the WMS request
      if (config.isInterpolationOverrideSet()) {
        interpolation = config.getInterpolationOverride();
      }
      // but don't allow the default adapter interpolation to override the
      // WMS request
      else if (interpolation == null) {
        interpolation = adapter.getInterpolation();
      }
      final GridCoverage2D result = RasterUtils.mosaicGridCoverages(
          gridCoverageIt,
          backgroundColor,
          outputTransparentColor,
          pixelDimension,
          state.getRequestEnvelopeXformed(),
          resolutionLevels[imageChoice.intValue()][0],
          resolutionLevels[imageChoice.intValue()][1],
          adapter.getNoDataValuesPerBand(),
          state.isAxisSwapped(),
          coverageFactory,
          state.getCoverageName(),
          interpolation,
          histogram,
          scaleTo8BitSet,
          scaleTo8Bit,
          adapter.getColorModel());
      return transformResult(result, pixelDimension, state);
    }
  }

  /**
   * Populates the inherited overview fields (numOverviews, highestRes, overViewResolutions) from
   * the per-coverage resolution levels. Returns false when no resolution statistics exist.
   */
  private boolean setupResolutions(final String coverageName) throws IOException {
    // this is a bit of a hack to avoid copy and pasting large
    // portions of the inherited class, which does not handle
    // multiple coverage names
    final double[][] resLevels = getResolutionLevels(coverageName);
    if ((resLevels == null) || (resLevels.length == 0)) {
      return false;
    }
    numOverviews = resLevels.length - 1;
    highestRes = resLevels[0];
    if (numOverviews > 0) {
      overViewResolutions = new double[numOverviews][];
      // resLevels[0] is the highest resolution; the rest are overviews.
      System.arraycopy(resLevels, 1, overViewResolutions, 0, numOverviews);
    } else {
      overViewResolutions = new double[][] {};
    }
    this.coverageName = coverageName;
    return true;
  }

  /**
   * Builds a spatial query from the transformed request envelope and delegates to the
   * adapter/constraints overload, targeting a resolution of one tile per level resolution.
   */
  private CloseableIterator queryForTiles(
      final Rectangle pixelDimension,
      final GeneralEnvelope requestEnvelope,
      final double levelResX,
      final double levelResY,
      final RasterDataAdapter adapter) throws IOException {
    final QueryConstraints query;
    if (requestEnvelope.getCoordinateReferenceSystem() != null) {
      query = new IndexOnlySpatialQuery(
          new GeometryFactory().toGeometry(
              new Envelope(
                  requestEnvelope.getMinimum(0),
                  requestEnvelope.getMaximum(0),
                  requestEnvelope.getMinimum(1),
                  requestEnvelope.getMaximum(1))),
          GeometryUtils.getCrsCode(requestEnvelope.getCoordinateReferenceSystem()));
    } else {
      query = new IndexOnlySpatialQuery(
          new GeometryFactory().toGeometry(
              new Envelope(
                  requestEnvelope.getMinimum(0),
                  requestEnvelope.getMaximum(0),
                  requestEnvelope.getMinimum(1),
                  requestEnvelope.getMaximum(1))));
    }
    return queryForTiles(
        adapter,
        query,
        new double[] {levelResX * adapter.getTileSize(), levelResY * adapter.getTileSize()});
  }

  private CloseableIterator queryForTiles(
      final RasterDataAdapter adapter,
      final QueryConstraints query,
      final double[] targetResolutionPerDimension) {
    final AdapterToIndexMapping[]
        adapterIndexMappings =
            geowaveAdapterIndexMappingStore.getIndicesForAdapter(
                getAdapterId(adapter.getTypeName()));
    // just work on the first spatial only index that contains this adapter
    // ID
    // TODO consider the best strategy for handling temporal queries here
    for (final AdapterToIndexMapping indexMapping : adapterIndexMappings) {
      if (SpatialDimensionalityTypeProvider.isSpatial(indexMapping.getIndex(geowaveIndexStore))) {
        return (CloseableIterator) geowaveDataStore.query(
            QueryBuilder.newBuilder().setAuthorizations(
                authorizationSPI.getAuthorizations()).addTypeName(
                    adapter.getTypeName()).constraints(query).addHint(
                        DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX,
                        targetResolutionPerDimension).build());
      }
    }
    // No spatial index found for this adapter: yield an empty result set.
    return new Wrapper(Collections.emptyIterator());
  }

  /**
   * Resamples the mosaic back into the originally requested CRS when the request envelope had to
   * be transformed; returns the coverage unchanged when no transform was applied.
   */
  private GridCoverage2D transformResult(
      final GridCoverage2D coverage,
      final Rectangle pixelDimension,
      final GeoWaveRasterReaderState state) {
    // Reference equality is intentional: transformRequestEnvelope() reuses the same object
    // when no reprojection was necessary.
    if (state.getRequestEnvelopeXformed() == state.getRequestedEnvelope()) {
      return coverage; // nothing to do
    }
    GridCoverage2D result = null;
    LOGGER.info("Image reprojection necessary");
    result = (GridCoverage2D) RasterUtils.getCoverageOperations().resample(
        coverage,
        state.getRequestedEnvelope().getCoordinateReferenceSystem());
    return coverageFactory.create(
        result.getName(),
        result.getRenderedImage(),
        result.getEnvelope());
  }

  /**
   * transforms (if necessary) the requested envelope into the CRS used by this reader, storing
   * the result in the supplied state; also detects swapped X/Y axis order between the request CRS
   * and the data CRS and records it via {@code state.setAxisSwap(true)}.
   *
   * @throws DataSourceException if the transform cannot be created or the axis order cannot be
   *         reconciled
   */
  public static void transformRequestEnvelope(
      final GeoWaveRasterReaderState state,
      final CoordinateReferenceSystem crs) throws DataSourceException {
    if (CRS.equalsIgnoreMetadata(
        state.getRequestedEnvelope().getCoordinateReferenceSystem(),
        crs)) {
      state.setRequestEnvelopeXformed(state.getRequestedEnvelope());
      return; // and finish
    }
    try {
      /** Buffered factory for coordinate operations. */
      // transforming the envelope back to the dataset crs in
      final MathTransform transform = OPERATION_FACTORY.createOperation(
          state.getRequestedEnvelope().getCoordinateReferenceSystem(),
          crs).getMathTransform();
      if (transform.isIdentity()) { // Identity Transform ?
        state.setRequestEnvelopeXformed(state.getRequestedEnvelope());
        return; // and finish
      }
      state.setRequestEnvelopeXformed(CRS.transform(transform, state.getRequestedEnvelope()));
      state.getRequestEnvelopeXformed().setCoordinateReferenceSystem(crs);
      // if (config.getIgnoreAxisOrder() == false) { // check for axis
      // order
      // required
      final int indexX = indexOfX(crs);
      final int indexY = indexOfY(crs);
      final int indexRequestedX =
          indexOfX(state.getRequestedEnvelope().getCoordinateReferenceSystem());
      final int indexRequestedY =
          indexOfY(state.getRequestedEnvelope().getCoordinateReferenceSystem());
      // x Axis problem ???
      if ((indexX == indexRequestedY) && (indexY == indexRequestedX)) {
        // Axis order is swapped between the two CRSs: flip the envelope's ordinates.
        state.setAxisSwap(true);
        final Rectangle2D tmp = new Rectangle2D.Double(
            state.getRequestEnvelopeXformed().getMinimum(1),
            state.getRequestEnvelopeXformed().getMinimum(0),
            state.getRequestEnvelopeXformed().getSpan(1),
            state.getRequestEnvelopeXformed().getSpan(0));
        state.setRequestEnvelopeXformed(new GeneralEnvelope(tmp));
        state.getRequestEnvelopeXformed().setCoordinateReferenceSystem(crs);
      } else if ((indexX == indexRequestedX) && (indexY == indexRequestedY)) {
        // everything is fine
      } else {
        throw new DataSourceException("Unable to resolve the X Axis problem");
      }
      // }
    } catch (final Exception e) {
      throw new DataSourceException("Unable to create a coverage for this source", e);
    }
  }

  /** Unsupported: this reader requires an explicit coverage name; there is no default coverage. */
  @Override
  public Set> getDynamicParameters() throws IOException {
    throw new UnsupportedOperationException(
        "A coverage name must be provided, there is no support for a default coverage");
  }

  /** This reader exposes no dynamic read parameters for any coverage. */
  @Override
  public Set> getDynamicParameters(final String coverageName) throws IOException {
    return Collections.emptySet();
  }

  /** Unsupported: this reader requires an explicit coverage name; there is no default coverage. */
  @Override
  public double[] getReadingResolutions(
      final OverviewPolicy policy,
      final double[] requestedResolution) throws IOException {
    throw new UnsupportedOperationException(
        "A coverage name must be provided, there is no support for a default coverage");
  }

  /**
   * Resolves the reading resolutions for the named coverage after loading its overview levels;
   * returns null when no overview statistics exist for the coverage.
   */
  @Override
  public double[] getReadingResolutions(
      final String coverageName,
      final OverviewPolicy policy,
      final double[] requestedResolution) throws IOException {
    synchronized (this) {
      // setupResolutions mutates the inherited overview fields, hence the lock.
      if (!setupResolutions(coverageName)) {
        LOGGER.warn("Cannot find the overview statistics for the requested coverage name");
        return null;
      }
      return super.getReadingResolutions(coverageName, policy, requestedResolution);
    }
  }

  /** Unsupported: this reader requires an explicit coverage name; there is no default coverage. */
  @Override
  public ImageLayout getImageLayout() throws IOException {
    throw new UnsupportedOperationException(
        "A coverage name must be provided, there is no support for a default coverage");
  }

  /**
   * Builds an ImageLayout for the named coverage from the adapter's tile size, sample model, and
   * color model plus the coverage's grid range.
   */
  @Override
  public ImageLayout getImageLayout(final String coverageName) throws IOException {
    if (!checkName(coverageName)) {
      LOGGER.warn("Unable to find data adapter for '" + coverageName + "'");
      return null;
    }
    final RasterDataAdapter adapter =
        (RasterDataAdapter) geowaveAdapterStore.getAdapter(getAdapterId(coverageName));
    final GridEnvelope gridEnvelope = getOriginalGridRange();
    return new ImageLayout().setMinX(gridEnvelope.getLow(0)).setMinY(
        gridEnvelope.getLow(1)).setTileWidth(adapter.getTileSize()).setTileHeight(
            adapter.getTileSize()).setSampleModel(adapter.getSampleModel()).setColorModel(
                adapter.getColorModel()).setWidth(gridEnvelope.getHigh(0)).setHeight(
                    gridEnvelope.getHigh(1));
  }

  /** Unsupported: this reader requires an explicit coverage name; there is no default coverage. */
  @Override
  public double[][] getResolutionLevels() throws IOException {
    throw new UnsupportedOperationException(
        "A coverage name must be provided, there is no support for a default coverage");
  }

  /**
   * Reads the per-coverage resolution levels from the raster overview statistic; one row per
   * resolution level, finest first. Returns null when the statistic is absent.
   */
  @Override
  public double[][] getResolutionLevels(final String coverageName) throws IOException {
    final RasterOverviewValue overview = InternalStatisticsHelper.getDataTypeStatistic(
        geowaveStatisticsStore,
        RasterOverviewStatistic.STATS_TYPE,
        coverageName,
        authorizationSPI.getAuthorizations());
    if (overview == null) {
      LOGGER.warn("Cannot find resolutions for coverage '" + coverageName + "'");
      return null;
    }
    final double[][] retVal = new double[overview.getValue().length][];
    int i = 0;
    for (final Resolution res : overview.getValue()) {
      retVal[i++] = res.getResolutionPerDimension();
    }
    return retVal;
  }

  /**
   * Looks up the histogram statistic for the named coverage at the given resolution; returns null
   * (with a warning) when no histogram statistic is stored.
   */
  private Histogram getHistogram(final String coverageName, final double resX, final double resY)
      throws IOException {
    final RasterHistogramValue histogram = InternalStatisticsHelper.getDataTypeStatistic(
        geowaveStatisticsStore,
        RasterHistogramStatistic.STATS_TYPE,
        coverageName,
        authorizationSPI.getAuthorizations());
    if (histogram != null) {
      return histogram.getValue().get(new Resolution(new double[] {resX, resY}));
    } else {
      LOGGER.warn("Cannot find histogram for coverage '" + coverageName + "'");
    }
    return null;
  }

  /**
   * @param crs CoordinateReference System
   * @return dimension index of y dir in crs
   */
  private static int indexOfY(final CoordinateReferenceSystem crs) {
    return indexOf(crs, UPDirections);
  }

  /**
   * @param crs CoordinateReference System
   * @return dimension index of X dir in crs
   */
  private static int indexOfX(final CoordinateReferenceSystem crs) {
    return indexOf(crs, LEFTDirections);
  }

  /** Returns the index of the first CRS axis whose direction is in the given set, or -1. */
  private static int indexOf(
      final CoordinateReferenceSystem crs,
      final Set direction) {
    final CoordinateSystem cs = crs.getCoordinateSystem();
    for (int index = 0; index < cs.getDimension(); index++) {
      final CoordinateSystemAxis axis = cs.getAxis(index);
      if (direction.contains(axis.getDirection())) {
        return index;
      }
    }
    return -1;
  }

  /** Resolves the internal adapter id for a coverage (type) name. */
  private short getAdapterId(final String coverageName) {
    return geowaveInternalAdapterStore.getAdapterId(coverageName);
  }

  private DataTypeAdapter getAdapter(final String coverageName) {
    return geowaveAdapterStore.getAdapter(getAdapterId(coverageName));
  }
}
================================================ FILE:
extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/GeoWaveRasterReaderState.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin; import org.geotools.geometry.GeneralEnvelope; /** This class allows us to make the GeoWaveRasterReader thread safe by storing its state here */ public class GeoWaveRasterReaderState { private final String coverageName; private boolean axisSwap = false; private GeneralEnvelope requestedEnvelope = null; private GeneralEnvelope requestEnvelopeXformed; public GeoWaveRasterReaderState(final String coverageName) { this.coverageName = coverageName; } /** @return the coverageName */ public String getCoverageName() { return coverageName; } /** @return the boolean value of axisSwap */ public boolean isAxisSwapped() { return axisSwap; } /** @param axisSwap the boolean value to set */ public void setAxisSwap(final boolean axisSwap) { this.axisSwap = axisSwap; } /** @return the requestedEnvelope */ public GeneralEnvelope getRequestedEnvelope() { return requestedEnvelope; } /** @param requestedEnvelope the requestedEnvelope to set */ public void setRequestedEnvelope(final GeneralEnvelope requestedEnvelope) { this.requestedEnvelope = requestedEnvelope; } /** @return the requestEnvelopeXformed */ public GeneralEnvelope getRequestEnvelopeXformed() { return requestEnvelopeXformed; } /** @param requestEnvelopeXformed the requestEnvelopeXformed to set */ public void setRequestEnvelopeXformed(final GeneralEnvelope requestEnvelopeXformed) { this.requestEnvelopeXformed = requestEnvelopeXformed; } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/GDALGeoTiffFormat.java 
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin.gdal; import java.util.Collections; import java.util.HashMap; import org.geotools.coverageio.gdal.BaseGDALGridFormat; import org.geotools.data.DataSourceException; import org.geotools.parameter.DefaultParameterDescriptorGroup; import org.geotools.parameter.ParameterGroup; import org.geotools.util.factory.Hints; import org.opengis.coverage.grid.Format; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.parameter.GeneralParameterDescriptor; import it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi; public class GDALGeoTiffFormat extends BaseGDALGridFormat implements Format { /** Creates an instance and sets the metadata. */ public GDALGeoTiffFormat() { super(new GeoTiffImageReaderSpi()); setInfo(); } /** Sets the metadata information. 
*/ @Override protected void setInfo() { final HashMap info = new HashMap<>(); info.put("name", "GDALGeoTiff"); info.put("description", "GDAL GeoTiff Coverage Format"); info.put("vendor", "GeoWave"); info.put("docURL", ""); // TODO: set something info.put("version", "1.0"); mInfo = Collections.unmodifiableMap(info); // writing parameters writeParameters = null; // reading parameters readParameters = new ParameterGroup( new DefaultParameterDescriptorGroup( mInfo, new GeneralParameterDescriptor[] { READ_GRIDGEOMETRY2D, USE_JAI_IMAGEREAD, USE_MULTITHREADING, SUGGESTED_TILE_SIZE})); } @Override public GDALGeoTiffReader getReader(final Object source, final Hints hints) { try { return new GDALGeoTiffReader(source, hints); } catch (final MismatchedDimensionException e) { final RuntimeException re = new RuntimeException(); re.initCause(e); throw re; } catch (final DataSourceException e) { final RuntimeException re = new RuntimeException(); re.initCause(e); throw re; } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/GDALGeoTiffFormatFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.plugin.gdal; import org.geotools.coverage.grid.io.AbstractGridFormat; import org.geotools.coverageio.BaseGridFormatFactorySPI; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi; public class GDALGeoTiffFormatFactory extends BaseGridFormatFactorySPI { private static final Logger LOGGER = LoggerFactory.getLogger(GDALGeoTiffFormatFactory.class); @Override public boolean isAvailable() { boolean available = true; // if these classes are here, then the runtime environment has // access to JAI and the JAI ImageI/O toolbox. try { Class.forName("it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi"); available = new GeoTiffImageReaderSpi().isAvailable(); } catch (final ClassNotFoundException cnf) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("GDALGeoTiffFormatFactory is not availaible."); } available = false; } return available; } @Override public AbstractGridFormat createFormat() { return new GDALGeoTiffFormat(); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/GDALGeoTiffReader.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.plugin.gdal;

import org.geotools.coverageio.gdal.BaseGDALGridCoverage2DReader;
import org.geotools.coverageio.gdal.dted.DTEDReader;
import org.geotools.data.DataSourceException;
import org.geotools.util.factory.Hints;
import org.opengis.coverage.grid.Format;
import org.opengis.coverage.grid.GridCoverageReader;
import it.geosolutions.imageio.plugins.geotiff.GeoTiffImageReaderSpi;

/** GridCoverageReader for GeoTiff sources backed by the GDAL ImageIO-ext plugin. */
public class GDALGeoTiffReader extends BaseGDALGridCoverage2DReader implements GridCoverageReader {
  // No sidecar world file is used; georeferencing comes from the GeoTiff itself.
  private static final String worldFileExt = "";

  /**
   * Creates a new instance of a {@link GDALGeoTiffReader}. No assumption is made about the file
   * extension. (NOTE(review): the original Javadoc referenced {@link DTEDReader}; that appears to
   * have been a copy-paste artifact.)
   *
   * @param input Source object for which we want to build a reader.
   * @throws DataSourceException
   */
  public GDALGeoTiffReader(final Object input) throws DataSourceException {
    this(input, null);
  }

  /**
   * Creates a new instance of a {@link GDALGeoTiffReader}. No assumption is made about the file
   * extension.
   *
   * @param input Source object for which we want to build a reader.
   * @param hints Hints to be used by this reader throughout his life.
   * @throws DataSourceException
   */
  public GDALGeoTiffReader(final Object input, final Hints hints) throws DataSourceException {
    super(input, hints, worldFileExt, new GeoTiffImageReaderSpi());
  }

  /** @see org.opengis.coverage.grid.GridCoverageReader#getFormat() */
  @Override
  public Format getFormat() {
    return new GDALGeoTiffFormat();
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/plugin/gdal/InstallGdal.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.plugin.gdal;

import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.util.Locale;
import org.apache.commons.io.IOUtils;
import org.codehaus.plexus.archiver.tar.TarGZipUnArchiver;
import org.codehaus.plexus.logging.console.ConsoleLogger;
import org.locationtech.geowave.adapter.raster.util.ZipUtils;
import org.slf4j.LoggerFactory;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * Command-line installer that downloads and unpacks the platform-appropriate GDAL native
 * libraries into a target directory (first program argument, or ./target/temp/gdal by default).
 */
public class InstallGdal {
  private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(InstallGdal.class);

  public static final File DEFAULT_TEMP_DIR = new File("./target/temp");
  // System property that can override the download mirror base URL.
  private static final String GDAL_ENV = "baseGdalDownload";
  // this has some of the content from
  // http://demo.geo-solutions.it/share/github/imageio-ext/releases/1.1.X/1.1.7/native/gdal
  // rehosted, with all supplemental files to retain the credit (just to
  // lessen the burden of additional network traffic imposed on this external
  // server)
  private static final String DEFAULT_BASE =
      "https://s3.amazonaws.com/geowave/third-party-downloads/gdal";

  public static void main(final String[] args) throws IOException {
    File gdalDir = null;
    if ((args != null) && (args.length > 0) && (args[0] != null) && !args[0].trim().isEmpty()) {
      gdalDir = new File(args[0]);
      // HP Fortify "Path Traversal" false positive
      // What Fortify considers "user input" comes only
      // from users with OS-level access anyway
    } else {
      gdalDir = new File(DEFAULT_TEMP_DIR, "gdal");
    }
    if (gdalDir.exists() && gdalDir.isDirectory()) {
      // More than one file present is taken to mean GDAL is already installed.
      final File[] files = gdalDir.listFiles();
      if ((files != null) && (files.length > 1)) {
        System.out.println("GDAL already exists");
        return;
      } else {
        LOGGER.error(
            "Directory "
                + gdalDir.getAbsolutePath()
                + " exists but does not contain GDAL, consider deleting directory or choosing a different one.");
      }
    }
    if (!gdalDir.mkdirs()) {
      LOGGER.warn("unable to create directory " + gdalDir.getAbsolutePath());
    }
    install(gdalDir);
  }

  /**
   * Downloads the platform-specific GDAL archive (with up to 3 retry attempts), unpacks it into
   * gdalDir, recreates broken symbolic links from the tarball, and deletes the archive. Calls
   * System.exit(-1) when the download cannot be completed.
   */
  @SuppressFBWarnings(value = "REC_CATCH_EXCEPTION")
  private static void install(final File gdalDir) throws IOException {
    URL url;
    String file;
    String gdalEnv = System.getProperty(GDAL_ENV);
    if ((gdalEnv == null) || gdalEnv.trim().isEmpty()) {
      gdalEnv = DEFAULT_BASE;
    }
    // Pick the archive matching the current OS.
    if (isWindows()) {
      file = "win-x64-gdal204.zip";
      url = new URL(gdalEnv + "/windows/MSVC2017/" + file);
    } else if (isMac()) {
      file = "gdal-1.9.2_macOSX.zip";
      url = new URL(gdalEnv + "/mac/" + file);
    } else {
      file = "linux-libgdal26.tar.gz";
      url = new URL(gdalEnv + "/linux/" + file);
    }
    final File downloadFile = new File(gdalDir, file);
    if (downloadFile.exists() && (downloadFile.length() < 1)) {
      // its corrupt, delete it
      if (!downloadFile.delete()) {
        LOGGER.warn(
            "File '" + downloadFile.getAbsolutePath() + "' is corrupt and cannot be deleted");
      }
    }
    System.out.println("Downloading GDAL native libraries...");
    if (!downloadFile.exists()) {
      boolean success = false;
      // Up to three download attempts before giving up.
      for (int i = 0; i < 3; i++) {
        try (FileOutputStream fos = new FileOutputStream(downloadFile)) {
          final URLConnection connection = url.openConnection();
          connection.setConnectTimeout(360_000);
          connection.setReadTimeout(360_000);
          IOUtils.copyLarge(connection.getInputStream(), fos);
          fos.flush();
          success = true;
          break;
        } catch (final Exception e) {
          LOGGER.warn("Unable to download url '" + url + "'. Retry attempt #" + i);
        }
      }
      if (!success) {
        LOGGER.error("Unable to download url '" + url + "' after 3 attempts.");
        System.exit(-1);
      }
    }
    if (file.endsWith("zip")) {
      ZipUtils.unZipFile(downloadFile, gdalDir.getAbsolutePath(), false);
    } else {
      final TarGZipUnArchiver unarchiver = new TarGZipUnArchiver();
      unarchiver.enableLogging(
          new ConsoleLogger(org.codehaus.plexus.logging.Logger.LEVEL_WARN, "GDAL Unarchive"));
      unarchiver.setSourceFile(downloadFile);
      unarchiver.setDestDirectory(gdalDir);
      unarchiver.extract();
      // the symbolic links are not working, programmatically re-create
      // them
      // Zero-length files are assumed to be broken symlink placeholders.
      final File[] links = gdalDir.listFiles(new FileFilter() {
        @Override
        public boolean accept(final File pathname) {
          return pathname.length() <= 0;
        }
      });
      if (links != null) {
        final File[] actualLibs = gdalDir.listFiles(new FileFilter() {
          @Override
          public boolean accept(final File pathname) {
            return pathname.length() > 0;
          }
        });
        for (final File link : links) {
          // find an actual lib that matches
          for (final File lib : actualLibs) {
            if (lib.getName().startsWith(link.getName())) {
              if (link.delete()) {
                Files.createSymbolicLink(
                    link.getAbsoluteFile().toPath(),
                    lib.getAbsoluteFile().toPath());
              }
              break;
            }
          }
        }
      }
    }
    if (!downloadFile.delete()) {
      LOGGER.warn("cannot delete " + downloadFile.getAbsolutePath());
    }
    System.out.println("GDAL installed in directory " + gdalDir.getAbsolutePath());
  }

  private static boolean isWindows() {
    final String OS = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
    return (OS.indexOf("win") > -1);
  }

  private static boolean isMac() {
    final String OS = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
    return (OS.indexOf("mac") >= 0);
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeCombiner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.resize;

import java.io.IOException;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.mapreduce.ReduceContext;
import org.apache.hadoop.mapreduce.Reducer;
import org.locationtech.geowave.mapreduce.GeoWaveReducer;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.opengis.coverage.grid.GridCoverage;

/**
 * MapReduce combiner for the raster-tile resize job: merges all grid coverages that share a key
 * into a single coverage (via RasterTileResizeHelper) before they reach the reducer.
 */
public class RasterTileResizeCombiner extends GeoWaveReducer {
  // Created in setup(); encapsulates the adapter/index wiring for the resize job.
  private RasterTileResizeHelper helper;

  @Override
  protected void reduceNativeValues(
      final GeoWaveInputKey key,
      final Iterable values,
      final ReduceContext context) throws IOException, InterruptedException {
    // Merge all coverages for this key; a null result means there was nothing to emit.
    final GridCoverage mergedCoverage = helper.getMergedCoverage(key, values);
    if (mergedCoverage != null) {
      context.write(key, mergedCoverage);
    }
  }

  @Override
  protected void setup(
      final Reducer.Context context) throws IOException, InterruptedException {
    super.setup(context);
    helper = new RasterTileResizeHelper(context);
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeHelper.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.raster.resize;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Iterator;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.locationtech.geowave.adapter.raster.adapter.ClientMergeableRasterTile;
import org.locationtech.geowave.adapter.raster.adapter.GridCoverageWritable;
import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;
import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.metadata.InternalAdapterStoreImpl;
import org.locationtech.geowave.mapreduce.HadoopWritableSerializer;
import org.locationtech.geowave.mapreduce.JobContextAdapterStore;
import org.locationtech.geowave.mapreduce.JobContextIndexStore;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey;
import org.opengis.coverage.grid.GridCoverage;

/**
 * Shared helper for the raster-tile resize MapReduce job. Resolves the old/new adapter ids and
 * target index from the job configuration and merges same-key grid coverages. Serializable so it
 * can be shipped to tasks; uses a custom writeObject/readObject protocol based on GeoWave's
 * PersistenceUtils binary encoding.
 */
public class RasterTileResizeHelper implements Serializable {
  private static final long serialVersionUID = 1L;
  // Adapter that writes tiles at the NEW tile size.
  private RasterDataAdapter newAdapter;
  // Internal id of the adapter the source tiles were written with.
  private short oldAdapterId;
  // Internal id assigned to the new (resized) adapter.
  private short newAdapterId;
  // Target index for the resized tiles.
  private Index index;
  // Single-element array caching index.getName() for output keys.
  private String[] indexNames;
  // Lazily created writable serializer for the new adapter (see getSerializer()).
  private HadoopWritableSerializer serializer;

  /** Initializes the helper from the MapReduce job context (index, adapters, configured ids). */
  public RasterTileResizeHelper(final JobContext context) {
    index = JobContextIndexStore.getIndices(context)[0];
    indexNames = new String[] {index.getName()};
    final DataTypeAdapter[] adapters = JobContextAdapterStore.getDataAdapters(context);
    final Configuration conf = context.getConfiguration();
    final String newTypeName = conf.get(RasterTileResizeJobRunner.NEW_TYPE_NAME_KEY);
    oldAdapterId = (short) conf.getInt(RasterTileResizeJobRunner.OLD_ADAPTER_ID_KEY, -1);
    newAdapterId = (short) conf.getInt(
        RasterTileResizeJobRunner.NEW_ADAPTER_ID_KEY,
        InternalAdapterStoreImpl.getLazyInitialAdapterId(newTypeName));
    for (final DataTypeAdapter adapter : adapters) {
      if (adapter.getTypeName().equals(newTypeName)) {
        if (((RasterDataAdapter) adapter).getTransform() == null) {
          // the new adapter doesn't have a merge strategy - resizing
          // will require merging, so default to NoDataMergeStrategy
          newAdapter = new RasterDataAdapter(
              (RasterDataAdapter) adapter,
              newTypeName,
              new NoDataMergeStrategy());
        } else {
          newAdapter = (RasterDataAdapter) adapter;
        }
      }
    }
  }

  public RasterTileResizeHelper(
      final short oldAdapterId,
      final short newAdapterId,
      final RasterDataAdapter newAdapter,
      final Index index) {
    this.newAdapter = newAdapter;
    this.oldAdapterId = oldAdapterId;
    this.newAdapterId = newAdapterId;
    this.index = index;
    indexNames = new String[] {index.getName()};
  }

  public GeoWaveOutputKey getGeoWaveOutputKey() {
    return new GeoWaveOutputKey(newAdapter.getTypeName(), indexNames);
  }

  /** Re-tiles an existing coverage into the new adapter's tile layout for the target index. */
  public Iterator getCoveragesForIndex(final GridCoverage existingCoverage) {
    return newAdapter.convertToIndex(index, existingCoverage);
  }

  /**
   * Merges all GridCoverage values for a key into one coverage. With a single coverage, it is
   * returned as-is; with several, their raster tiles are merged via the new adapter's merge
   * strategy and converted back into a coverage positioned by the key's partition/sort keys.
   */
  public GridCoverage getMergedCoverage(final GeoWaveInputKey key, final Iterable values)
      throws IOException, InterruptedException {
    GridCoverage mergedCoverage = null;
    ClientMergeableRasterTile mergedTile = null;
    boolean needsMerge = false;
    final Iterator it = values.iterator();
    while (it.hasNext()) {
      final Object value = it.next();
      if (value instanceof GridCoverage) {
        if (mergedCoverage == null) {
          mergedCoverage = (GridCoverage) value;
        } else {
          if (!needsMerge) {
            // Second coverage seen: switch to tile-level merging, seeded from the first.
            mergedTile = newAdapter.getRasterTileFromCoverage(mergedCoverage);
            needsMerge = true;
          }
          final ClientMergeableRasterTile thisTile =
              newAdapter.getRasterTileFromCoverage((GridCoverage) value);
          if (mergedTile != null) {
            mergedTile.merge(thisTile);
          }
        }
      }
    }
    if (needsMerge) {
      final Pair pair = key.getPartitionAndSortKey(index);
      mergedCoverage = newAdapter.getCoverageFromRasterTile(
          mergedTile,
          pair == null ? null : pair.getLeft(),
          pair == null ? null : pair.getRight(),
          index);
    }
    return mergedCoverage;
  }

  // Custom serialization: [ushort length][adapter bytes][ushort length][index bytes]
  // [short oldAdapterId][short newAdapterId], using GeoWave's PersistenceUtils encoding.
  private void readObject(final ObjectInputStream aInputStream)
      throws ClassNotFoundException, IOException {
    final byte[] adapterBytes = new byte[aInputStream.readUnsignedShort()];
    aInputStream.readFully(adapterBytes);
    final byte[] indexBytes = new byte[aInputStream.readUnsignedShort()];
    aInputStream.readFully(indexBytes);
    newAdapter = (RasterDataAdapter) PersistenceUtils.fromBinary(adapterBytes);
    index = (Index) PersistenceUtils.fromBinary(indexBytes);
    oldAdapterId = aInputStream.readShort();
    newAdapterId = aInputStream.readShort();
    indexNames = new String[] {index.getName()};
  }

  private void writeObject(final ObjectOutputStream aOutputStream) throws IOException {
    final byte[] adapterBytes = PersistenceUtils.toBinary(newAdapter);
    final byte[] indexBytes = PersistenceUtils.toBinary(index);
    aOutputStream.writeShort(adapterBytes.length);
    aOutputStream.write(adapterBytes);
    aOutputStream.writeShort(indexBytes.length);
    aOutputStream.write(indexBytes);
    aOutputStream.writeShort(oldAdapterId);
    aOutputStream.writeShort(newAdapterId);
    aOutputStream.flush();
  }

  public HadoopWritableSerializer getSerializer() {
    if (serializer == null) {
      serializer = newAdapter.createWritableSerializer();
    }
    return serializer;
  }

  public short getNewAdapterId() {
    return newAdapterId;
  }

  public byte[] getNewDataId(final GridCoverage coverage) {
    return newAdapter.getDataId(coverage);
  }

  public String getIndexName() {
    return index.getName();
  }

  /** Returns true when the adapter id belongs to the original (pre-resize) coverage. */
  public boolean isOriginalCoverage(final short adapterId) {
    return oldAdapterId == adapterId;
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeJobRunner.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.resize; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.operations.ResizeMRCommand; import org.locationtech.geowave.adapter.raster.operations.options.RasterTileResizeCommandLineOptions; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.operations.MetadataType; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import org.locationtech.geowave.mapreduce.JobContextAdapterStore; import org.locationtech.geowave.mapreduce.JobContextInternalAdapterStore; import org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import 
org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputFormat; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.opengis.coverage.grid.GridCoverage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RasterTileResizeJobRunner extends Configured implements Tool { private static final Logger LOGGER = LoggerFactory.getLogger(RasterTileResizeJobRunner.class); public static final String NEW_TYPE_NAME_KEY = "NEW_TYPE_NAME"; public static final String NEW_ADAPTER_ID_KEY = "NEW_ADAPTER_ID"; public static final String OLD_TYPE_NAME_KEY = "OLD_TYPE_NAME"; public static final String OLD_ADAPTER_ID_KEY = "OLD_ADAPTER_ID"; private final DataStorePluginOptions inputStoreOptions; private final DataStorePluginOptions outputStoreOptions; private final String hdfsHostPort; private final String jobTrackerOrResourceManagerHostPort; protected RasterTileResizeCommandLineOptions rasterResizeOptions; public RasterTileResizeJobRunner( final DataStorePluginOptions inputStoreOptions, final DataStorePluginOptions outputStoreOptions, final RasterTileResizeCommandLineOptions rasterResizeOptions, final String hdfsHostPort, final String jobTrackerOrResourceManagerHostPort) { this.inputStoreOptions = inputStoreOptions; this.outputStoreOptions = outputStoreOptions; this.rasterResizeOptions = rasterResizeOptions; this.hdfsHostPort = hdfsHostPort; this.jobTrackerOrResourceManagerHostPort = jobTrackerOrResourceManagerHostPort; } /** Main method to execute the MapReduce analytic. 
*/ public int runJob() throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = super.getConf(); if (conf == null) { conf = new Configuration(); setConf(conf); } GeoWaveConfiguratorBase.setRemoteInvocationParams( hdfsHostPort, jobTrackerOrResourceManagerHostPort, conf); conf.set(OLD_TYPE_NAME_KEY, rasterResizeOptions.getInputCoverageName()); conf.set(NEW_TYPE_NAME_KEY, rasterResizeOptions.getOutputCoverageName()); final Job job = new Job(conf); job.setJarByClass(this.getClass()); job.setJobName( "Converting " + rasterResizeOptions.getInputCoverageName() + " to tile size=" + rasterResizeOptions.getOutputTileSize()); job.setMapperClass(RasterTileResizeMapper.class); job.setCombinerClass(RasterTileResizeCombiner.class); job.setReducerClass(RasterTileResizeReducer.class); job.setInputFormatClass(GeoWaveInputFormat.class); job.setOutputFormatClass(GeoWaveOutputFormat.class); job.setMapOutputKeyClass(GeoWaveInputKey.class); job.setMapOutputValueClass(ObjectWritable.class); job.setOutputKeyClass(GeoWaveOutputKey.class); job.setOutputValueClass(GridCoverage.class); job.setNumReduceTasks(8); GeoWaveInputFormat.setMinimumSplitCount( job.getConfiguration(), rasterResizeOptions.getMinSplits()); GeoWaveInputFormat.setMaximumSplitCount( job.getConfiguration(), rasterResizeOptions.getMaxSplits()); GeoWaveInputFormat.setStoreOptions(job.getConfiguration(), inputStoreOptions); final InternalAdapterStore internalAdapterStore = inputStoreOptions.createInternalAdapterStore(); final short internalAdapterId = internalAdapterStore.getAdapterId(rasterResizeOptions.getInputCoverageName()); final DataTypeAdapter adapter = inputStoreOptions.createAdapterStore().getAdapter(internalAdapterId).getAdapter(); if (adapter == null) { throw new IllegalArgumentException( "Adapter for coverage '" + rasterResizeOptions.getInputCoverageName() + "' does not exist in namespace '" + inputStoreOptions.getGeoWaveNamespace() + "'"); } final RasterDataAdapter newAdapter = new 
RasterDataAdapter( (RasterDataAdapter) adapter, rasterResizeOptions.getOutputCoverageName(), rasterResizeOptions.getOutputTileSize()); JobContextAdapterStore.addDataAdapter(job.getConfiguration(), adapter); JobContextAdapterStore.addDataAdapter(job.getConfiguration(), newAdapter); Index index = null; final IndexStore indexStore = inputStoreOptions.createIndexStore(); if (rasterResizeOptions.getIndexName() != null) { index = indexStore.getIndex(rasterResizeOptions.getIndexName()); } if (index == null) { try (CloseableIterator indices = indexStore.getIndices()) { index = indices.next(); } if (index == null) { throw new IllegalArgumentException( "Index does not exist in namespace '" + inputStoreOptions.getGeoWaveNamespace() + "'"); } } GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), outputStoreOptions); GeoWaveOutputFormat.addIndex(job.getConfiguration(), index); final DataStore store = outputStoreOptions.createDataStore(); store.addType(newAdapter, index); final short newInternalAdapterId = outputStoreOptions.createInternalAdapterStore().addTypeName(newAdapter.getTypeName()); // what if the adapter IDs are the same, but the internal IDs are // different (unlikely corner case, but seemingly possible) JobContextInternalAdapterStore.addTypeName( job.getConfiguration(), newAdapter.getTypeName(), newInternalAdapterId); JobContextInternalAdapterStore.addTypeName( job.getConfiguration(), adapter.getTypeName(), internalAdapterId); job.getConfiguration().setInt(OLD_ADAPTER_ID_KEY, internalAdapterId); job.getConfiguration().setInt(NEW_ADAPTER_ID_KEY, newInternalAdapterId); if (outputStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) { try { // this is done primarily to ensure stats merging is enabled // before the // distributed ingest outputStoreOptions.createDataStoreOperations().createMetadataWriter( MetadataType.STATISTIC_VALUES).close(); } catch (final Exception e) { LOGGER.error("Unable to create stats writer", e); } } boolean 
retVal = false; try { retVal = job.waitForCompletion(true); } catch (final IOException ex) { LOGGER.error("Error waiting for map reduce tile resize job: ", ex); } return retVal ? 0 : 1; } public static void main(final String[] args) throws Exception { final ConfigOptions opts = new ConfigOptions(); final OperationParser parser = new OperationParser(); parser.addAdditionalObject(opts); final ResizeMRCommand command = new ResizeMRCommand(); final CommandLineOperationParams params = parser.parse(command, args); opts.prepare(params); final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args); System.exit(res); } @Override public int run(final String[] args) throws Exception { // parse args to find command line etc... return runJob(); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.resize; import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.MapContext; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.entities.GeoWaveKey; import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl; import org.locationtech.geowave.mapreduce.GeoWaveWritableOutputMapper; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.opengis.coverage.grid.GridCoverage; public class RasterTileResizeMapper extends GeoWaveWritableOutputMapper { private RasterTileResizeHelper helper; @Override protected void mapNativeValue( final GeoWaveInputKey key, final GridCoverage value, final MapContext context) throws IOException, InterruptedException { if (helper.isOriginalCoverage(key.getInternalAdapterId())) { final InternalDataAdapter adapter = super.serializationTool.getInternalAdapter(key.getInternalAdapterId()); if ((adapter != null) && (adapter.getAdapter() != null) && (adapter.getAdapter() instanceof RasterDataAdapter)) { final Iterator coverages = helper.getCoveragesForIndex(value); if (coverages == null) { LOGGER.error("Couldn't get coverages instance, getCoveragesForIndex returned null"); throw new IOException( "Couldn't get coverages instance, getCoveragesForIndex returned null"); } while (coverages.hasNext()) { final 
GridCoverage c = coverages.next(); // it should be a FitToIndexGridCoverage because it was just // converted above if (c instanceof FitToIndexGridCoverage) { final byte[] partitionKey = ((FitToIndexGridCoverage) c).getPartitionKey(); final byte[] sortKey = ((FitToIndexGridCoverage) c).getSortKey(); final GeoWaveKey geowaveKey = new GeoWaveKeyImpl( helper.getNewDataId(c), key.getInternalAdapterId(), partitionKey, sortKey, 0); final GeoWaveInputKey inputKey = new GeoWaveInputKey(helper.getNewAdapterId(), geowaveKey, helper.getIndexName()); context.write(inputKey, c); } } } } } @Override protected void setup( final Mapper.Context context) throws IOException, InterruptedException { super.setup(context); helper = new RasterTileResizeHelper(context); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/resize/RasterTileResizeReducer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.resize; import java.io.IOException; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.mapreduce.Reducer; import org.locationtech.geowave.mapreduce.GeoWaveWritableInputReducer; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.locationtech.geowave.mapreduce.output.GeoWaveOutputKey; import org.opengis.coverage.grid.GridCoverage; public class RasterTileResizeReducer extends GeoWaveWritableInputReducer { private RasterTileResizeHelper helper; @Override protected void reduceNativeValues( final GeoWaveInputKey key, final Iterable values, final Reducer.Context context) throws IOException, InterruptedException { final GridCoverage mergedCoverage = helper.getMergedCoverage(key, values); if (mergedCoverage != null) { context.write(helper.getGeoWaveOutputKey(), mergedCoverage); } } @Override protected void setup( final Reducer.Context context) throws IOException, InterruptedException { super.setup(context); helper = new RasterTileResizeHelper(context); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/HistogramConfig.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.stats; import java.awt.image.SampleModel; import java.nio.ByteBuffer; import org.geotools.coverage.TypeMap; import org.geotools.util.NumberRange; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; public class HistogramConfig implements Persistable { private static final int MAX_DEFAULT_NUM_BINS = 65536; private double[] highValues; private double[] lowValues; private int[] numBins; public HistogramConfig() {} public HistogramConfig(final SampleModel sampleModel) { final int numBands = sampleModel.getNumBands(); highValues = new double[numBands]; lowValues = new double[numBands]; numBins = new int[numBands]; for (int b = 0; b < numBands; b++) { final NumberRange range = TypeMap.getRange(TypeMap.getSampleDimensionType(sampleModel, b)); int bins; double min = range.getMinimum(true); double max = range.getMaximum(true); if (Double.isInfinite(min) || Double.isInfinite(max) || Double.isNaN(min) || Double.isNaN(max)) { // in this case there is no reasonable default, just use a range // of 0 to 1 as a placeholder min = 0; max = 1; bins = MAX_DEFAULT_NUM_BINS; } else { bins = (int) Math.min(MAX_DEFAULT_NUM_BINS, (max - min) + 1); } lowValues[b] = min; highValues[b] = max; numBins[b] = bins; } } public HistogramConfig(final double[] highValues, final double[] lowValues, final int[] numBins) { this.highValues = highValues; this.lowValues = lowValues; this.numBins = numBins; } @Override public byte[] toBinary() { int byteLength = 0; for (int b = 0; b < highValues.length; b++) { byteLength += 16 + 
VarintUtils.unsignedIntByteLength(numBins[b]); } byteLength += VarintUtils.unsignedIntByteLength(highValues.length); // constant number of bands, 8 + 8 + 4 bytes per band (high,low, and // numBins), and 4 more for the total bands final ByteBuffer buf = ByteBuffer.allocate(byteLength); VarintUtils.writeUnsignedInt(highValues.length, buf); for (int b = 0; b < highValues.length; b++) { buf.putDouble(lowValues[b]); buf.putDouble(highValues[b]); VarintUtils.writeUnsignedInt(numBins[b], buf); } return buf.array(); } public double[] getHighValues() { return highValues; } public double[] getLowValues() { return lowValues; } public int[] getNumBins() { return numBins; } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int numBands = VarintUtils.readUnsignedInt(buf); highValues = new double[numBands]; lowValues = new double[numBands]; numBins = new int[numBands]; for (int b = 0; b < numBands; b++) { lowValues[b] = buf.getDouble(); highValues[b] = buf.getDouble(); numBins[b] = VarintUtils.readUnsignedInt(buf); } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterBoundingBoxStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.stats; import org.geotools.geometry.GeneralEnvelope; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.core.geotime.store.statistics.AbstractBoundingBoxValue; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import org.locationtech.jts.geom.Envelope; import org.opengis.coverage.grid.GridCoverage; public class RasterBoundingBoxStatistic extends DataTypeStatistic { public static final DataTypeStatisticType STATS_TYPE = new DataTypeStatisticType<>("RASTER_BOUNDING_BOX"); public RasterBoundingBoxStatistic() { super(STATS_TYPE); } public RasterBoundingBoxStatistic(final String typeName) { super(STATS_TYPE, typeName); } @Override public String getDescription() { return "Maintains a bounding box for a raster data set."; } @Override public boolean isCompatibleWith(final Class adapterClass) { return GridCoverage.class.isAssignableFrom(adapterClass); } @Override public RasterBoundingBoxValue createEmpty() { return new RasterBoundingBoxValue(this); } public static class RasterBoundingBoxValue extends AbstractBoundingBoxValue { public RasterBoundingBoxValue() { this(null); } public RasterBoundingBoxValue(final Statistic statistic) { super(statistic); } @Override public Envelope getEnvelope( final DataTypeAdapter adapter, final T entry, final 
GeoWaveRow... rows) { if (entry instanceof GridCoverage) { final org.opengis.geometry.Envelope indexedEnvelope = ((GridCoverage) entry).getEnvelope(); final org.opengis.geometry.Envelope originalEnvelope; if (entry instanceof FitToIndexGridCoverage) { originalEnvelope = ((FitToIndexGridCoverage) entry).getOriginalEnvelope(); } else { originalEnvelope = null; } // we don't want to accumulate the envelope outside of the original if // it is fit to the index, so compute the intersection with the original // envelope final org.opengis.geometry.Envelope resultingEnvelope = getIntersection(originalEnvelope, indexedEnvelope); if (resultingEnvelope != null) { return new Envelope( resultingEnvelope.getMinimum(0), resultingEnvelope.getMaximum(0), resultingEnvelope.getMinimum(1), resultingEnvelope.getMaximum(1)); } } return null; } } private static org.opengis.geometry.Envelope getIntersection( final org.opengis.geometry.Envelope originalEnvelope, final org.opengis.geometry.Envelope indexedEnvelope) { if (originalEnvelope == null) { return indexedEnvelope; } if (indexedEnvelope == null) { return originalEnvelope; } final int dimensions = originalEnvelope.getDimension(); final double[] minDP = new double[dimensions]; final double[] maxDP = new double[dimensions]; for (int d = 0; d < dimensions; d++) { // to perform the intersection of the original envelope and the // indexed envelope, use the max of the mins per dimension and the // min of the maxes minDP[d] = Math.max(originalEnvelope.getMinimum(d), indexedEnvelope.getMinimum(d)); maxDP[d] = Math.min(originalEnvelope.getMaximum(d), indexedEnvelope.getMaximum(d)); } return new GeneralEnvelope(minDP, maxDP); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterFootprintStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.stats; import java.nio.ByteBuffer; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.core.geotime.util.TWKBReader; import org.locationtech.geowave.core.geotime.util.TWKBWriter; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.io.ParseException; import org.opengis.coverage.grid.GridCoverage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RasterFootprintStatistic extends DataTypeStatistic { private static final Logger LOGGER = LoggerFactory.getLogger(RasterFootprintStatistic.class); public static final DataTypeStatisticType STATS_TYPE = new DataTypeStatisticType<>("RASTER_FOOTPRINT"); public RasterFootprintStatistic() { super(STATS_TYPE); } public RasterFootprintStatistic(final String typeName) { super(STATS_TYPE, typeName); } @Override public boolean isCompatibleWith(final Class adapterClass) { return GridCoverage.class.isAssignableFrom(adapterClass); } @Override public String getDescription() { 
return "Maintains a footprint that encompasses all of the raster data."; } @Override public RasterFootprintValue createEmpty() { return new RasterFootprintValue(this); } public static class RasterFootprintValue extends StatisticValue implements StatisticsIngestCallback { public RasterFootprintValue() { this(null); } public RasterFootprintValue(final Statistic statistic) { super(statistic); } private Geometry footprint = null; @Override public void merge(Mergeable merge) { if (merge instanceof RasterFootprintValue) { footprint = RasterUtils.combineIntoOneGeometry(footprint, ((RasterFootprintValue) merge).footprint); } } @Override public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) { if (entry instanceof FitToIndexGridCoverage) { footprint = RasterUtils.combineIntoOneGeometry( footprint, ((FitToIndexGridCoverage) entry).getFootprintWorldGeometry()); } } @Override public Geometry getValue() { return footprint; } @Override public byte[] toBinary() { byte[] bytes = null; if (footprint == null) { bytes = new byte[] {}; } else { bytes = new TWKBWriter().write(footprint); } final ByteBuffer buf = ByteBuffer.allocate(bytes.length); buf.put(bytes); return buf.array(); } @Override public void fromBinary(byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] payload = buf.array(); if (payload.length > 0) { try { footprint = new TWKBReader().read(payload); } catch (final ParseException e) { LOGGER.warn("Unable to parse WKB", e); } } else { footprint = null; } } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterHistogramStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.stats; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.coverage.processing.AbstractOperation; import org.geotools.coverage.processing.BaseStatisticsOperationJAI; import org.geotools.coverage.processing.CoverageProcessor; import org.geotools.coverage.processing.operation.Histogram; import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage; import org.locationtech.geowave.adapter.raster.RasterUtils; import org.locationtech.geowave.adapter.raster.Resolution; import org.locationtech.geowave.adapter.raster.plugin.GeoWaveGTRasterFormat; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.PersistenceUtils; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveRow; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import 
org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryCollection;
import org.locationtech.jts.geom.Polygon;
import org.opengis.coverage.grid.GridCoverage;
import org.opengis.parameter.ParameterValueGroup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Data-type statistic that maintains a per-{@link Resolution} pixel-value histogram for raster
 * (GridCoverage) data. Histograms are computed per ingested coverage via the GeoTools "Histogram"
 * coverage operation, restricted to the coverage's footprint as an ROI, and merged bin-wise across
 * entries. The histogram bounds/bin counts are driven by the configured {@link HistogramConfig}.
 */
public class RasterHistogramStatistic extends DataTypeStatistic {
  private static final Logger LOGGER = LoggerFactory.getLogger(RasterHistogramStatistic.class);
  public static final DataTypeStatisticType STATS_TYPE =
      new DataTypeStatisticType<>("RASTER_HISTOGRAM");

  // Controls low/high value bounds and number of bins per band; persisted with the statistic.
  private HistogramConfig histogramConfig;

  public RasterHistogramStatistic() {
    super(STATS_TYPE);
  }

  public RasterHistogramStatistic(final String typeName) {
    super(STATS_TYPE, typeName);
  }

  public RasterHistogramStatistic(final String typeName, final HistogramConfig histogramConfig) {
    super(STATS_TYPE, typeName);
    this.histogramConfig = histogramConfig;
  }

  @Override
  public boolean isCompatibleWith(final Class adapterClass) {
    // Only applicable to raster adapters (entries that are GridCoverages).
    return GridCoverage.class.isAssignableFrom(adapterClass);
  }

  @Override
  public String getDescription() {
    return "Histogram for raster data.";
  }

  @Override
  public RasterHistogramValue createEmpty() {
    return new RasterHistogramValue(this);
  }

  // Cache this so we don't have to serialize the histogram config multiple times:
  // byteLength() populates it, writeBytes() consumes and clears it.
  private byte[] histogramConfigBytes = null;

  @Override
  protected int byteLength() {
    if (histogramConfigBytes == null) {
      histogramConfigBytes = PersistenceUtils.toBinary(histogramConfig);
    }
    // base statistic bytes + varint length prefix + serialized config
    return super.byteLength()
        + histogramConfigBytes.length
        + VarintUtils.unsignedIntByteLength(histogramConfigBytes.length);
  }

  @Override
  protected void writeBytes(ByteBuffer buffer) {
    super.writeBytes(buffer);
    // NOTE: relies on byteLength() having been called first to populate the cache.
    VarintUtils.writeUnsignedInt(histogramConfigBytes.length, buffer);
    buffer.put(histogramConfigBytes);
    histogramConfigBytes = null;
  }

  @Override
  protected void readBytes(ByteBuffer buffer) {
    super.readBytes(buffer);
    final byte[] configBinary = ByteArrayUtils.safeRead(buffer, VarintUtils.readUnsignedInt(buffer));
    histogramConfig = (HistogramConfig) PersistenceUtils.fromBinary(configBinary);
  }

  /**
   * The statistic value: a map of {@link Resolution} to a JAI histogram, populated as entries are
   * ingested and mergeable across partial results.
   */
  public static class RasterHistogramValue extends StatisticValue>
      implements
      StatisticsIngestCallback {
    // Keyed by resolution (null key is used for coverages without an explicit resolution).
    private final Map histograms = new HashMap<>();
    private HistogramConfig histogramConfig;

    public RasterHistogramValue() {
      super(null);
      this.histogramConfig = null;
    }

    private RasterHistogramValue(final RasterHistogramStatistic statistic) {
      super(statistic);
      this.histogramConfig = statistic.histogramConfig;
    }

    /** @return the set of resolutions for which a histogram has been accumulated */
    public Set getResolutions() {
      return histograms.keySet();
    }

    /** @return the histogram for the given resolution, or null if none accumulated */
    public javax.media.jai.Histogram getHistogram(final Resolution resolution) {
      return histograms.get(resolution);
    }

    @Override
    public void merge(Mergeable merge) {
      if ((merge != null) && (merge instanceof RasterHistogramValue)) {
        // Union of resolutions from both sides; merge bin counts where both have data.
        final Set resolutions = new HashSet<>(getResolutions());
        resolutions.addAll(((RasterHistogramValue) merge).getResolutions());
        for (final Resolution res : resolutions) {
          final javax.media.jai.Histogram otherHistogram =
              ((RasterHistogramValue) merge).getHistogram(res);
          final javax.media.jai.Histogram thisHistogram = getHistogram(res);
          if (otherHistogram != null) {
            javax.media.jai.Histogram mergedHistogram;
            if (thisHistogram != null) {
              mergedHistogram = mergeHistograms(thisHistogram, otherHistogram);
            } else {
              mergedHistogram = otherHistogram;
            }
            synchronized (this) {
              histograms.put(res, mergedHistogram);
            }
          }
        }
      }
    }

    @Override
    public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) {
      /*
       * Create the operation for the Histogram with a ROI. No subsampling should be applied.
       */
      final Geometry footprint;
      if (entry instanceof FitToIndexGridCoverage) {
        footprint = ((FitToIndexGridCoverage) entry).getFootprintWorldGeometry();
        if (footprint == null) {
          // nothing within the index footprint to histogram
          return;
        }
      } else {
        // this is a condition that isn't going to be exercised typically in
        // any code, but at this point we will assume default CRS
        footprint =
            RasterUtils.getFootprint((GridCoverage) entry, GeoWaveGTRasterFormat.DEFAULT_CRS);
      }
      final GridCoverage originalCoverage;
      Resolution resolution = null;
      if (entry instanceof FitToIndexGridCoverage) {
        originalCoverage = ((FitToIndexGridCoverage) entry).getOriginalCoverage();
        resolution = ((FitToIndexGridCoverage) entry).getResolution();
      } else {
        originalCoverage = (GridCoverage) entry;
      }
      // A footprint may be a collection of polygons; histogram each polygonal piece.
      if (footprint instanceof GeometryCollection) {
        final GeometryCollection collection = (GeometryCollection) footprint;
        for (int g = 0; g < collection.getNumGeometries(); g++) {
          final Geometry geom = collection.getGeometryN(g);
          if (geom instanceof Polygon) {
            mergePoly(originalCoverage, (Polygon) geom, resolution);
          }
        }
      } else if (footprint instanceof Polygon) {
        mergePoly(originalCoverage, (Polygon) footprint, resolution);
      }
    }

    @Override
    public Map getValue() {
      return histograms;
    }

    /**
     * Runs the GeoTools "Histogram" coverage operation with the polygon as ROI and merges the
     * result into the entry for the given resolution.
     */
    private void mergePoly(
        final GridCoverage originalCoverage,
        final Polygon poly,
        final Resolution resolution) {
      final CoverageProcessor processor = CoverageProcessor.getInstance();
      final AbstractOperation op = (AbstractOperation) processor.getOperation("Histogram");
      final ParameterValueGroup params = op.getParameters();
      params.parameter("Source").setValue(originalCoverage);
      params.parameter(BaseStatisticsOperationJAI.ROI.getName().getCode()).setValue(poly);
      // Bounds and bin counts come from the configured HistogramConfig.
      params.parameter("lowValue").setValue(histogramConfig.getLowValues());
      params.parameter("highValue").setValue(histogramConfig.getHighValues());
      params.parameter("numBins").setValue(histogramConfig.getNumBins());
      try {
        final GridCoverage2D coverage = (GridCoverage2D) op.doOperation(params, null);
        final javax.media.jai.Histogram histogram =
            (javax.media.jai.Histogram) coverage.getProperty(
                Histogram.GT_SYNTHETIC_PROPERTY_HISTOGRAM);
        javax.media.jai.Histogram mergedHistogram;
        final javax.media.jai.Histogram resolutionHistogram = histograms.get(resolution);
        if (resolutionHistogram != null) {
          mergedHistogram = mergeHistograms(resolutionHistogram, histogram);
        } else {
          mergedHistogram = histogram;
        }
        synchronized (this) {
          histograms.put(resolution, mergedHistogram);
        }
      } catch (final Exception e) {
        // this is simply 'info' because there is a known issue in the
        // histogram op when the ROI is so small that the resulting cropped
        // pixel size is 0
        LOGGER.info(
            "This is often a non-issue relating to applying an ROI calculation that results in 0 pixels (the error is in calculating stats).",
            e);
      }
    }

    /**
     * Bin-wise merge of two JAI histograms: per band, takes the min of the low bounds, the max of
     * the high bounds, the min common bin count, and sums bin counts.
     */
    private static javax.media.jai.Histogram mergeHistograms(
        final javax.media.jai.Histogram histogram1,
        final javax.media.jai.Histogram histogram2) {
      final int numBands = Math.min(histogram1.getNumBands(), histogram2.getNumBands());
      final double[] lowValue1 = histogram1.getLowValue();
      final double[] lowValue2 = histogram2.getLowValue();
      final double[] lowValue = new double[numBands];
      for (int b = 0; b < numBands; b++) {
        lowValue[b] = Math.min(lowValue1[b], lowValue2[b]);
      }
      final double[] highValue1 = histogram1.getHighValue();
      final double[] highValue2 = histogram2.getHighValue();
      final double[] highValue = new double[numBands];
      for (int b = 0; b < numBands; b++) {
        highValue[b] = Math.max(highValue1[b], highValue2[b]);
      }
      final int[][] bins1 = histogram1.getBins();
      final int[][] bins2 = histogram2.getBins();
      final int[] numBins = new int[numBands];
      for (int b = 0; b < numBands; b++) {
        numBins[b] = Math.min(bins1[b].length, bins2[b].length);
      }
      final javax.media.jai.Histogram histogram =
          new javax.media.jai.Histogram(numBins, lowValue, highValue);
      for (int b = 0; b < numBands; b++) {
        // this is a bit of a hack, but the only way to interact with the
        // counts in a mutable way is by getting an array of the bin counts
        // and setting values in the array
        final int[] bins = histogram.getBins(b);
        for (int i = 0; i < bins.length; i++) {
          bins[i] = bins1[b][i] + bins2[b][i];
        }
      }
      return histogram;
    }

    @Override
    public byte[] toBinary() {
      // Format: varint entry count, then per entry a varint-prefixed persisted Resolution key
      // (zero length means null key) and a varint-prefixed Java-serialized JAI Histogram value.
      final List perEntryBinary = new ArrayList<>();
      int totalBytes = 0;
      for (final Entry entry : histograms.entrySet()) {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] keyBytes;
        byte[] valueBytes = new byte[] {};
        if (entry.getKey() != null) {
          keyBytes = PersistenceUtils.toBinary(entry.getKey());
        } else {
          keyBytes = new byte[] {};
        }
        if (entry.getValue() != null) {
          // JAI Histogram is serialized via standard Java serialization here.
          ObjectOutputStream oos;
          try {
            oos = new ObjectOutputStream(baos);
            oos.writeObject(entry.getValue());
            oos.close();
            baos.close();
            valueBytes = baos.toByteArray();
          } catch (final IOException e) {
            LOGGER.warn("Unable to write histogram", e);
          }
        }
        // varint-encoded key and value lengths plus the payloads themselves
        final int entryBytes =
            VarintUtils.unsignedIntByteLength(keyBytes.length)
                + VarintUtils.unsignedIntByteLength(valueBytes.length)
                + keyBytes.length
                + valueBytes.length;
        final ByteBuffer buf = ByteBuffer.allocate(entryBytes);
        VarintUtils.writeUnsignedInt(keyBytes.length, buf);
        buf.put(keyBytes);
        VarintUtils.writeUnsignedInt(valueBytes.length, buf);
        buf.put(valueBytes);
        perEntryBinary.add(buf.array());
        totalBytes += entryBytes;
      }
      totalBytes += VarintUtils.unsignedIntByteLength(perEntryBinary.size());
      final ByteBuffer buf = ByteBuffer.allocate(totalBytes);
      VarintUtils.writeUnsignedInt(perEntryBinary.size(), buf);
      for (final byte[] entryBinary : perEntryBinary) {
        buf.put(entryBinary);
      }
      return buf.array();
    }

    @Override
    public void fromBinary(byte[] bytes) {
      // Mirror of toBinary(); zero-length key/value segments decode to null.
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final int numEntries = VarintUtils.readUnsignedInt(buf);
      ByteArrayUtils.verifyBufferSize(buf, numEntries);
      for (int i = 0; i < numEntries; i++) {
        final int keyLength = VarintUtils.readUnsignedInt(buf);
        Resolution key = null;
        if (keyLength > 0) {
          final byte[] keyBytes = ByteArrayUtils.safeRead(buf, keyLength);
          key = (Resolution) PersistenceUtils.fromBinary(keyBytes);
        }
        final int valueLength = VarintUtils.readUnsignedInt(buf);
        javax.media.jai.Histogram histogram = null;
        if (valueLength > 0) {
          final byte[] valueBytes = ByteArrayUtils.safeRead(buf, valueLength);
          ObjectInputStream ois;
          try {
            ois = new ObjectInputStream(new ByteArrayInputStream(valueBytes));
            histogram = (javax.media.jai.Histogram) ois.readObject();
          } catch (IOException | ClassNotFoundException e) {
            LOGGER.warn("Unable to read histogram", e);
          }
        }
        histograms.put(key, histogram);
      }
    }
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterOverviewStatistic.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.adapter.raster.stats;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;
import org.apache.commons.lang3.ArrayUtils;
import org.locationtech.geowave.adapter.raster.FitToIndexGridCoverage;
import org.locationtech.geowave.adapter.raster.Resolution;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.Mergeable;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.DataTypeStatistic;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback;
import org.locationtech.geowave.core.store.statistics.adapter.DataTypeStatisticType;
import org.opengis.coverage.grid.GridCoverage;

/**
 * Data-type statistic that tracks the distinct set of {@link Resolution}s present in a raster
 * dataset, collected from {@link FitToIndexGridCoverage} entries as they are ingested.
 */
public class RasterOverviewStatistic extends DataTypeStatistic {
  public static final DataTypeStatisticType STATS_TYPE =
      new DataTypeStatisticType<>("RASTER_OVERVIEW");

  public RasterOverviewStatistic() {
    super(STATS_TYPE);
  }

  public RasterOverviewStatistic(final String typeName) {
    super(STATS_TYPE, typeName);
  }

  @Override
  public boolean isCompatibleWith(final Class adapterClass) {
    // Only applicable to raster adapters (entries that are GridCoverages).
    return GridCoverage.class.isAssignableFrom(adapterClass);
  }

  @Override
  public String getDescription() {
    return "Provides an overview of the resolutions of a raster dataset.";
  }

  @Override
  public RasterOverviewValue createEmpty() {
    return new RasterOverviewValue(this);
  }

  /**
   * The statistic value: a de-duplicated, sorted array of resolutions. All access to the array is
   * synchronized on this instance.
   */
  public static class RasterOverviewValue extends StatisticValue
      implements
      StatisticsIngestCallback {
    private Resolution[] resolutions = new Resolution[] {};

    public RasterOverviewValue() {
      this(null);
    }

    public RasterOverviewValue(final Statistic statistic) {
      super(statistic);
    }

    /**
     * Removes the resolution whose per-dimension values match the given one.
     *
     * @param res the resolution to remove
     * @return true if a matching resolution was found and removed
     */
    public boolean removeResolution(Resolution res) {
      synchronized (this) {
        int index = -1;
        // match by per-dimension resolution values rather than object identity
        for (int i = 0; i < resolutions.length; i++) {
          if (Arrays.equals(
              resolutions[i].getResolutionPerDimension(),
              res.getResolutionPerDimension())) {
            index = i;
            break;
          }
        }
        if (index >= 0) {
          resolutions = ArrayUtils.remove(resolutions, index);
          return true;
        }
        return false;
      }
    }

    @Override
    public void merge(Mergeable merge) {
      if (merge instanceof RasterOverviewValue) {
        synchronized (this) {
          resolutions =
              incorporateResolutions(resolutions, ((RasterOverviewValue) merge).getValue());
        }
      }
    }

    @Override
    public void entryIngested(DataTypeAdapter adapter, T entry, GeoWaveRow... rows) {
      // only FitToIndexGridCoverage entries carry a resolution to record
      if (entry instanceof FitToIndexGridCoverage) {
        final FitToIndexGridCoverage fitEntry = (FitToIndexGridCoverage) entry;
        synchronized (this) {
          resolutions =
              incorporateResolutions(resolutions, new Resolution[] {fitEntry.getResolution()});
        }
      }
    }

    @Override
    public Resolution[] getValue() {
      synchronized (this) {
        return resolutions;
      }
    }

    @Override
    public byte[] toBinary() {
      // Format: varint count, then per resolution a varint-prefixed persisted binary.
      synchronized (this) {
        final List resolutionBinaries = new ArrayList<>(resolutions.length);
        int byteCount = 0;
        for (final Resolution res : resolutions) {
          final byte[] resBinary = PersistenceUtils.toBinary(res);
          resolutionBinaries.add(resBinary);
          // varint length prefix plus the resolution's binary
          byteCount += (resBinary.length + VarintUtils.unsignedIntByteLength(resBinary.length));
        }
        // varint for the list size
        byteCount += VarintUtils.unsignedIntByteLength(resolutionBinaries.size());
        final ByteBuffer buf = ByteBuffer.allocate(byteCount);
        VarintUtils.writeUnsignedInt(resolutionBinaries.size(), buf);
        for (final byte[] resBinary : resolutionBinaries) {
          VarintUtils.writeUnsignedInt(resBinary.length, buf);
          buf.put(resBinary);
        }
        return buf.array();
      }
    }

    @Override
    public void fromBinary(byte[] bytes) {
      // Mirror of toBinary().
      final ByteBuffer buf = ByteBuffer.wrap(bytes);
      final int resLength = VarintUtils.readUnsignedInt(buf);
      synchronized (this) {
        resolutions = new Resolution[resLength];
        for (int i = 0; i < resolutions.length; i++) {
          final byte[] resBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
          resolutions[i] = (Resolution) PersistenceUtils.fromBinary(resBytes);
        }
      }
    }
  }

  /**
   * Returns the sorted union of the two resolution arrays, de-duplicated via a TreeSet (relies on
   * Resolution's natural ordering).
   */
  private static Resolution[] incorporateResolutions(
      final Resolution[] res1,
      final Resolution[] res2) {
    final TreeSet resolutionSet = new TreeSet<>();
    for (final Resolution res : res1) {
      resolutionSet.add(res);
    }
    for (final Resolution res : res2) {
      resolutionSet.add(res);
    }
    final Resolution[] combinedRes = new Resolution[resolutionSet.size()];
    int i = 0;
    for (final Resolution res : resolutionSet) {
      combinedRes[i++] = res;
    }
    return combinedRes;
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterRegisteredStatistics.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.adapter.raster.stats;

import org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic.RasterBoundingBoxValue;
import org.locationtech.geowave.adapter.raster.stats.RasterFootprintStatistic.RasterFootprintValue;
import org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic.RasterHistogramValue;
import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue;
import org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI;

/**
 * SPI registration of the raster adapter statistics (bounding box, footprint, histogram, and
 * overview), binding each statistic type to its constructors and persistable ids.
 */
public class RasterRegisteredStatistics implements StatisticsRegistrySPI {

  @Override
  public RegisteredStatistic[] getRegisteredStatistics() {
    // Adapter (data-type) statistics; each registration pairs the statistic and its value with
    // a unique pair of persistable ids.
    final RegisteredStatistic boundingBox =
        new RegisteredStatistic(
            RasterBoundingBoxStatistic.STATS_TYPE,
            RasterBoundingBoxStatistic::new,
            RasterBoundingBoxValue::new,
            (short) 2300,
            (short) 2301);
    final RegisteredStatistic footprint =
        new RegisteredStatistic(
            RasterFootprintStatistic.STATS_TYPE,
            RasterFootprintStatistic::new,
            RasterFootprintValue::new,
            (short) 2302,
            (short) 2303);
    final RegisteredStatistic histogram =
        new RegisteredStatistic(
            RasterHistogramStatistic.STATS_TYPE,
            RasterHistogramStatistic::new,
            RasterHistogramValue::new,
            (short) 2304,
            (short) 2305);
    final RegisteredStatistic overview =
        new RegisteredStatistic(
            RasterOverviewStatistic.STATS_TYPE,
            RasterOverviewStatistic::new,
            RasterOverviewValue::new,
            (short) 2306,
            (short) 2307);
    return new RegisteredStatistic[] {boundingBox, footprint, histogram, overview};
  }

  @Override
  public RegisteredBinningStrategy[] getRegisteredBinningStrategies() {
    // no raster-specific binning strategies
    return new RegisteredBinningStrategy[0];
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/stats/RasterStatisticQueryBuilder.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.stats; import java.util.Map; import javax.media.jai.Histogram; import org.locationtech.geowave.adapter.raster.Resolution; import org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistic.RasterBoundingBoxValue; import org.locationtech.geowave.adapter.raster.stats.RasterFootprintStatistic.RasterFootprintValue; import org.locationtech.geowave.adapter.raster.stats.RasterHistogramStatistic.RasterHistogramValue; import org.locationtech.geowave.adapter.raster.stats.RasterOverviewStatistic.RasterOverviewValue; import org.locationtech.geowave.core.store.api.StatisticQueryBuilder; import org.locationtech.geowave.core.store.statistics.query.DataTypeStatisticQueryBuilder; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; public interface RasterStatisticQueryBuilder { /** * Create a new data type statistic query builder for a raster bounding box statistic. * * @return the data type statistic query builder */ static DataTypeStatisticQueryBuilder bbox() { return StatisticQueryBuilder.newBuilder(RasterBoundingBoxStatistic.STATS_TYPE); } /** * Create a new data type statistic query builder for a raster footprint statistic. * * @return the data type statistic query builder */ static DataTypeStatisticQueryBuilder footprint() { return StatisticQueryBuilder.newBuilder(RasterFootprintStatistic.STATS_TYPE); } /** * Create a new data type statistic query builder for a raster histogram statistic. 
* * @return the data type statistic query builder */ static DataTypeStatisticQueryBuilder> histogram() { return StatisticQueryBuilder.newBuilder(RasterHistogramStatistic.STATS_TYPE); } /** * Create a new data type statistic query builder for a raster overview statistic. * * @return the data type statistic query builder */ static DataTypeStatisticQueryBuilder overview() { return StatisticQueryBuilder.newBuilder(RasterOverviewStatistic.STATS_TYPE); } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/util/DataBufferPersistenceUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.adapter.raster.util;

import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.DataBufferDouble;
import java.awt.image.DataBufferFloat;
import java.awt.image.DataBufferInt;
import java.awt.image.DataBufferShort;
import java.awt.image.DataBufferUShort;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.commons.lang3.ArrayUtils;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.ByteDataBuffer;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.DoubleArray;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.DoubleDataBuffer;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.FloatArray;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.FloatDataBuffer;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.SignedIntArray;
import org.locationtech.geowave.adapter.raster.protobuf.DataBufferProtos.SignedIntDataBuffer;
import com.google.common.primitives.Doubles;
import com.google.common.primitives.Floats;
import com.google.common.primitives.Ints;
import com.google.protobuf.ByteString;
import me.lemire.integercompression.differential.IntegratedIntCompressor;

/**
 * Serializes {@link java.awt.image.DataBuffer} instances to/from a protobuf representation.
 * Short/ushort/int banks are widened to int and delta-compressed with
 * {@link IntegratedIntCompressor} before being stored; byte, float and double banks are stored
 * directly. The banks are streamed into the protobuf builders lazily via Iterable adapters to
 * avoid materializing intermediate lists.
 */
public class DataBufferPersistenceUtils {
  /**
   * Serializes a DataBuffer (type, bank offsets, size and bank data) to protobuf binary.
   *
   * @param dataBuffer the buffer to serialize; must be one of the standard AWT DataBuffer types
   * @return the protobuf-encoded bytes
   */
  public static byte[] getDataBufferBinary(final DataBuffer dataBuffer) {
    final DataBufferProtos.DataBuffer.Builder bldr = DataBufferProtos.DataBuffer.newBuilder();
    bldr.setType(dataBuffer.getDataType());
    bldr.addAllOffsets(Ints.asList(dataBuffer.getOffsets()));
    bldr.setSize(dataBuffer.getSize());
    switch (dataBuffer.getDataType()) {
      case DataBuffer.TYPE_BYTE:
        final ByteDataBuffer.Builder byteBldr = ByteDataBuffer.newBuilder();
        final byte[][] byteBank = ((DataBufferByte) dataBuffer).getBankData();
        // lazily wrap each byte bank as a ByteString
        final Iterable byteIt = () -> new Iterator() {
          private int index = 0;

          @Override
          public boolean hasNext() {
            return byteBank.length > index;
          }

          @Override
          public ByteString next() {
            if (!hasNext()) {
              throw new NoSuchElementException();
            }
            return ByteString.copyFrom(byteBank[index++]);
          }
        };
        byteBldr.addAllBanks(byteIt);
        bldr.setByteDb(byteBldr.build());
        break;
      case DataBuffer.TYPE_SHORT:
        // short banks are widened to int so they share the compressed signed-int encoding
        setBuilder(shortToInt(((DataBufferShort) dataBuffer).getBankData()), bldr);
        break;
      case DataBuffer.TYPE_USHORT:
        setBuilder(shortToInt(((DataBufferUShort) dataBuffer).getBankData()), bldr);
        break;
      case DataBuffer.TYPE_INT:
        setBuilder(((DataBufferInt) dataBuffer).getBankData(), bldr);
        break;
      case DataBuffer.TYPE_FLOAT:
        final FloatDataBuffer.Builder fltBldr = FloatDataBuffer.newBuilder();
        final float[][] fltBank = ((DataBufferFloat) dataBuffer).getBankData();
        // NOTE(review): unlike the byte iterator above, next() here has no hasNext() guard —
        // presumably safe because protobuf's addAllBanks only iterates while hasNext() is true
        final Iterable floatIt = () -> new Iterator() {
          private int index = 0;

          @Override
          public boolean hasNext() {
            return fltBank.length > index;
          }

          @Override
          public FloatArray next() {
            return FloatArray.newBuilder().addAllSamples(Floats.asList(fltBank[index++])).build();
          }
        };
        fltBldr.addAllBanks(floatIt);
        bldr.setFlt(fltBldr);
        break;
      case DataBuffer.TYPE_DOUBLE:
        final DoubleDataBuffer.Builder dblBldr = DoubleDataBuffer.newBuilder();
        final double[][] dblBank = ((DataBufferDouble) dataBuffer).getBankData();
        final Iterable dblIt = () -> new Iterator() {
          private int index = 0;

          @Override
          public boolean hasNext() {
            return dblBank.length > index;
          }

          @Override
          public DoubleArray next() {
            return DoubleArray.newBuilder().addAllSamples(Doubles.asList(dblBank[index++])).build();
          }
        };
        dblBldr.addAllBanks(dblIt);
        bldr.setDbl(dblBldr);
        break;
      default:
        throw new RuntimeException(
            "Unsupported DataBuffer type for serialization " + dataBuffer.getDataType());
    }
    return bldr.build().toByteArray();
  }

  /**
   * Compresses each int bank with IntegratedIntCompressor and stores the result as the builder's
   * signed-int buffer.
   */
  private static void setBuilder(
      final int[][] intBank,
      final DataBufferProtos.DataBuffer.Builder bldr) {
    final IntegratedIntCompressor iic = new IntegratedIntCompressor();
    final SignedIntDataBuffer.Builder intBldr = SignedIntDataBuffer.newBuilder();
    final Iterable intIt = () -> new Iterator() {
      private int index = 0;

      @Override
      public boolean hasNext() {
        return intBank.length > index;
      }

      @Override
      public SignedIntArray next() {
        final int[] internalArray = intBank[index++];
        final int[] compressed = iic.compress(internalArray);
        return SignedIntArray.newBuilder().addAllSamples(Ints.asList(compressed)).build();
      }
    };
    intBldr.addAllBanks(intIt);
    bldr.setSint(intBldr);
  }

  /**
   * Deserializes protobuf binary (produced by {@link #getDataBufferBinary}) back into the
   * appropriate AWT DataBuffer subclass.
   *
   * @param binary the protobuf-encoded bytes
   * @return the reconstructed DataBuffer
   * @throws IOException if the protobuf cannot be parsed
   * @throws ClassNotFoundException declared for API compatibility
   */
  public static DataBuffer getDataBuffer(final byte[] binary)
      throws IOException, ClassNotFoundException {
    // Read serialized form from the stream.
    final DataBufferProtos.DataBuffer buffer = DataBufferProtos.DataBuffer.parseFrom(binary);
    final int[] offsets = ArrayUtils.toPrimitive(buffer.getOffsetsList().toArray(new Integer[] {}));
    // Restore the transient DataBuffer.
    switch (buffer.getType()) {
      case DataBuffer.TYPE_BYTE:
        return new DataBufferByte(
            listToByte(buffer.getByteDb().getBanksList()),
            buffer.getSize(),
            offsets);
      case DataBuffer.TYPE_SHORT:
        // short/ushort were stored widened as compressed ints; narrow back to short
        return new DataBufferShort(
            intToShort(listToInt(buffer.getSint().getBanksList())),
            buffer.getSize(),
            offsets);
      case DataBuffer.TYPE_USHORT:
        return new DataBufferUShort(
            intToShort(listToInt(buffer.getSint().getBanksList())),
            buffer.getSize(),
            offsets);
      case DataBuffer.TYPE_INT:
        return new DataBufferInt(
            listToInt(buffer.getSint().getBanksList()),
            buffer.getSize(),
            offsets);
      case DataBuffer.TYPE_FLOAT:
        return new DataBufferFloat(
            listToFloat(buffer.getFlt().getBanksList()),
            buffer.getSize(),
            offsets);
      case DataBuffer.TYPE_DOUBLE:
        return new DataBufferDouble(
            listToDouble(buffer.getDbl().getBanksList()),
            buffer.getSize(),
            offsets);
      default:
        throw new RuntimeException(
            "Unsupported data buffer type for deserialization" + buffer.getType());
    }
  }

  /** Unwraps a list of ByteStrings into per-bank byte arrays. */
  private static byte[][] listToByte(final List list) {
    final byte[][] retVal = new byte[list.size()][];
    for (int i = 0; i < list.size(); i++) {
      retVal[i] = list.get(i).toByteArray();
    }
    return retVal;
  }

  /** Unwraps FloatArray messages into per-bank primitive float arrays. */
  private static float[][] listToFloat(final List list) {
    final float[][] retVal = new float[list.size()][];
    for (int i = 0; i < list.size(); i++) {
      final List internalList = list.get(i).getSamplesList();
      retVal[i] = ArrayUtils.toPrimitive(internalList.toArray(new Float[internalList.size()]));
    }
    return retVal;
  }

  /** Unwraps DoubleArray messages into per-bank primitive double arrays. */
  private static double[][] listToDouble(final List list) {
    final double[][] retVal = new double[list.size()][];
    for (int i = 0; i < list.size(); i++) {
      final List internalList = list.get(i).getSamplesList();
      retVal[i] = ArrayUtils.toPrimitive(internalList.toArray(new Double[internalList.size()]));
    }
    return retVal;
  }

  /** Decompresses SignedIntArray messages (see setBuilder) into per-bank int arrays. */
  private static int[][] listToInt(final List list) {
    final IntegratedIntCompressor iic = new IntegratedIntCompressor();
    final int[][] retVal = new int[list.size()][];
    for (int i = 0; i < list.size(); i++) {
      final List internalList = list.get(i).getSamplesList();
      retVal[i] = iic.uncompress(integerListToPrimitiveArray(internalList));
    }
    return retVal;
  }

  /** Converts a boxed Integer list to a primitive int array. */
  protected static int[] integerListToPrimitiveArray(final List internalList) {
    return ArrayUtils.toPrimitive(internalList.toArray(new Integer[internalList.size()]));
  }

  /** Widens short banks to int banks (sign-extending) for compressed storage. */
  private static int[][] shortToInt(final short[][] shortBank) {
    final int[][] intBank = new int[shortBank.length][];
    for (int a = 0; a < shortBank.length; a++) {
      intBank[a] = new int[shortBank[a].length];
      for (int i = 0; i < shortBank[a].length; i++) {
        intBank[a][i] = shortBank[a][i];
      }
    }
    return intBank;
  }

  /** Narrows int banks back to short banks (truncating to the low 16 bits). */
  private static short[][] intToShort(final int[][] intBank) {
    final short[][] shortBank = new short[intBank.length][];
    for (int a = 0; a < intBank.length; a++) {
      shortBank[a] = new short[intBank[a].length];
      for (int i = 0; i < intBank[a].length; i++) {
        shortBank[a][i] = (short) intBank[a][i];
      }
    }
    return shortBank;
  }
}
================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/util/SampleModelPersistenceUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.util; import java.awt.image.BandedSampleModel; import java.awt.image.ComponentSampleModel; import java.awt.image.DataBuffer; import java.awt.image.DataBufferByte; import java.awt.image.DataBufferDouble; import java.awt.image.DataBufferFloat; import java.awt.image.DataBufferInt; import java.awt.image.DataBufferShort; import java.awt.image.DataBufferUShort; import java.awt.image.MultiPixelPackedSampleModel; import java.awt.image.PixelInterleavedSampleModel; import java.awt.image.SampleModel; import java.awt.image.SinglePixelPackedSampleModel; import javax.media.jai.ComponentSampleModelJAI; import org.locationtech.geowave.adapter.raster.protobuf.SampleModelProtos; import com.google.common.primitives.Ints; import com.google.protobuf.InvalidProtocolBufferException; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; public class SampleModelPersistenceUtils { /** Flag indicating a BandedSampleModel. */ private static final int TYPE_BANDED = 1; /** Flag indicating a PixelInterleavedSampleModel. */ private static final int TYPE_PIXEL_INTERLEAVED = 2; /** Flag indicating a SinglePixelPackedSampleModel. */ private static final int TYPE_SINGLE_PIXEL_PACKED = 3; /** Flag indicating a MultiPixelPackedSampleModel. */ private static final int TYPE_MULTI_PIXEL_PACKED = 4; /** Flag indicating a ComponentSampleModelJAI. */ private static final int TYPE_COMPONENT_JAI = 5; /** Flag indicating a generic ComponentSampleModel. 
*/ private static final int TYPE_COMPONENT = 6; public static byte[] getSampleModelBinary(final SampleModel sampleModel) { final SampleModelProtos.SampleModel.Builder bldr = SampleModelProtos.SampleModel.newBuilder(); if (sampleModel instanceof ComponentSampleModel) { final ComponentSampleModel sm = (ComponentSampleModel) sampleModel; int sampleModelType = TYPE_COMPONENT; final int transferType = sm.getTransferType(); if (sampleModel instanceof PixelInterleavedSampleModel) { sampleModelType = TYPE_PIXEL_INTERLEAVED; } else if (sampleModel instanceof BandedSampleModel) { sampleModelType = TYPE_BANDED; } else if (((sampleModel instanceof InternalComponentSampleModelJAI) || (sampleModel instanceof ComponentSampleModelJAI)) || (transferType == DataBuffer.TYPE_FLOAT) || (transferType == DataBuffer.TYPE_DOUBLE)) { sampleModelType = TYPE_COMPONENT_JAI; } bldr.setModelType(sampleModelType); if (sampleModelType != TYPE_BANDED) { bldr.setPixelStride(sm.getPixelStride()); } bldr.setScanlineStride(sm.getScanlineStride()); if (sampleModelType != TYPE_PIXEL_INTERLEAVED) { bldr.addAllBankIndices(Ints.asList(sm.getBankIndices())); } bldr.addAllBandOffsets(Ints.asList(sm.getBandOffsets())); } else if (sampleModel instanceof SinglePixelPackedSampleModel) { final SinglePixelPackedSampleModel sm = (SinglePixelPackedSampleModel) sampleModel; bldr.setModelType(TYPE_SINGLE_PIXEL_PACKED); bldr.setScanlineStride(sm.getScanlineStride()); bldr.addAllBitMasks(Ints.asList(sm.getBitMasks())); } else if (sampleModel instanceof MultiPixelPackedSampleModel) { final MultiPixelPackedSampleModel sm = (MultiPixelPackedSampleModel) sampleModel; bldr.setModelType(TYPE_MULTI_PIXEL_PACKED); bldr.setPixelBitStride(sm.getPixelBitStride()); bldr.setScanlineStride(sm.getScanlineStride()); bldr.setDataBitOffset(sm.getDataBitOffset()); } else { throw new RuntimeException("Unsupported SampleModel type for serialization " + sampleModel); } bldr.setTransferType(sampleModel.getTransferType()); 
bldr.setWidth(sampleModel.getWidth()); bldr.setHeight(sampleModel.getHeight()); return bldr.build().toByteArray(); } public static SampleModel getSampleModel(final byte[] binary) throws InvalidProtocolBufferException { final SampleModelProtos.SampleModel sm = SampleModelProtos.SampleModel.parseFrom(binary); final int sampleModelType = sm.getModelType(); switch (sampleModelType) { case TYPE_PIXEL_INTERLEAVED: return createPixelInterleavedSampleModel( sm.getTransferType(), sm.getWidth(), sm.getHeight(), sm.getPixelStride(), sm.getScanlineStride(), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList())); case TYPE_BANDED: return createBandedSampleModel( sm.getTransferType(), sm.getWidth(), sm.getHeight(), sm.getScanlineStride(), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBankIndicesList()), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList())); case TYPE_COMPONENT_JAI: return new InternalComponentSampleModelJAI( sm.getTransferType(), sm.getWidth(), sm.getHeight(), sm.getPixelStride(), sm.getScanlineStride(), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBankIndicesList()), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList())); case TYPE_COMPONENT: return new ComponentSampleModel( sm.getTransferType(), sm.getWidth(), sm.getHeight(), sm.getPixelStride(), sm.getScanlineStride(), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBankIndicesList()), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBandOffsetsList())); case TYPE_SINGLE_PIXEL_PACKED: return new SinglePixelPackedSampleModel( sm.getTransferType(), sm.getWidth(), sm.getHeight(), sm.getScanlineStride(), DataBufferPersistenceUtils.integerListToPrimitiveArray(sm.getBitMasksList())); case TYPE_MULTI_PIXEL_PACKED: return new MultiPixelPackedSampleModel( sm.getTransferType(), sm.getWidth(), sm.getHeight(), sm.getPixelBitStride(), sm.getScanlineStride(), sm.getDataBitOffset()); 
default: throw new RuntimeException( "Unsupported sample model type for deserialization " + sampleModelType); } } private static SampleModel createBandedSampleModel( final int dataType, final int width, final int height, final int numBands, int bankIndices[], int bandOffsets[]) { if (numBands < 1) { throw new IllegalArgumentException("Num Bands must be >= 1"); } if (bankIndices == null) { bankIndices = new int[numBands]; for (int i = 0; i < numBands; i++) { bankIndices[i] = i; } } if (bandOffsets == null) { bandOffsets = new int[numBands]; for (int i = 0; i < numBands; i++) { bandOffsets[i] = 0; } } if (bandOffsets.length != bankIndices.length) { throw new IllegalArgumentException( "Band Offsets " + bandOffsets.length + " doesn't match Bank Indices " + bankIndices.length); } return new InternalComponentSampleModelJAI( dataType, width, height, 1, width, bankIndices, bandOffsets); } private static SampleModel createPixelInterleavedSampleModel( final int dataType, final int width, final int height, final int pixelStride, final int scanlineStride, final int bandOffsets[]) { if (bandOffsets == null) { throw new IllegalArgumentException(); } int minBandOff = bandOffsets[0]; int maxBandOff = bandOffsets[0]; for (int i = 1; i < bandOffsets.length; i++) { minBandOff = Math.min(minBandOff, bandOffsets[i]); maxBandOff = Math.max(maxBandOff, bandOffsets[i]); } maxBandOff -= minBandOff; if (maxBandOff > scanlineStride) { throw new IllegalArgumentException( "max Band Offset (" + maxBandOff + ") must be > scanline stride (" + scanlineStride + ")"); } if ((pixelStride * width) > scanlineStride) { throw new IllegalArgumentException( "pixelStride*width (" + (pixelStride * width) + ") must be > scanline stride (" + scanlineStride + ")"); } if (pixelStride < maxBandOff) { throw new IllegalArgumentException( "max Band Offset (" + maxBandOff + ") must be > pixel stride (" + pixelStride + ")"); } switch (dataType) { case DataBuffer.TYPE_BYTE: case DataBuffer.TYPE_USHORT: return new 
PixelInterleavedSampleModel( dataType, width, height, pixelStride, scanlineStride, bandOffsets); case DataBuffer.TYPE_INT: case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_FLOAT: case DataBuffer.TYPE_DOUBLE: return new InternalComponentSampleModelJAI( dataType, width, height, pixelStride, scanlineStride, bandOffsets); default: throw new IllegalArgumentException("Unsupported data buffer type"); } } /** * This is here as an internal class only for package re-naming purposes because hbase * classloading special-cases javax.* causing problems with a package named "javax.media.jai" And * this JAI sample model is best for floating point sample values. */ /* * $RCSfile: ComponentSampleModelJAI.java,v $ * * Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved. * * Use is subject to license terms. * * $Revision: 1.1 $ $Date: 2005-02-11 04:57:07 $ $State: Exp $ */ /** * This class represents image data which is stored such that each sample of a pixel occupies one * data element of the DataBuffer. It stores the N samples which make up a pixel in N * separate data array elements. Different bands may be in different banks of the DataBuffer * . Accessor methods are provided so that image data can be manipulated directly. This * class can support different kinds of interleaving, e.g. band interleaving, scanline * interleaving, and pixel interleaving. Pixel stride is the number of data array elements between * two samples for the same band on the same scanline. Scanline stride is the number of data array * elements between a given sample and the corresponding sample in the same column of the next * scanline. Band offsets denote the number of data array elements from the first data array * element of the bank of the DataBuffer holding each band to the first sample of the * band. The bands are numbered from 0 to N-1. 
This class can represent image data for the * dataTypes enumerated in java.awt.image.DataBuffer (all samples of a given * ComponentSampleModel are stored with the same precision) . This class adds support for * Double and Float data types in addition to those supported by the * ComponentSampleModel class in Java 2D. All strides and offsets must be * non-negative. * * @see java.awt.image.ComponentSampleModel */ private static class InternalComponentSampleModelJAI extends ComponentSampleModel { /** * Constructs a ComponentSampleModel with the specified parameters. The number of * bands will be given by the length of the bandOffsets array. All bands will be stored in the * first bank of the DataBuffer. * * @param dataType The data type for storing samples. * @param w The width (in pixels) of the region of image data described. * @param h The height (in pixels) of the region of image data described. * @param pixelStride The pixel stride of the region of image data described. * @param scanlineStride The line stride of the region of image data described. * @param bandOffsets The offsets of all bands. */ public InternalComponentSampleModelJAI( final int dataType, final int w, final int h, final int pixelStride, final int scanlineStride, final int bandOffsets[]) { super(dataType, w, h, pixelStride, scanlineStride, bandOffsets); } /** * Constructs a ComponentSampleModel with the specified parameters. The number of * bands will be given by the length of the bandOffsets array. Different bands may be stored in * different banks of the DataBuffer. * * @param dataType The data type for storing samples. * @param w The width (in pixels) of the region of image data described. * @param h The height (in pixels) of the region of image data described. * @param pixelStride The pixel stride of the region of image data described. * @param scanlineStride The line stride of the region of image data described. * @param bankIndices The bank indices of all bands. 
* @param bandOffsets The band offsets of all bands. */ public InternalComponentSampleModelJAI( final int dataType, final int w, final int h, final int pixelStride, final int scanlineStride, final int bankIndices[], final int bandOffsets[]) { super(dataType, w, h, pixelStride, scanlineStride, bankIndices, bandOffsets); } /** * Returns the size of the data buffer (in data elements) needed for a data buffer that matches * this ComponentSampleModel. */ private long getBufferSize() { int maxBandOff = bandOffsets[0]; for (int i = 1; i < bandOffsets.length; i++) { maxBandOff = Math.max(maxBandOff, bandOffsets[i]); } long size = 0; if (maxBandOff >= 0) { size += maxBandOff + 1; } if (pixelStride > 0) { size += (long) pixelStride * (width - 1); } if (scanlineStride > 0) { size += (long) scanlineStride * (height - 1); } return size; } /** Preserves band ordering with new step factor... */ private int[] JAIorderBands(final int orig[], final int step) { final int map[] = new int[orig.length]; final int ret[] = new int[orig.length]; for (int i = 0; i < map.length; i++) { map[i] = i; } for (int i = 0; i < ret.length; i++) { int index = i; for (int j = i + 1; j < ret.length; j++) { if (orig[map[index]] > orig[map[j]]) { index = j; } } ret[map[index]] = i * step; map[index] = map[i]; } return ret; } /** * Creates a new ComponentSampleModel with the specified width and height. The new * SampleModel will have the same number of bands, storage data type, interleaving * scheme, and pixel stride as this SampleModel. * * @param w The width in pixels. 
* @param h The height in pixels */ @Override public SampleModel createCompatibleSampleModel(final int w, final int h) { final SampleModel ret = null; final long size; int minBandOff = bandOffsets[0]; int maxBandOff = bandOffsets[0]; for (int i = 1; i < bandOffsets.length; i++) { minBandOff = Math.min(minBandOff, bandOffsets[i]); maxBandOff = Math.max(maxBandOff, bandOffsets[i]); } maxBandOff -= minBandOff; final int bands = bandOffsets.length; int bandOff[]; int pStride = Math.abs(pixelStride); int lStride = Math.abs(scanlineStride); final int bStride = Math.abs(maxBandOff); if (pStride > lStride) { if (pStride > bStride) { if (lStride > bStride) { // pix > line > band bandOff = new int[bandOffsets.length]; for (int i = 0; i < bands; i++) { bandOff[i] = bandOffsets[i] - minBandOff; } lStride = bStride + 1; pStride = lStride * h; } else { // pix > band > line bandOff = JAIorderBands(bandOffsets, lStride * h); pStride = bands * lStride * h; } } else { // band > pix > line pStride = lStride * h; bandOff = JAIorderBands(bandOffsets, pStride * w); } } else { if (pStride > bStride) { // line > pix > band bandOff = new int[bandOffsets.length]; for (int i = 0; i < bands; i++) { bandOff[i] = bandOffsets[i] - minBandOff; } pStride = bStride + 1; lStride = pStride * w; } else { if (lStride > bStride) { // line > band > pix bandOff = JAIorderBands(bandOffsets, pStride * w); lStride = bands * pStride * w; } else { // band > line > pix lStride = pStride * w; bandOff = JAIorderBands(bandOffsets, lStride * h); } } } // make sure we make room for negative offsets... int base = 0; if (scanlineStride < 0) { base += lStride * h; lStride *= -1; } if (pixelStride < 0) { base += pStride * w; pStride *= -1; } for (int i = 0; i < bands; i++) { bandOff[i] += base; } return new ComponentSampleModelJAI(dataType, w, h, pStride, lStride, bankIndices, bandOff); } /** * This creates a new ComponentSampleModel with a subset of the bands of this * ComponentSampleModel. 
The new ComponentSampleModel can be used with * any DataBuffer that the existing ComponentSampleModel can be used * with. The new ComponentSampleModel/DataBuffer combination will * represent an image with a subset of the bands of the original ComponentSampleModel * /DataBuffer combination. * * @param bands subset of bands of this ComponentSampleModel */ @Override public SampleModel createSubsetSampleModel(final int bands[]) { final int newBankIndices[] = new int[bands.length]; final int newBandOffsets[] = new int[bands.length]; for (int i = 0; i < bands.length; i++) { final int b = bands[i]; newBankIndices[i] = bankIndices[b]; newBandOffsets[i] = bandOffsets[b]; } return new ComponentSampleModelJAI( dataType, width, height, pixelStride, scanlineStride, newBankIndices, newBandOffsets); } /** * Creates a DataBuffer that corresponds to this ComponentSampleModel. * The DataBuffer's data type, number of banks, and size will be consistent with * this ComponentSampleModel. */ @Override public DataBuffer createDataBuffer() { DataBuffer dataBuffer = null; final int size = (int) getBufferSize(); switch (dataType) { case DataBuffer.TYPE_BYTE: dataBuffer = new DataBufferByte(size, numBanks); break; case DataBuffer.TYPE_USHORT: dataBuffer = new DataBufferUShort(size, numBanks); break; case DataBuffer.TYPE_INT: dataBuffer = new DataBufferInt(size, numBanks); break; case DataBuffer.TYPE_SHORT: dataBuffer = new DataBufferShort(size, numBanks); break; case DataBuffer.TYPE_FLOAT: dataBuffer = new DataBufferFloat(size, numBanks); break; case DataBuffer.TYPE_DOUBLE: dataBuffer = new DataBufferDouble(size, numBanks); break; default: throw new RuntimeException("Unsupported data buffer type " + dataType); } return dataBuffer; } /** * Returns data for a single pixel in a primitive array of type TransferType. For a * ComponentSampleModel, this will be the same as the data type, and samples will be * returned one per array element. 
Generally, obj should be passed in as null, so that the * Object will be created automatically and will be of the right primitive data * type. * *

The following code illustrates transferring data for one pixel from DataBuffer * db1, whose storage layout is described by ComponentSampleModel * csm1, to DataBuffer db2, whose storage layout * is described by ComponentSampleModel csm2. The transfer will * generally be more efficient than using getPixel/setPixel. * *

     * ComponentSampleModel csm1, csm2;
     * DataBufferInt db1, db2;
     * csm2.setDataElements(x, y, csm1.getDataElements(x, y, null, db1), db2);
     * 
* * Using getDataElements/setDataElements to transfer between two DataBuffer * /SampleModel pairs is legitimate if the SampleModels have the same number of * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are * the same. * *

* * @param x The X coordinate of the pixel location. * @param y The Y coordinate of the pixel location. * @param obj If non-null, a primitive array in which to return the pixel data. * @param data The DataBuffer containing the image data. * @throws ClassCastException if obj is non-null and is not a primitive array of * type TransferType. * @throws ArrayIndexOutOfBoundsException if the coordinates are not in bounds, or * if obj is non-null and is not large enough to hold the pixel data. */ @Override public Object getDataElements(final int x, final int y, Object obj, final DataBuffer data) { final int type = getTransferType(); final int numDataElems = getNumDataElements(); final int pixelOffset = (y * scanlineStride) + (x * pixelStride); switch (type) { case DataBuffer.TYPE_BYTE: byte[] bdata; if (obj == null) { bdata = new byte[numDataElems]; } else { bdata = (byte[]) obj; } for (int i = 0; i < numDataElems; i++) { bdata[i] = (byte) data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]); } obj = bdata; break; case DataBuffer.TYPE_USHORT: short[] usdata; if (obj == null) { usdata = new short[numDataElems]; } else { usdata = (short[]) obj; } for (int i = 0; i < numDataElems; i++) { usdata[i] = (short) data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]); } obj = usdata; break; case DataBuffer.TYPE_INT: int[] idata; if (obj == null) { idata = new int[numDataElems]; } else { idata = (int[]) obj; } for (int i = 0; i < numDataElems; i++) { idata[i] = data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]); } obj = idata; break; case DataBuffer.TYPE_SHORT: short[] sdata; if (obj == null) { sdata = new short[numDataElems]; } else { sdata = (short[]) obj; } for (int i = 0; i < numDataElems; i++) { sdata[i] = (short) data.getElem(bankIndices[i], pixelOffset + bandOffsets[i]); } obj = sdata; break; case DataBuffer.TYPE_FLOAT: float[] fdata; if (obj == null) { fdata = new float[numDataElems]; } else { fdata = (float[]) obj; } for (int i = 0; i < numDataElems; i++) 
{ fdata[i] = data.getElemFloat(bankIndices[i], pixelOffset + bandOffsets[i]); } obj = fdata; break; case DataBuffer.TYPE_DOUBLE: double[] ddata; if (obj == null) { ddata = new double[numDataElems]; } else { ddata = (double[]) obj; } for (int i = 0; i < numDataElems; i++) { ddata[i] = data.getElemDouble(bankIndices[i], pixelOffset + bandOffsets[i]); } obj = ddata; break; default: throw new RuntimeException("Unsupported data buffer type " + type); } return obj; } /** * Returns the pixel data for the specified rectangle of pixels in a primitive array of type * TransferType. For image data supported by the Java 2D API, this will be one of the dataTypes * supported by java.awt.image.DataBuffer. Data may be returned in a packed format, thus * increasing efficiency for data transfers. Generally, obj should be passed in as null, so that * the Object will be created automatically and will be of the right primitive data * type. * *

The following code illustrates transferring data for a rectangular region of pixels from * DataBuffer db1, whose storage layout is described by * SampleModel sm1, to DataBuffer db2, whose * storage layout is described by SampleModel sm2. The transfer will * generally be more efficient than using getPixels/setPixels. * *

     * SampleModel sm1, sm2;
     * DataBuffer db1, db2;
     * sm2.setDataElements(x, y, w, h, sm1.getDataElements(x, y, w, h, null, db1), db2);
     * 
* * Using getDataElements/setDataElements to transfer between two DataBuffer * /SampleModel pairs is legitimate if the SampleModels have the same number of * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are * the same. * *

* * @param x The minimum X coordinate of the pixel rectangle. * @param y The minimum Y coordinate of the pixel rectangle. * @param w The width of the pixel rectangle. * @param h The height of the pixel rectangle. * @param obj If non-null, a primitive array in which to return the pixel data. * @param data The DataBuffer containing the image data. * @see #getNumDataElements * @see #getTransferType * @see java.awt.image.DataBuffer * @throws ClassCastException if obj is non-null and is not a primitive array of * type TransferType. * @throws ArrayIndexOutOfBoundsException if the coordinates are not in bounds, or * if obj is non-null and is not large enough to hold the pixel data. */ @Override public Object getDataElements( final int x, final int y, final int w, final int h, Object obj, final DataBuffer data) { final int type = getTransferType(); final int numDataElems = getNumDataElements(); int cnt = 0; Object o = null; switch (type) { case DataBuffer.TYPE_BYTE: { byte[] btemp; byte[] bdata; if (obj == null) { bdata = new byte[numDataElems * w * h]; } else { bdata = (byte[]) obj; } for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { o = getDataElements(j, i, o, data); btemp = (byte[]) o; for (int k = 0; k < numDataElems; k++) { bdata[cnt++] = btemp[k]; } } } obj = bdata; break; } case DataBuffer.TYPE_USHORT: { short[] usdata; short[] ustemp; if (obj == null) { usdata = new short[numDataElems * w * h]; } else { usdata = (short[]) obj; } for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { o = getDataElements(j, i, o, data); ustemp = (short[]) o; for (int k = 0; k < numDataElems; k++) { usdata[cnt++] = ustemp[k]; } } } obj = usdata; break; } case DataBuffer.TYPE_INT: { int[] idata; int[] itemp; if (obj == null) { idata = new int[numDataElems * w * h]; } else { idata = (int[]) obj; } for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { o = getDataElements(j, i, o, data); itemp = (int[]) o; for (int k = 0; k < 
numDataElems; k++) { idata[cnt++] = itemp[k]; } } } obj = idata; break; } case DataBuffer.TYPE_SHORT: { short[] sdata; short[] stemp; if (obj == null) { sdata = new short[numDataElems * w * h]; } else { sdata = (short[]) obj; } for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { o = getDataElements(j, i, o, data); stemp = (short[]) o; for (int k = 0; k < numDataElems; k++) { sdata[cnt++] = stemp[k]; } } } obj = sdata; break; } case DataBuffer.TYPE_FLOAT: { float[] fdata; float[] ftemp; if (obj == null) { fdata = new float[numDataElems * w * h]; } else { fdata = (float[]) obj; } for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { o = getDataElements(j, i, o, data); ftemp = (float[]) o; for (int k = 0; k < numDataElems; k++) { fdata[cnt++] = ftemp[k]; } } } obj = fdata; break; } case DataBuffer.TYPE_DOUBLE: { double[] ddata; double[] dtemp; if (obj == null) { ddata = new double[numDataElems * w * h]; } else { ddata = (double[]) obj; } for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { o = getDataElements(j, i, o, data); dtemp = (double[]) o; for (int k = 0; k < numDataElems; k++) { ddata[cnt++] = dtemp[k]; } } } obj = ddata; break; } default: throw new RuntimeException("Unsupported data buffer type " + type); } return obj; } /** * Sets the data for a single pixel in the specified DataBuffer from a primitive * array of type TransferType. For a ComponentSampleModel, this will be the same as * the data type, and samples are transferred one per array element. * *

The following code illustrates transferring data for one pixel from DataBuffer * db1, whose storage layout is described by ComponentSampleModel * csm1, to DataBuffer db2, whose storage layout * is described by ComponentSampleModel csm2. The transfer will * generally be more efficient than using getPixel/setPixel. * *

     * ComponentSampleModel csm1, csm2;
     * DataBufferInt db1, db2;
     * csm2.setDataElements(x, y, csm1.getDataElements(x, y, null, db1), db2);
     * 
* * Using getDataElements/setDataElements to transfer between two DataBuffer * /SampleModel pairs is legitimate if the SampleModels have the same number of * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are * the same. * *

* * @param x The X coordinate of the pixel location. * @param y The Y coordinate of the pixel location. * @param obj A primitive array containing pixel data. * @param data The DataBuffer containing the image data. * @throws ClassCastException if obj is non-null and is not a primitive array of * type TransferType. * @throws ArrayIndexOutOfBoundsException if the coordinates are not in bounds, or * if obj is non-null and is not large enough to hold the pixel data. */ @Override public void setDataElements(final int x, final int y, final Object obj, final DataBuffer data) { final int type = getTransferType(); final int numDataElems = getNumDataElements(); final int pixelOffset = (y * scanlineStride) + (x * pixelStride); switch (type) { case DataBuffer.TYPE_BYTE: final byte[] barray = (byte[]) obj; for (int i = 0; i < numDataElems; i++) { data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], (barray[i]) & 0xff); } break; case DataBuffer.TYPE_USHORT: final short[] usarray = (short[]) obj; for (int i = 0; i < numDataElems; i++) { data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], (usarray[i]) & 0xffff); } break; case DataBuffer.TYPE_INT: final int[] iarray = (int[]) obj; for (int i = 0; i < numDataElems; i++) { data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], iarray[i]); } break; case DataBuffer.TYPE_SHORT: final short[] sarray = (short[]) obj; for (int i = 0; i < numDataElems; i++) { data.setElem(bankIndices[i], pixelOffset + bandOffsets[i], sarray[i]); } break; case DataBuffer.TYPE_FLOAT: final float[] farray = (float[]) obj; for (int i = 0; i < numDataElems; i++) { data.setElemFloat(bankIndices[i], pixelOffset + bandOffsets[i], farray[i]); } break; case DataBuffer.TYPE_DOUBLE: final double[] darray = (double[]) obj; for (int i = 0; i < numDataElems; i++) { data.setElemDouble(bankIndices[i], pixelOffset + bandOffsets[i], darray[i]); } break; default: throw new RuntimeException("Unsupported data buffer type " + type); } } /** * Sets the data for a 
rectangle of pixels in the specified DataBuffer from a * primitive array of type TransferType. For image data supported by the Java 2D API, this will * be one of the dataTypes supported by java.awt.image.DataBuffer. Data in the array may be in a * packed format, thus increasing efficiency for data transfers. * *

The following code illustrates transferring data for a rectangular region of pixels from * DataBuffer db1, whose storage layout is described by * SampleModel sm1, to DataBuffer db2, whose * storage layout is described by SampleModel sm2. The transfer will * generally be more efficient than using getPixels/setPixels. * *

     * SampleModel sm1, sm2;
     * DataBuffer db1, db2;
     * sm2.setDataElements(x, y, w, h, sm1.getDataElements(x, y, w, h, null, db1), db2);
     * 
* * Using getDataElements/setDataElements to transfer between two DataBuffer * /SampleModel pairs is legitimate if the SampleModels have the same number of * bands, corresponding bands have the same number of bits per sample, and the TransferTypes are * the same. * *

* * @param x The minimum X coordinate of the pixel rectangle. * @param y The minimum Y coordinate of the pixel rectangle. * @param w The width of the pixel rectangle. * @param h The height of the pixel rectangle. * @param obj A primitive array containing pixel data. * @param data The DataBuffer containing the image data. * @throws ClassCastException if obj is non-null and is not a primitive array of * type TransferType. * @throws ArrayIndexOutOfBoundsException if the coordinates are not in bounds, or * if obj is non-null and is not large enough to hold the pixel data. * @see #getNumDataElements * @see #getTransferType * @see java.awt.image.DataBuffer */ @Override public void setDataElements( final int x, final int y, final int w, final int h, final Object obj, final DataBuffer data) { int cnt = 0; final Object o = null; final int type = getTransferType(); final int numDataElems = getNumDataElements(); switch (type) { case DataBuffer.TYPE_BYTE: { final byte[] barray = (byte[]) obj; final byte[] btemp = new byte[numDataElems]; for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { for (int k = 0; k < numDataElems; k++) { btemp[k] = barray[cnt++]; } setDataElements(j, i, btemp, data); } } break; } case DataBuffer.TYPE_USHORT: { final short[] usarray = (short[]) obj; final short[] ustemp = new short[numDataElems]; for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { for (int k = 0; k < numDataElems; k++) { ustemp[k] = usarray[cnt++]; } setDataElements(j, i, ustemp, data); } } break; } case DataBuffer.TYPE_INT: { final int[] iArray = (int[]) obj; final int[] itemp = new int[numDataElems]; for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { for (int k = 0; k < numDataElems; k++) { itemp[k] = iArray[cnt++]; } setDataElements(j, i, itemp, data); } } break; } case DataBuffer.TYPE_SHORT: { final short[] sArray = (short[]) obj; final short[] stemp = new short[numDataElems]; for (int i = y; i < (y + h); i++) { for (int j 
= x; j < (x + w); j++) { for (int k = 0; k < numDataElems; k++) { stemp[k] = sArray[cnt++]; } setDataElements(j, i, stemp, data); } } break; } case DataBuffer.TYPE_FLOAT: { final float[] fArray = (float[]) obj; final float[] ftemp = new float[numDataElems]; for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { for (int k = 0; k < numDataElems; k++) { ftemp[k] = fArray[cnt++]; } setDataElements(j, i, ftemp, data); } } break; } case DataBuffer.TYPE_DOUBLE: { final double[] dArray = (double[]) obj; final double[] dtemp = new double[numDataElems]; for (int i = y; i < (y + h); i++) { for (int j = x; j < (x + w); j++) { for (int k = 0; k < numDataElems; k++) { dtemp[k] = dArray[cnt++]; } setDataElements(j, i, dtemp, data); } } break; } default: throw new RuntimeException("Unsupported data buffer type " + type); } } /** * Sets a sample in the specified band for the pixel located at (x,y) in the DataBuffer * using a float for input. ArrayIndexOutOfBoundsException may * be thrown if the coordinates are not in bounds. * * @param x The X coordinate of the pixel location. * @param y The Y coordinate of the pixel location. * @param b The band to set. * @param s The input sample as a float. * @param data The DataBuffer containing the image data. * @throws ArrayIndexOutOfBoundsException if coordinates are not in bounds */ @Override public void setSample( final int x, final int y, final int b, final float s, final DataBuffer data) { data.setElemFloat( bankIndices[b], (y * scanlineStride) + (x * pixelStride) + bandOffsets[b], s); } /** * Returns the sample in a specified band for the pixel located at (x,y) as a float * . ArrayIndexOutOfBoundsException may be thrown if the coordinates are not in * bounds. * * @param x The X coordinate of the pixel location. * @param y The Y coordinate of the pixel location. * @param b The band to return. * @param data The DataBuffer containing the image data. 
* @return sample The floating point sample value * @throws ArrayIndexOutOfBoundsException if coordinates are not in bounds */ @Override public float getSampleFloat(final int x, final int y, final int b, final DataBuffer data) { final float sample = data.getElemFloat( bankIndices[b], (y * scanlineStride) + (x * pixelStride) + bandOffsets[b]); return sample; } /** * Sets a sample in the specified band for the pixel located at (x,y) in the DataBuffer * using a double for input. ArrayIndexOutOfBoundsException * may be thrown if the coordinates are not in bounds. * * @param x The X coordinate of the pixel location. * @param y The Y coordinate of the pixel location. * @param b The band to set. * @param s The input sample as a double. * @param data The DataBuffer containing the image data. * @throws ArrayIndexOutOfBoundsException if coordinates are not in bounds */ @Override public void setSample( final int x, final int y, final int b, final double s, final DataBuffer data) { data.setElemDouble( bankIndices[b], (y * scanlineStride) + (x * pixelStride) + bandOffsets[b], s); } /** * Returns the sample in a specified band for a pixel located at (x,y) as a double. * ArrayIndexOutOfBoundsException may be thrown if the coordinates are not in * bounds. * * @param x The X coordinate of the pixel location. * @param y The Y coordinate of the pixel location. * @param b The band to return. * @param data The DataBuffer containing the image data. * @return sample The double sample value * @throws ArrayIndexOutOfBoundsException if coordinates are not in bounds */ @Override public double getSampleDouble(final int x, final int y, final int b, final DataBuffer data) { final double sample = data.getElemDouble( bankIndices[b], (y * scanlineStride) + (x * pixelStride) + bandOffsets[b]); return sample; } /** * Returns all samples for a rectangle of pixels in a double array, one sample per * array element. ArrayIndexOutOfBoundsException may be thrown if the coordinates * are not in bounds. 
* * @param x The X coordinate of the upper left pixel location. * @param y The Y coordinate of the upper left pixel location. * @param w The width of the pixel rectangle. * @param h The height of the pixel rectangle. * @param dArray If non-null, returns the samples in this array. * @param data The DataBuffer containing the image data. * @throws ArrayIndexOutOfBoundsException if coordinates are not in bounds */ @Override public double[] getPixels( final int x, final int y, final int w, final int h, final double dArray[], final DataBuffer data) { double pixels[]; int Offset = 0; if (dArray != null) { pixels = dArray; } else { pixels = new double[numBands * w * h]; } for (int i = y; i < (h + y); i++) { for (int j = x; j < (w + x); j++) { for (int k = 0; k < numBands; k++) { pixels[Offset++] = getSampleDouble(j, i, k, data); } } } return pixels; } /** Returns a String containing the values of all valid fields. */ @Override @SuppressFBWarnings public String toString() { String ret = "ComponentSampleModelJAI: " + " dataType=" + getDataType() + " numBands=" + getNumBands() + " width=" + getWidth() + " height=" + getHeight() + " bandOffsets=[ "; for (int i = 0; i < numBands; i++) { ret += getBandOffsets()[i] + " "; } ret += "]"; return ret; } } } ================================================ FILE: extensions/adapters/raster/src/main/java/org/locationtech/geowave/adapter/raster/util/ZipUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster.util; import java.io.File; import java.io.IOException; import org.apache.hadoop.fs.FileUtil; import org.slf4j.LoggerFactory; import net.lingala.zip4j.core.ZipFile; import net.lingala.zip4j.exception.ZipException; public class ZipUtils { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(ZipUtils.class); /** * Unzips the contents of a zip file to a target output directory, deleting anything that existed * beforehand * * @param zipInput input zip file * @param outputFolder zip file output folder */ public static void unZipFile(final File zipInput, final String outputFolder) { unZipFile(zipInput, outputFolder, true); } /** * Unzips the contents of a zip file to a target output directory * * @param zipInput input zip file * @param outputFolder zip file output folder * @param deleteTargetDir delete the destination directory before extracting */ public static void unZipFile( final File zipInput, final String outputFolder, final boolean deleteTargetDir) { try { final File of = new File(outputFolder); if (!of.exists()) { if (!of.mkdirs()) { throw new IOException("Could not create temporary directory: " + of.toString()); } } else if (deleteTargetDir) { FileUtil.fullyDelete(of); } final ZipFile z = new ZipFile(zipInput); z.extractAll(outputFolder); } catch (final ZipException e) { LOGGER.warn("Unable to extract test data", e); } catch (final IOException e) { LOGGER.warn("Unable to create temporary directory: " + outputFolder, e); } } } ================================================ FILE: extensions/adapters/raster/src/main/protobuf/DataBuffer.proto 
================================================ option java_package = "org.locationtech.geowave.adapter.raster.protobuf"; option java_outer_classname = "DataBufferProtos"; option optimize_for = SPEED; message SignedIntArray { repeated sint32 samples = 1 [packed=true]; } message DoubleArray { repeated double samples = 1 [packed=true]; } message FloatArray { repeated float samples = 1 [packed=true]; } message SignedIntDataBuffer { repeated SignedIntArray banks = 1; } message DoubleDataBuffer { repeated DoubleArray banks = 1; } message FloatDataBuffer { repeated FloatArray banks = 1; } message ByteDataBuffer { repeated bytes banks = 1; } message DataBuffer { required uint32 type = 1; repeated uint32 offsets = 2 [packed=true]; required uint32 size = 3; optional SignedIntDataBuffer sint = 4; optional DoubleDataBuffer dbl = 5; optional FloatDataBuffer flt = 6; optional ByteDataBuffer byteDb = 7; } ================================================ FILE: extensions/adapters/raster/src/main/protobuf/SampleModel.proto ================================================ option java_package = "org.locationtech.geowave.adapter.raster.protobuf"; option java_outer_classname = "SampleModelProtos"; option optimize_for = SPEED; message SampleModel { required uint32 modelType = 1; required uint32 transferType = 2; required uint32 width = 3; required uint32 height = 4; required uint32 scanlineStride = 5; optional uint32 pixelStride = 6; optional uint32 pixelBitStride = 7; optional uint32 dataBitOffset = 8; repeated uint32 bandOffsets = 9 [packed=true]; repeated uint32 bankIndices = 10 [packed=true]; repeated uint32 bitMasks = 11 [packed=true]; } ================================================ FILE: extensions/adapters/raster/src/main/resources/META-INF/services/org.geotools.coverage.grid.io.GridFormatFactorySpi ================================================ org.locationtech.geowave.adapter.raster.plugin.GeoWaveGTRasterFormatFactory 
org.locationtech.geowave.adapter.raster.plugin.gdal.GDALGeoTiffFormatFactory ================================================ FILE: extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.adapter.raster.operations.RasterOperationCLIProvider ================================================ FILE: extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.adapter.raster.RasterAdapterPersistableRegistry ================================================ FILE: extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.index.IndexFieldMapperRegistrySPI ================================================ org.locationtech.geowave.adapter.raster.adapter.RasterRegisteredIndexFieldMappers ================================================ FILE: extensions/adapters/raster/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.statistics.StatisticsRegistrySPI ================================================ org.locationtech.geowave.adapter.raster.stats.RasterRegisteredStatistics ================================================ FILE: extensions/adapters/raster/src/test/java/org/locationtech/geowave/adapter/raster/RasterUtilsTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy; public class RasterUtilsTest { @Test public void testCreateDataAdapter() { final RasterDataAdapter adapter = RasterUtils.createDataAdapterTypeDouble("test", 3, 256, new NoDataMergeStrategy()); Assert.assertNotNull(adapter); Assert.assertEquals("test", adapter.getCoverageName()); Assert.assertEquals(3, adapter.getSampleModel().getNumBands()); Assert.assertEquals(256, adapter.getTileSize()); } } ================================================ FILE: extensions/adapters/raster/src/test/java/org/locationtech/geowave/adapter/raster/WebMercatorRasterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.raster; import java.awt.Rectangle; import java.awt.image.Raster; import java.awt.image.WritableRaster; import java.io.IOException; import java.util.Collections; import org.geotools.geometry.GeneralEnvelope; import org.geotools.referencing.CRS; import org.junit.Assert; import org.junit.Test; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.raster.adapter.merge.nodata.NoDataMergeStrategy; import org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterConfig; import org.locationtech.geowave.adapter.raster.plugin.GeoWaveRasterReader; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.index.FloatCompareUtils; import org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.Writer; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; import org.opengis.coverage.grid.GridCoverage; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; public class WebMercatorRasterTest { public static final String CRS_STR = "EPSG:3857"; @Test public void testStoreRetrieve() throws IOException, MismatchedDimensionException, NoSuchAuthorityCodeException, FactoryException { 
GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().put( "memory", new MemoryStoreFactoryFamily()); final DataStore dataStore = GeoWaveStoreFinder.createDataStore(Collections.EMPTY_MAP); final int xTiles = 8; final int yTiles = 8; final double[] minsPerBand = new double[] {0, 0, 0}; final double[] maxesPerBand = new double[] { (xTiles * 3) + (yTiles * 24), (xTiles * 3) + (yTiles * 24), (xTiles * 3) + (yTiles * 24)}; final String[] namesPerBand = new String[] {"b1", "b2", "b3"}; final RasterDataAdapter adapter = RasterUtils.createDataAdapterTypeDouble( "test", 3, 64, minsPerBand, maxesPerBand, namesPerBand, new NoDataMergeStrategy()); final Index index = new SpatialIndexBuilder().setCrs(CRS_STR) // 3857 .createIndex(); double bounds = CRS.decode(CRS_STR).getCoordinateSystem().getAxis(0).getMaximumValue(); if (!Double.isFinite(bounds)) { bounds = SpatialDimensionalityTypeProvider.DEFAULT_UNBOUNDED_CRS_INTERVAL; } bounds /= 32.0; dataStore.addType(adapter, index); for (double xTile = 0; xTile < xTiles; xTile++) { for (double yTile = 0; yTile < yTiles; yTile++) { try (Writer writer = dataStore.createWriter(adapter.getTypeName())) { final WritableRaster raster = RasterUtils.createRasterTypeDouble(3, 64); RasterUtils.fillWithNoDataValues( raster, new double[][] { {(xTile * 3) + (yTile * 24)}, {(xTile * 3) + (yTile * 24) + 1}, {(xTile * 3) + (yTile * 24) + 2}}); writer.write( RasterUtils.createCoverageTypeDouble( "test", xTile * bounds, (xTile + 1) * bounds, yTile * bounds, (yTile + 1) * bounds, minsPerBand, maxesPerBand, namesPerBand, raster, CRS_STR)); } } } final int grid[][] = new int[8][8]; final GeoWaveRasterReader reader = new GeoWaveRasterReader(GeoWaveRasterConfig.createConfig(Collections.EMPTY_MAP, "")); for (int xTile = 1; xTile < xTiles; xTile++) { for (int yTile = 1; yTile < yTiles; yTile++) { final GeneralEnvelope queryEnvelope = new GeneralEnvelope( new double[] { // this is exactly on a tile boundary, so there // will be no // scaling on the tile 
composition/rendering (xTile - (15 / 64.0)) * bounds, (yTile - (15 / 64.0)) * bounds}, new double[] { // these values are also on a tile boundary, to // avoid // scaling (xTile + (15 / 64.0)) * bounds, (yTile + (15 / 64.0)) * bounds}); queryEnvelope.setCoordinateReferenceSystem(CRS.decode(CRS_STR)); final GridCoverage gridCoverage = reader.renderGridCoverage( "test", new Rectangle(32, 32), queryEnvelope, null, null, null); final Raster img = gridCoverage.getRenderedImage().getData(); grid[xTile - 1][yTile - 1] = img.getSample(0, 16, 0); grid[xTile - 1][yTile] = img.getSample(0, 0, 0); grid[xTile][yTile - 1] = img.getSample(16, 16, 0); grid[xTile][yTile] = img.getSample(16, 0, 0); final double expectedMinXMinYValue = ((xTile - 1) * 3) + ((yTile - 1) * 24); final double expectedMinXMaxYValue = ((xTile - 1) * 3) + (yTile * 24); final double expectedMaxXMinYValue = (xTile * 3) + ((yTile - 1) * 24); final double expectedMaxXMaxYValue = (xTile * 3) + (yTile * 24); for (int x = 0; x < 32; x++) { for (int y = 0; y < 32; y++) { for (int b = 0; b < 3; b++) { double expectedValue; if (x > 15) { if (y <= 15) { expectedValue = expectedMaxXMaxYValue; } else { expectedValue = expectedMaxXMinYValue; } } else if (y <= 15) { expectedValue = expectedMinXMaxYValue; } else { expectedValue = expectedMinXMinYValue; } expectedValue += b; Assert.assertEquals( String.format("Value didn't match expected at x=%d;y=%d;b=%d", x, y, b), expectedValue, img.getSample(x, y, b), FloatCompareUtils.COMP_EPSILON); } } } } } } } ================================================ FILE: extensions/adapters/vector/.gitignore ================================================ src/main/java/org/locationtech/geowave/adapter/vector/avro ================================================ FILE: extensions/adapters/vector/pom.xml ================================================ 4.0.0 geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-adapter-vector Geowave Vector Adapter Geowave Data 
Adapter for Vector Data com.google.guava guava org.apache.commons commons-math org.locationtech.geowave geowave-adapter-auth ${project.version} org.locationtech.geowave geowave-core-ingest ${project.version} org.locationtech.geowave geowave-core-geotime ${project.version} org.geotools gt-opengis org.geotools gt-main org.geotools gt-wps org.geotools gt-shapefile org.geotools gt-geojson org.geoserver gs-wms gt-epsg-hsql org.geotools log4j log4j commons-beanutils commons-beanutils com.oath.cyclops cyclops org.geotools gt-render org.hdrhistogram HdrHistogram 2.1.7 com.clearspring.analytics stream org.apache.curator curator-test 2.5.0 test org.locationtech.geowave geowave-core-store tests test-jar ${project.version} test org.locationtech.geowave geowave-core-geotime tests test-jar ${project.version} test org.apache.avro avro-maven-plugin ================================================ FILE: extensions/adapters/vector/src/main/avro/AvroSimpleFeature.avsc ================================================ [ { "name" : "AvroAttributeValues", "namespace" : "org.locationtech.geowave.adapter.vector.avro", "type" : "record", "fields" : [ { "name" : "fid", "type" : "string" }, { "name" : "values", "type" : { "type" : "array", "items" : "bytes" } }, { "name" : "classifications", "type" : [ "null",{ "type" : "array", "items" : "string" } ] }, { "name" : "serializationVersion", "type" : "bytes", "default" : "\u0000" } ] }, { "name" : "AvroFeatureDefinition", "namespace" : "org.locationtech.geowave.adapter.vector.avro", "type" : "record", "fields" : [ { "name" : "featureTypeName", "type" : "string" }, { "name" : "attributeNames", "type" : { "type" : "array", "items" : "string" } }, { "name" : "attributeTypes", "type" : { "type" : "array", "items" : "string" } }, { "name" : "attributeDefaultClassifications", "type" : { "type" : "array", "items" : "string" } } ] }, { "name" : "AvroSimpleFeature", "namespace" : "org.locationtech.geowave.adapter.vector.avro", "type" : "record", "fields" 
: [
    { "name" : "featureType", "type" : "AvroFeatureDefinition" },
    { "name" : "value", "type" : "AvroAttributeValues" }
  ]
}, {
  "name" : "AvroSimpleFeatureCollection",
  "namespace" : "org.locationtech.geowave.adapter.vector.avro",
  "type" : "record",
  "fields" : [
    { "name" : "featureType", "type" : "AvroFeatureDefinition" },
    { "name" : "simpleFeatureCollection",
      "type" : { "type" : "array", "items" : "AvroAttributeValues" } }
  ]
} ]

================================================
FILE: extensions/adapters/vector/src/main/java/org/geotools/feature/simple/OptimizedSimpleFeatureBuilder.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.geotools.feature.simple;

import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Variation of SimpleFeatureBuilder that skips object conversion, since GeoWave handles that
 * already.
 */
public class OptimizedSimpleFeatureBuilder extends SimpleFeatureBuilder {

  public OptimizedSimpleFeatureBuilder(final SimpleFeatureType featureType) {
    super(featureType);
  }

  /**
   * Stores the raw attribute value directly, bypassing the superclass's conversion step (GeoWave
   * has already converted the value).
   *
   * @param index attribute position within the feature type
   * @param value attribute value to store as-is
   */
  @Override
  public void set(final int index, final Object value) {
    // explicit bounds guard so the error message reports the attribute capacity
    if (index >= values.length) {
      throw new ArrayIndexOutOfBoundsException(
          "Can handle " + values.length + " attributes only, index is " + index);
    }
    values[index] = value;
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/geotools/process/function/DistributedRenderProcessUtils.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.geotools.process.function; import java.util.Collections; import java.util.Map; import org.geotools.data.Parameter; import org.geotools.feature.NameImpl; import org.geotools.filter.LiteralExpressionImpl; import org.geotools.process.ProcessFactory; import org.geotools.process.Processors; import org.geotools.process.RenderingProcess; import org.geotools.process.factory.AnnotatedBeanProcessFactory; import org.geotools.text.Text; import org.locationtech.geowave.adapter.vector.plugin.InternalProcessFactory; import org.locationtech.geowave.adapter.vector.render.InternalDistributedRenderProcess; import org.opengis.feature.type.Name; import org.opengis.filter.expression.Expression; public class DistributedRenderProcessUtils { private static Expression SINGLETON_RENDER_PROCESS = null; public static Expression getRenderingProcess() { if (SINGLETON_RENDER_PROCESS == null) { final ProcessFactory processFactory = new AnnotatedBeanProcessFactory( Text.text("Internal GeoWave Process Factory"), "internal", InternalDistributedRenderProcess.class); final Name processName = new NameImpl("internal", "InternalDistributedRender"); final RenderingProcess process = (RenderingProcess) processFactory.create(processName); final Map> parameters = processFactory.getParameterInfo(processName); final InternalProcessFactory factory = new InternalProcessFactory(); // this is kinda a hack, but the only way to instantiate a process // is // for it to have a registered process factory, so temporarily // register // the process factory Processors.addProcessFactory(factory); SINGLETON_RENDER_PROCESS = new RenderingProcessFunction( processName, 
Collections.singletonList( new ParameterFunction( null, Collections.singletonList(new LiteralExpressionImpl("data")))), parameters, process, null); Processors.removeProcessFactory(factory); } return SINGLETON_RENDER_PROCESS; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/geotools/renderer/lite/DistributedRenderer.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.geotools.renderer.lite; import java.awt.Composite; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.BlockingQueue; import org.apache.commons.lang3.tuple.Pair; import org.geotools.process.function.ProcessFunction; import org.locationtech.geowave.adapter.vector.plugin.DistributedRenderProcess; import org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions; import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult; import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult.CompositeGroupResult; import org.locationtech.geowave.adapter.vector.render.PersistableComposite; import org.locationtech.geowave.adapter.vector.render.PersistableRenderedImage; import com.google.common.base.Function; import com.google.common.collect.Lists; public class DistributedRenderer extends StreamingRenderer { private final DistributedRenderOptions options; protected DistributedRenderingBlockingQueue renderQueue; public DistributedRenderer(final DistributedRenderOptions options) { this.options = options; } @Override List> classifyByFeatureProduction( final List lfts) { // strip off a distributed rendering render transform because that is // what is currently being processed final List> retVal = super.classifyByFeatureProduction(lfts); for (final List featureTypeStyles : retVal) { final LiteFeatureTypeStyle transformLfts = featureTypeStyles.get(0); // there doesn't seem to be an easy 
way to check if its a // distributed render transform so for now let's just not allow // other rendering transformations when distributed rendering is // employed and strip all transformations if (transformLfts.transformation instanceof ProcessFunction) { if ((((ProcessFunction) transformLfts.transformation).getName() != null) && ((ProcessFunction) transformLfts.transformation).getName().equals( DistributedRenderProcess.PROCESS_NAME)) { transformLfts.transformation = null; } } } return retVal; } @Override public void setRendererHints(final Map hints) { hints.put("maxFiltersToSendToDatastore", options.getMaxFilters()); hints.put(StreamingRenderer.LINE_WIDTH_OPTIMIZATION_KEY, options.isOptimizeLineWidth()); super.setRendererHints(hints); } @Override protected BlockingQueue getRequestsQueue() { renderQueue = new DistributedRenderingBlockingQueue(10000); return renderQueue; } public DistributedRenderResult getResult(final BufferedImage parentImage) { return renderQueue.getResult(parentImage); } public class DistributedRenderingBlockingQueue extends RenderingBlockingQueue { private static final long serialVersionUID = -1014302908773318665L; private final Map>> compositeGroupGraphicsToStyleGraphicsMapping = new LinkedHashMap<>(); private final Map compositeGroupGraphicsToCompositeMapping = new HashMap<>(); public DistributedRenderingBlockingQueue(final int capacity) { super(capacity); } @Override public void put(final RenderingRequest e) throws InterruptedException { // for merge requests just collect the graphics objects and // associated composites if (e instanceof MergeLayersRequest) { final List lftsList = ((MergeLayersRequest) e).lfts; final List> styleGraphics = new ArrayList<>(); final Graphics2D parentGraphics = ((MergeLayersRequest) e).graphics; for (final LiteFeatureTypeStyle lfts : lftsList) { if ((lfts.graphics instanceof DelayedBackbufferGraphic) && (lfts.graphics != parentGraphics)) { final DelayedBackbufferGraphic styleGraphic = (DelayedBackbufferGraphic) 
lfts.graphics; if (styleGraphic.image != null) { styleGraphics.add(Pair.of(styleGraphic.image, lfts.composite)); continue; } } // if no style graphic was added, add a null value as a // placeholder in the list styleGraphics.add(null); } compositeGroupGraphicsToStyleGraphicsMapping.put(parentGraphics, styleGraphics); } else if (e instanceof MargeCompositingGroupRequest) { compositeGroupGraphicsToCompositeMapping.put( ((MargeCompositingGroupRequest) e).compositingGroup.graphics, ((MargeCompositingGroupRequest) e).compositingGroup.composite); } else { super.put(e); } } public DistributedRenderResult getResult(final BufferedImage parentImage) { final List compositeGroups = new ArrayList<>(); for (final Entry>> e : compositeGroupGraphicsToStyleGraphicsMapping.entrySet()) { final Graphics2D compositeGroupGraphic = e.getKey(); final List> orderedStyles = Lists.transform( e.getValue(), new Function, Pair>() { @Override public Pair apply( final Pair input) { if (input == null) { return null; } return Pair.of( new PersistableRenderedImage(input.getKey()), input.getValue() == null ? null : new PersistableComposite(input.getValue())); } }); if (compositeGroupGraphic instanceof DelayedBackbufferGraphic) { final Composite compositeGroupComposite = compositeGroupGraphicsToCompositeMapping.get(compositeGroupGraphic); // because mergelayers wasn't writing to the composite // image, their won't be an image to persist final PersistableComposite persistableCGC = compositeGroupComposite == null ? 
null : new PersistableComposite(compositeGroupComposite);
          compositeGroups.add(new CompositeGroupResult(persistableCGC, orderedStyles));
        } else {
          // it must be the parent image
          compositeGroups.add(new CompositeGroupResult(null, orderedStyles));
        }
      }
      return new DistributedRenderResult(
          new PersistableRenderedImage(parentImage),
          compositeGroups);
    }
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureAdapterPersistableRegistry.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector;

import org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration;
import org.locationtech.geowave.adapter.vector.index.VectorTextIndexEntryConverter;
import org.locationtech.geowave.adapter.vector.ingest.CQLFilterOptionProvider;
import org.locationtech.geowave.adapter.vector.ingest.DataSchemaOptionProvider;
import org.locationtech.geowave.adapter.vector.ingest.FeatureSerializationOptionProvider;
import org.locationtech.geowave.adapter.vector.ingest.GeometrySimpOptionProvider;
import org.locationtech.geowave.adapter.vector.ingest.TypeNameOptionProvider;
import org.locationtech.geowave.adapter.vector.query.aggregation.VectorCountAggregation;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderAggregation;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult.CompositeGroupResult;
import org.locationtech.geowave.adapter.vector.render.PersistableComposite;
import org.locationtech.geowave.adapter.vector.render.PersistableRenderedImage;
import org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet;
import org.locationtech.geowave.core.index.persist.InternalPersistableRegistry;
import org.locationtech.geowave.core.index.persist.PersistableRegistrySpi;

/**
 * Maps the vector-adapter Persistable implementations to their serialization IDs. The short IDs
 * are written into persisted data, so existing assignments must never be changed or reused; the
 * inline comments track which IDs are free, retired, or owned by other modules.
 */
public class FeatureAdapterPersistableRegistry implements
    PersistableRegistrySpi,
    InternalPersistableRegistry {

  @Override
  public PersistableIdAndConstructor[] getSupportedPersistables() {
    return new PersistableIdAndConstructor[] {
        // 500 is available
        // 501 is a legacy class (pre 2.0)
        // 502 is available
        new PersistableIdAndConstructor((short) 503, CQLFilterOptionProvider::new),
        new PersistableIdAndConstructor((short) 504, DataSchemaOptionProvider::new),
        new PersistableIdAndConstructor((short) 505, FeatureSerializationOptionProvider::new),
        new PersistableIdAndConstructor((short) 506, TypeNameOptionProvider::new),
        // 507-508 are available
        new PersistableIdAndConstructor((short) 509, DistributedRenderOptions::new),
        new PersistableIdAndConstructor((short) 510, CompositeGroupResult::new),
        new PersistableIdAndConstructor((short) 511, DistributedRenderResult::new),
        new PersistableIdAndConstructor((short) 512, PersistableComposite::new),
        new PersistableIdAndConstructor((short) 513, PersistableRenderedImage::new),
        // 514-520 is available
        new PersistableIdAndConstructor((short) 521, DistributedRenderAggregation::new),
        new PersistableIdAndConstructor((short) 522, SimpleFeatureUserDataConfigurationSet::new),
        // 523 is used by core-geotime
        // 524-526 are legacy classes (pre 2.0)
        // 527-532 are available
        // 532 is available
        new PersistableIdAndConstructor((short) 533, SimpleFeaturePrimaryIndexConfiguration::new),
        // 534 is available
        new PersistableIdAndConstructor((short) 535, VectorCountAggregation::new),
        new PersistableIdAndConstructor((short) 536, GeometrySimpOptionProvider::new),
        // 537-539 are available
        new PersistableIdAndConstructor((short) 540, VectorTextIndexEntryConverter::new),
        new PersistableIdAndConstructor((short) 541, FeatureDataAdapter::new)};
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureDataAdapter.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.referencing.CRS; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.adapter.vector.util.SimpleFeatureUserDataConfigurationSet; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptor; import org.locationtech.geowave.core.geotime.adapter.SpatialFieldDescriptorBuilder; import org.locationtech.geowave.core.geotime.adapter.TemporalFieldDescriptorBuilder; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.adapter.FieldDescriptor; import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder; 
import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.api.RowBuilder; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.geowave.core.store.statistics.DefaultStatisticsProvider; import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic; import org.locationtech.geowave.mapreduce.HadoopDataAdapter; import org.locationtech.geowave.mapreduce.HadoopWritableSerializer; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.internal.Lists; /** * This data adapter will handle all reading/writing concerns for storing and retrieving GeoTools * SimpleFeature objects to and from a GeoWave persistent store.

The adapter will use the * SimpleFeature's default geometry for spatial indexing.

The adaptor will use the first * temporal attribute (a Calendar or Date object) as the timestamp of a temporal index.

If * the feature type contains a UserData property 'time' for a specific time attribute with * Boolean.TRUE, then the attribute is used as the timestamp of a temporal index.

If the * feature type contains UserData properties 'start' and 'end' for two different time attributes * with value Boolean.TRUE, then the attributes are used for a range index.

If the feature * type contains a UserData property 'time' for *all* time attributes with Boolean.FALSE, then a * temporal index is not used. */ public class FeatureDataAdapter implements GeotoolsFeatureDataAdapter, HadoopDataAdapter, DefaultStatisticsProvider { private static final Logger LOGGER = LoggerFactory.getLogger(FeatureDataAdapter.class); // the original coordinate system will always be represented internally by // the persisted type private SimpleFeatureType featureType; private TimeDescriptors timeDescriptors = null; FieldDescriptor[] fieldDescriptors; Map> descriptorsMap; // ----------------------------------------------------------------------------------- // ----------------------------------------------------------------------------------- protected FeatureDataAdapter() {} // ----------------------------------------------------------------------------------- // ----------------------------------------------------------------------------------- /** * Constructor
Creates a FeatureDataAdapter for the specified SimpleFeatureType * * @param featureType - feature type for this object */ public FeatureDataAdapter(final SimpleFeatureType featureType) { setFeatureType(featureType); } @Override public Class getDataClass() { return SimpleFeature.class; } // ----------------------------------------------------------------------------------- // ----------------------------------------------------------------------------------- /** * Set the FeatureType for this Data Adapter. * * @param featureType - new feature type */ private void setFeatureType(SimpleFeatureType featureType) { if (featureType.getCoordinateReferenceSystem() == null) { featureType = SimpleFeatureTypeBuilder.retype(featureType, GeometryUtils.getDefaultCRS()); } this.featureType = featureType; resetTimeDescriptors(); initializeFieldDescriptors(); } private void initializeFieldDescriptors() { final List attributes = featureType.getAttributeDescriptors(); fieldDescriptors = new FieldDescriptor[attributes.size()]; for (int i = 0; i < attributes.size(); i++) { final AttributeDescriptor attribute = attributes.get(i); if (attribute instanceof GeometryDescriptor) { final SpatialFieldDescriptorBuilder builder = new SpatialFieldDescriptorBuilder<>(attribute.getType().getBinding()); builder.fieldName(attribute.getName().getLocalPart()); builder.crs(((GeometryDescriptor) attribute).getCoordinateReferenceSystem()); if ((featureType.getGeometryDescriptor() != null) && featureType.getGeometryDescriptor().equals(attribute)) { builder.spatialIndexHint(); } fieldDescriptors[i] = builder.build(); } else if ((timeDescriptors != null) && attribute.equals(timeDescriptors.getTime())) { fieldDescriptors[i] = new TemporalFieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName( attribute.getName().getLocalPart()).timeIndexHint().build(); } else if ((timeDescriptors != null) && attribute.equals(timeDescriptors.getStartRange())) { fieldDescriptors[i] = new 
TemporalFieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName( attribute.getName().getLocalPart()).startTimeIndexHint().build(); } else if ((timeDescriptors != null) && attribute.equals(timeDescriptors.getEndRange())) { fieldDescriptors[i] = new TemporalFieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName( attribute.getName().getLocalPart()).endTimeIndexHint().build(); } else { fieldDescriptors[i] = new FieldDescriptorBuilder<>(attribute.getType().getBinding()).fieldName( attribute.getName().getLocalPart()).build(); } } // this assumes attribute names are unique, which *should* be a fair assumption descriptorsMap = Arrays.stream(fieldDescriptors).collect( Collectors.toMap(FieldDescriptor::fieldName, descriptor -> descriptor)); } /** * Sets the namespace of the reprojected feature type associated with this data adapter * * @param namespaceURI - new namespace URI */ @Override public void setNamespace(final String namespaceURI) { final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); builder.init(featureType); builder.setNamespaceURI(namespaceURI); featureType = builder.buildFeatureType(); } // ---------------------------------------------------------------------------------- /** Map of Field Readers associated with a Field ID */ private final Map> mapOfFieldNameToReaders = new HashMap<>(); /** * {@inheritDoc} * * @return Field Reader for the given Field ID */ @Override public FieldReader getReader(final String fieldName) { // Go to the map to get a reader for given fieldId FieldReader reader = mapOfFieldNameToReaders.get(fieldName); // Check the map to see if a reader has already been found. 
if (reader == null) { // Reader not in Map, go to the reprojected feature type and get the // default reader final AttributeDescriptor descriptor = featureType.getDescriptor(fieldName); final Class bindingClass = descriptor.getType().getBinding(); reader = (FieldReader) FieldUtils.getDefaultReaderForClass(bindingClass); // Add it to map for the next time mapOfFieldNameToReaders.put(fieldName, reader); } return reader; } // ---------------------------------------------------------------------------------- /** Map of Field Writers associated with a Field ID */ private final Map> mapOfFieldNameToWriters = new HashMap<>(); /** * {@inheritDoc} * * @return Field Writer for the given Field ID */ @Override public FieldWriter getWriter(final String fieldName) { // Go to the map to get a writer for given fieldId FieldWriter writer = mapOfFieldNameToWriters.get(fieldName); // Check the map to see if a writer has already been found. if (writer == null) { final AttributeDescriptor descriptor = featureType.getDescriptor(fieldName); final Class bindingClass = descriptor.getType().getBinding(); writer = (FieldWriter) FieldUtils.getDefaultWriterForClass(bindingClass); if (writer == null) { LOGGER.error("BasicWriter not found for binding type:" + bindingClass.getName().toString()); } mapOfFieldNameToWriters.put(fieldName, writer); } return writer; } @Override public String getTypeName() { return featureType.getTypeName(); } @Override public byte[] getDataId(final SimpleFeature entry) { return StringUtils.stringToBinary(entry.getID()); } @Override public RowBuilder newRowBuilder( final FieldDescriptor[] outputFieldDescriptors) { CoordinateReferenceSystem outputCRS = featureType.getCoordinateReferenceSystem(); final String defaultGeometryField = featureType.getGeometryDescriptor().getLocalName(); for (final FieldDescriptor field : outputFieldDescriptors) { if (field.fieldName().equals(defaultGeometryField) && (field instanceof SpatialFieldDescriptor)) { outputCRS = 
((SpatialFieldDescriptor) field).crs(); break; } } CoordinateReferenceSystem persistedCRS = featureType.getCoordinateReferenceSystem(); if (outputCRS == null) { outputCRS = GeometryUtils.getDefaultCRS(); } if (persistedCRS == null) { persistedCRS = GeometryUtils.getDefaultCRS(); } final SimpleFeatureType reprojectedFeatureType; if (outputCRS.equals(persistedCRS)) { reprojectedFeatureType = SimpleFeatureTypeBuilder.retype(featureType, persistedCRS); } else { reprojectedFeatureType = SimpleFeatureTypeBuilder.retype(featureType, outputCRS); } return new FeatureRowBuilder(reprojectedFeatureType); } @Override public SimpleFeatureType getFeatureType() { return featureType; } @Override public boolean hasTemporalConstraints() { return getTimeDescriptors().hasTime(); } public synchronized void resetTimeDescriptors() { timeDescriptors = TimeUtils.inferTimeAttributeDescriptor(featureType); } @Override public synchronized TimeDescriptors getTimeDescriptors() { if (timeDescriptors == null) { timeDescriptors = TimeUtils.inferTimeAttributeDescriptor(featureType); } return timeDescriptors; } @Override public HadoopWritableSerializer createWritableSerializer() { return new FeatureWritableSerializer(featureType); } private static class FeatureWritableSerializer implements HadoopWritableSerializer { private final FeatureWritable writable; FeatureWritableSerializer(final SimpleFeatureType type) { writable = new FeatureWritable(type); } @Override public FeatureWritable toWritable(final SimpleFeature entry) { writable.setFeature(entry); return writable; } @Override public SimpleFeature fromWritable(final FeatureWritable writable) { return writable.getFeature(); } } @Override public Object getFieldValue(final SimpleFeature entry, final String fieldName) { return entry.getAttribute(fieldName); } public static CoordinateReferenceSystem decodeCRS(final String crsCode) { CoordinateReferenceSystem crs = null; try { crs = CRS.decode(crsCode, true); } catch (final FactoryException e) { 
LOGGER.error("Unable to decode '" + crsCode + "' CRS", e); throw new RuntimeException("Unable to initialize '" + crsCode + "' object", e); } return crs; } @Override public List>> getDefaultStatistics() { final List> statistics = Lists.newArrayList(); final CountStatistic count = new CountStatistic(getTypeName()); count.setInternal(); statistics.add(count); for (int i = 0; i < featureType.getAttributeCount(); i++) { final AttributeDescriptor ad = featureType.getDescriptor(i); if (Geometry.class.isAssignableFrom(ad.getType().getBinding())) { final BoundingBoxStatistic bbox = new BoundingBoxStatistic(getTypeName(), ad.getLocalName()); bbox.setInternal(); statistics.add(bbox); } } final TimeDescriptors timeDescriptors = getTimeDescriptors(); if (timeDescriptors.hasTime()) { if (timeDescriptors.getTime() != null) { final TimeRangeStatistic timeRange = new TimeRangeStatistic(getTypeName(), timeDescriptors.getTime().getLocalName()); timeRange.setInternal(); statistics.add(timeRange); } if (timeDescriptors.getStartRange() != null) { final TimeRangeStatistic timeRange = new TimeRangeStatistic(getTypeName(), timeDescriptors.getStartRange().getLocalName()); timeRange.setInternal(); statistics.add(timeRange); } if (timeDescriptors.getEndRange() != null) { final TimeRangeStatistic timeRange = new TimeRangeStatistic(getTypeName(), timeDescriptors.getEndRange().getLocalName()); timeRange.setInternal(); statistics.add(timeRange); } } return statistics; } @Override public FieldDescriptor[] getFieldDescriptors() { return fieldDescriptors; } @Override public FieldDescriptor getFieldDescriptor(final String fieldName) { return descriptorsMap.get(fieldName); } @Override public byte[] toBinary() { // serialize the persisted/reprojected feature type by using default // fields and // data types final String encodedType = DataUtilities.encodeType(featureType); final String axis = FeatureDataUtils.getAxis(featureType.getCoordinateReferenceSystem()); final String typeName = 
featureType.getTypeName(); final byte[] typeNameBytes = StringUtils.stringToBinary(typeName); final byte[] axisBytes = StringUtils.stringToBinary(axis); // final SimpleFeatureUserDataConfigurationSet userDataConfiguration = new SimpleFeatureUserDataConfigurationSet(); userDataConfiguration.addConfigurations(typeName, new TimeDescriptorConfiguration(featureType)); final byte[] attrBytes = userDataConfiguration.toBinary(); final String namespace = featureType.getName().getNamespaceURI(); byte[] namespaceBytes; if ((namespace != null) && (namespace.length() > 0)) { namespaceBytes = StringUtils.stringToBinary(namespace); } else { namespaceBytes = new byte[0]; } final byte[] encodedTypeBytes = StringUtils.stringToBinary(encodedType); // 21 bytes is the 7 four byte length fields and one byte for the // version final ByteBuffer buf = ByteBuffer.allocate( encodedTypeBytes.length + typeNameBytes.length + namespaceBytes.length + attrBytes.length + axisBytes.length + VarintUtils.unsignedIntByteLength(typeNameBytes.length) + VarintUtils.unsignedIntByteLength(namespaceBytes.length) + VarintUtils.unsignedIntByteLength(attrBytes.length) + VarintUtils.unsignedIntByteLength(axisBytes.length) + VarintUtils.unsignedIntByteLength(encodedTypeBytes.length)); VarintUtils.writeUnsignedInt(typeNameBytes.length, buf); VarintUtils.writeUnsignedInt(namespaceBytes.length, buf); VarintUtils.writeUnsignedInt(attrBytes.length, buf); VarintUtils.writeUnsignedInt(axisBytes.length, buf); VarintUtils.writeUnsignedInt(encodedTypeBytes.length, buf); buf.put(typeNameBytes); buf.put(namespaceBytes); buf.put(attrBytes); buf.put(axisBytes); buf.put(encodedTypeBytes); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { GeometryUtils.initClassLoader(); // deserialize the feature type final ByteBuffer buf = ByteBuffer.wrap(bytes); final int typeNameByteLength = VarintUtils.readUnsignedInt(buf); final int namespaceByteLength = VarintUtils.readUnsignedInt(buf); final int attrByteLength 
= VarintUtils.readUnsignedInt(buf); final int axisByteLength = VarintUtils.readUnsignedInt(buf); final int encodedTypeByteLength = VarintUtils.readUnsignedInt(buf); final byte[] typeNameBytes = ByteArrayUtils.safeRead(buf, typeNameByteLength); final byte[] namespaceBytes = ByteArrayUtils.safeRead(buf, namespaceByteLength); final byte[] attrBytes = ByteArrayUtils.safeRead(buf, attrByteLength); final byte[] axisBytes = ByteArrayUtils.safeRead(buf, axisByteLength); final byte[] encodedTypeBytes = ByteArrayUtils.safeRead(buf, encodedTypeByteLength); final String typeName = StringUtils.stringFromBinary(typeNameBytes); String namespace = StringUtils.stringFromBinary(namespaceBytes); if (namespace.length() == 0) { namespace = null; } // 21 bytes is the 7 four byte length fields and one byte for the // version final byte[] secondaryIndexBytes = new byte[buf.remaining()]; buf.get(secondaryIndexBytes); final String encodedType = StringUtils.stringFromBinary(encodedTypeBytes); try { final SimpleFeatureType myType = FeatureDataUtils.decodeType( namespace, typeName, encodedType, StringUtils.stringFromBinary(axisBytes)); final SimpleFeatureUserDataConfigurationSet userDataConfiguration = new SimpleFeatureUserDataConfigurationSet(); userDataConfiguration.fromBinary(attrBytes); userDataConfiguration.updateType(myType); setFeatureType(myType); } catch (final SchemaException e) { LOGGER.error("Unable to deserialized feature type", e); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureRowBuilder.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import java.util.Map; import java.util.Map.Entry; import org.geotools.feature.simple.OptimizedSimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.api.RowBuilder; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; /** * A GeoWave RowBuilder, used internally by AbstractDataAdapter to construct rows from a set field * values (in this case SimpleFeatures from a set of attribute values). This implementation simply * wraps a geotools SimpleFeatureBuilder. */ public class FeatureRowBuilder implements RowBuilder { protected final OptimizedSimpleFeatureBuilder builder; public FeatureRowBuilder(final SimpleFeatureType type) { builder = new OptimizedSimpleFeatureBuilder(type); } @Override public SimpleFeature buildRow(final byte[] dataId) { return builder.buildFeature(StringUtils.stringFromBinary(dataId)); } @Override public void setField(final String fieldName, final Object fieldValue) { builder.set(fieldName, fieldValue); } @Override public void setFields(final Map values) { for (final Entry entry : values.entrySet()) { builder.set(entry.getKey(), entry.getValue()); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/FeatureWritable.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.Date; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.io.Writable; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.core.geotime.util.TWKBReader; import org.locationtech.geowave.core.geotime.util.TWKBWriter; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.io.ParseException; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import com.clearspring.analytics.util.Varint; /** * This class is used by FeatureDataAdapter to persist SimpleFeature and its SimpleFeatureType. The * attribute types of the feature must be understood before the feature can be deserialized so * therefore each SimpleFeature serializes its type. * *

NOTE: This class caches feature type information. If the feature type changes, then the cache * should be emptied using the clearCache() method. */ public class FeatureWritable implements Writable, java.io.Serializable { private static final Map, SimpleFeatureType> FeatureTypeCache = new ConcurrentHashMap<>(); /** */ private static final long serialVersionUID = 286616522680871139L; private SimpleFeatureType featureType; private SimpleFeature feature; public FeatureWritable() {} public FeatureWritable(final SimpleFeatureType featureType) { this.featureType = featureType; } public FeatureWritable(final SimpleFeatureType featureType, final SimpleFeature feature) { this.featureType = featureType; this.feature = feature; } public SimpleFeature getFeature() { return feature; } public void setFeature(final SimpleFeature feature) { this.feature = feature; } @Override public void readFields(final DataInput input) throws IOException { try { final String ns = input.readUTF(); featureType = FeatureDataUtils.decodeType( "-".equals(ns) ? "" : ns, input.readUTF(), input.readUTF(), input.readUTF()); } catch (final SchemaException e) { throw new IOException("Failed to parse the encoded feature type", e); } final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType); // read the fid final String fid = input.readUTF(); // read the other attributes, build the feature for (final AttributeDescriptor ad : featureType.getAttributeDescriptors()) { final Object att = readAttribute(ad, input); builder.add(att); } // build the feature feature = builder.buildFeature(fid); } @Override public void write(final DataOutput output) throws IOException { output.writeUTF( featureType.getName().getNamespaceURI() == null ? 
"-" : featureType.getName().getNamespaceURI()); output.writeUTF(featureType.getTypeName()); output.writeUTF(DataUtilities.encodeType(featureType)); output.writeUTF(FeatureDataUtils.getAxis(featureType.getCoordinateReferenceSystem())); // write feature id output.writeUTF(feature.getID()); // write the attributes for (final AttributeDescriptor ad : featureType.getAttributeDescriptors()) { final Object value = feature.getAttribute(ad.getLocalName()); writeAttribute(output, ad, value); } } static void writeAttribute( final DataOutput output, final AttributeDescriptor ad, final Object value) throws IOException { if (value == null) { // null marker output.writeBoolean(true); } else { // not null, write the contents. This one requires some explanation. // We are not writing any type metadata in the stream for the types // we can optimize (primitives, numbers, strings and the like). This // means we have to be 100% sure the class we're writing is actually // the one we can optimize for, and not some subclass. 
Thus, we are // authorized to use identity comparison instead of isAssignableFrom // or equality, when we read back it must be as if we did not // serialize stuff at all output.writeBoolean(false); final Class binding = ad.getType().getBinding(); if (binding == Boolean.class) { output.writeBoolean((Boolean) value); } else if ((binding == Byte.class) || (binding == byte.class)) { output.writeByte((Byte) value); } else if ((binding == Short.class) || (binding == short.class)) { output.writeShort((Short) value); } else if ((binding == Integer.class) || (binding == int.class)) { Varint.writeSignedVarInt((Integer) value, output); } else if ((binding == Long.class) || (binding == long.class)) { Varint.writeSignedVarLong((Long) value, output); } else if ((binding == Float.class) || (binding == float.class)) { output.writeFloat((Float) value); } else if ((binding == Double.class) || (binding == double.class)) { output.writeDouble((Double) value); } else if (binding == String.class) { output.writeUTF((String) value); } else if ((binding == java.sql.Date.class) || (binding == java.sql.Time.class) || (binding == java.sql.Timestamp.class) || (binding == java.util.Date.class)) { Varint.writeUnsignedVarLong(((Date) value).getTime(), output); } else if (Geometry.class.isAssignableFrom(binding)) { final TWKBWriter writer = new TWKBWriter(); final byte[] buffer = writer.write((Geometry) value); Varint.writeUnsignedVarInt(buffer.length, output); output.write(buffer); } else { // can't optimize, in this case we use an ObjectOutputStream to // write out full metadata final ByteArrayOutputStream bos = new ByteArrayOutputStream(); final ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(value); oos.flush(); final byte[] bytes = bos.toByteArray(); Varint.writeUnsignedVarInt(bytes.length, output); output.write(bytes); } } } /** * Reads the attributes. 
* * @param ad * @return * @throws IOException */ Object readAttribute(final AttributeDescriptor ad, final DataInput input) throws IOException { final boolean isNull = input.readBoolean(); if (isNull) { return null; } else { final Class binding = ad.getType().getBinding(); if (binding == Boolean.class) { return input.readBoolean(); } else if ((binding == Byte.class) || (binding == byte.class)) { return input.readByte(); } else if ((binding == Short.class) || (binding == short.class)) { return input.readShort(); } else if ((binding == Integer.class) || (binding == int.class)) { return Varint.readSignedVarInt(input); } else if ((binding == Long.class) || (binding == long.class)) { return Varint.readSignedVarLong(input); } else if ((binding == Float.class) || (binding == float.class)) { return input.readFloat(); } else if ((binding == Double.class) || (binding == double.class)) { return input.readDouble(); } else if (binding == String.class) { return input.readUTF(); } else if (binding == java.sql.Date.class) { return new java.sql.Date(Varint.readUnsignedVarLong(input)); } else if (binding == java.sql.Time.class) { return new java.sql.Time(Varint.readUnsignedVarLong(input)); } else if (binding == java.sql.Timestamp.class) { return new java.sql.Timestamp(Varint.readUnsignedVarLong(input)); } else if (binding == java.util.Date.class) { return new java.util.Date(Varint.readUnsignedVarLong(input)); } else if (Geometry.class.isAssignableFrom(binding)) { final TWKBReader reader = new TWKBReader(); try { final int length = Varint.readUnsignedVarInt(input); final byte[] buffer = new byte[length]; input.readFully(buffer); return reader.read(buffer); } catch (final IOException | ParseException e) { throw new IOException("Failed to read the geometry WKB", e); } } else { final int length = Varint.readUnsignedVarInt(input); final byte[] buffer = new byte[length]; input.readFully(buffer); final ByteArrayInputStream bis = new ByteArrayInputStream(buffer); final ObjectInputStream ois 
= new ObjectInputStream(bis); try { return ois.readObject(); } catch (final ClassNotFoundException e) { throw new IOException("Could not read back object", e); } } } } private void writeObject(final java.io.ObjectOutputStream out) throws IOException { write(out); } private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { readFields(in); } public static final void clearCache() { FeatureTypeCache.clear(); } public static final void cache(final SimpleFeatureType featureType) { final Pair id = Pair.of( featureType.getName().getNamespaceURI() == null ? "" : featureType.getName().getNamespaceURI(), featureType.getTypeName()); FeatureTypeCache.put(id, featureType); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/GeoWaveAvroFeatureUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.DecoderFactory; import org.apache.avro.specific.SpecificDatumReader; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues; import org.locationtech.geowave.adapter.vector.avro.AvroFeatureDefinition; import org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeature; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.TWKBReader; import org.locationtech.geowave.core.geotime.util.TWKBWriter; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKBReader; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import com.google.common.base.Preconditions; public class GeoWaveAvroFeatureUtils { private static final TWKBWriter WKB_WRITER = new TWKBWriter(); private static final DecoderFactory DECODER_FACTORY = DecoderFactory.get(); private static final SpecificDatumReader DATUM_READER = new 
SpecificDatumReader<>(AvroSimpleFeature.getClassSchema()); private static final TWKBReader WKB_READER = new TWKBReader(); private GeoWaveAvroFeatureUtils() {} /** * Add the attributes, types and classifications for the SimpleFeatureType to the provided * FeatureDefinition * * @param fd - existing Feature Definition (or new one if null) * @param sft - SimpleFeatureType of the simpleFeature being serialized * @param defaultClassifications - map of attribute names to classification * @param defaultClassification - default classification if one could not be found in the map * @return the feature definition * @throws IOException */ public static AvroFeatureDefinition buildFeatureDefinition( AvroFeatureDefinition fd, final SimpleFeatureType sft, final Map defaultClassifications, final String defaultClassification) throws IOException { if (fd == null) { fd = new AvroFeatureDefinition(); } fd.setFeatureTypeName(sft.getTypeName()); final List attributes = new ArrayList<>(sft.getAttributeCount()); final List types = new ArrayList<>(sft.getAttributeCount()); final List classifications = new ArrayList<>(sft.getAttributeCount()); for (final AttributeDescriptor attr : sft.getAttributeDescriptors()) { final String localName = attr.getLocalName(); attributes.add(localName); types.add(attr.getType().getBinding().getCanonicalName()); classifications.add( getClassification(localName, defaultClassifications, defaultClassification)); } fd.setAttributeNames(attributes); fd.setAttributeTypes(types); fd.setAttributeDefaultClassifications(classifications); return fd; } /** * If a classification exists for this attribute name then use it. If not, then use the provided * default classification. 
* * @param localName - attribute name * @param defaultClassifications - map of attribute names to classification * @param defaultClassification - default classification to use if one is not mapped for the name * provided * @return the classification * @throws IOException */ private static String getClassification( final String localName, final Map defaultClassifications, final String defaultClassification) throws IOException { String classification; if ((defaultClassifications != null) && defaultClassifications.containsKey(localName)) { classification = defaultClassifications.get(localName); } else { classification = defaultClassification; } if (classification == null) { throw new IOException( "No default classification was provided, and no classification for: '" + localName + "' was provided"); } return classification; } /** * Create an AttributeValue from the SimpleFeature's attributes * * @param sf * @param sft * @return the attribute value */ public static synchronized AvroAttributeValues buildAttributeValue( final SimpleFeature sf, final SimpleFeatureType sft) { final AvroAttributeValues attributeValue = new AvroAttributeValues(); final List values = new ArrayList<>(sft.getAttributeCount()); attributeValue.setSerializationVersion( ByteBuffer.wrap(new byte[] {FieldUtils.SERIALIZATION_VERSION})); attributeValue.setFid(sf.getID()); for (final AttributeDescriptor attr : sft.getAttributeDescriptors()) { final Object o = sf.getAttribute(attr.getLocalName()); byte[] bytes; if (o instanceof Geometry) { bytes = WKB_WRITER.write((Geometry) o); } else { final FieldWriter fw = FieldUtils.getDefaultWriterForClass(attr.getType().getBinding()); bytes = fw.writeField(o); } values.add(ByteBuffer.wrap(bytes)); } attributeValue.setValues(values); return attributeValue; } /** * * Deserialize byte array into an AvroSimpleFeature then convert to a SimpleFeature * * @param avroData serialized bytes of a AvroSimpleFeature * @return Collection of GeoTools SimpleFeature instances. 
* @throws IOException * @throws ClassNotFoundException * @throws ParseException */ public static synchronized SimpleFeature deserializeAvroSimpleFeature(final byte[] avroData) throws IOException, ClassNotFoundException, ParseException { // Deserialize final AvroSimpleFeature sfc = deserializeASF(avroData, null); final AvroFeatureDefinition featureDefinition = sfc.getFeatureType(); return avroSimpleFeatureToGTSimpleFeature( avroFeatureDefinitionToGTSimpleFeatureType(featureDefinition), featureDefinition.getAttributeTypes(), sfc.getValue()); } public static SimpleFeatureType avroFeatureDefinitionToGTSimpleFeatureType( final AvroFeatureDefinition featureDefinition) throws ClassNotFoundException { final SimpleFeatureTypeBuilder sftb = new SimpleFeatureTypeBuilder(); sftb.setCRS(GeometryUtils.getDefaultCRS()); sftb.setName(featureDefinition.getFeatureTypeName()); final List featureTypes = featureDefinition.getAttributeTypes(); final List featureNames = featureDefinition.getAttributeNames(); for (int i = 0; i < featureDefinition.getAttributeNames().size(); i++) { final String type = featureTypes.get(i); final String name = featureNames.get(i); final Class c = Class.forName(jtsCompatibility(type)); sftb.add(name, c); } return sftb.buildFeatureType(); } public static SimpleFeature avroSimpleFeatureToGTSimpleFeature( final SimpleFeatureType type, final List attributeTypes, final AvroAttributeValues attributeValues) throws IOException, ClassNotFoundException, ParseException { // Convert SimpleFeature simpleFeature; final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(type); // null values should still take a place in the array - check Preconditions.checkArgument(attributeTypes.size() == attributeValues.getValues().size()); final byte serializationVersion = attributeValues.getSerializationVersion().get(); WKBReader legacyReader = null; if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { legacyReader = new WKBReader(); } for (int i = 0; i < 
attributeValues.getValues().size(); i++) { final ByteBuffer val = attributeValues.getValues().get(i); if (attributeTypes.get(i).equals("org.locationtech.jts.geom.Geometry")) { if (serializationVersion < FieldUtils.SERIALIZATION_VERSION) { sfb.add(legacyReader.read(val.array())); } else { sfb.add(WKB_READER.read(val.array())); } } else { final FieldReader fr = FieldUtils.getDefaultReaderForClass( Class.forName(jtsCompatibility(attributeTypes.get(i)))); sfb.add(fr.readField(val.array(), serializationVersion)); } } simpleFeature = sfb.buildFeature(attributeValues.getFid()); return simpleFeature; } private static String jtsCompatibility(final String attrTypeName) { if (attrTypeName.startsWith("com.vividsolutions")) { return attrTypeName.replace("com.vividsolutions", "org.locationtech"); } return attrTypeName; } /** * * Deserialize byte stream into an AvroSimpleFeature * * @param avroData serialized bytes of AvroSimpleFeature * @param avroObjectToReuse null or AvroSimpleFeature instance to be re-used. If null a new object * will be allocated. * @return instance of AvroSimpleFeature with values parsed from avroData * @throws IOException */ private static AvroSimpleFeature deserializeASF( final byte[] avroData, AvroSimpleFeature avroObjectToReuse) throws IOException { final BinaryDecoder decoder = DECODER_FACTORY.binaryDecoder(avroData, null); if (avroObjectToReuse == null) { avroObjectToReuse = new AvroSimpleFeature(); } DATUM_READER.setSchema(avroObjectToReuse.getSchema()); return DATUM_READER.read(avroObjectToReuse, decoder); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/cli/VectorCLIProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.cli; import org.locationtech.geowave.adapter.vector.delete.CQLDelete; import org.locationtech.geowave.adapter.vector.export.VectorLocalExportCommand; import org.locationtech.geowave.adapter.vector.export.VectorMRExportCommand; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class VectorCLIProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { VectorSection.class, VectorLocalExportCommand.class, VectorMRExportCommand.class, CQLDelete.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/cli/VectorSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.cli;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * CLI section that groups vector data operations under the top-level GeoWave command. The body is
 * intentionally empty: the annotations alone register the "vector" sub-command, and concrete
 * operations attach themselves via {@code parentOperation = VectorSection.class}.
 */
@GeowaveOperation(name = "vector", parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Vector data operations")
public class VectorSection extends DefaultOperation {
}

================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/delete/CQLDelete.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.delete; import java.util.ArrayList; import java.util.List; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.time.StopWatch; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.config.Configurator; import org.locationtech.geowave.adapter.vector.cli.VectorSection; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Query; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.IStringConverter; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import 
com.beust.jcommander.Parameters; @GeowaveOperation(name = "cqldelete", parentOperation = VectorSection.class) @Parameters(commandDescription = "Delete data that matches a CQL filter") public class CQLDelete extends DefaultOperation implements Command { private static Logger LOGGER = LoggerFactory.getLogger(CQLDelete.class); @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter(names = "--cql", required = true, description = "CQL Filter for delete") private String cqlStr; @Parameter( names = "--indexName", required = false, description = "The name of the index (optional)", converter = StringToByteArrayConverter.class) private String indexName; @Parameter( names = "--typeName", required = false, description = "Optional ability to provide a type name for the data adapter", converter = StringToByteArrayConverter.class) private String typeName; @Parameter( names = "--debug", required = false, description = "Print out additional info for debug purposes") private boolean debug = false; public void setParameters(final List parameters) { this.parameters = parameters; } public void setDebug(final boolean debug) { this.debug = debug; } @Override public void execute(final OperationParams params) throws ParseException { if (debug) { Configurator.setLevel(LogManager.getRootLogger().getName(), Level.DEBUG); } final StopWatch stopWatch = new StopWatch(); // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires arguments: "); } final String storeName = parameters.get(0); // Attempt to load store. 
final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final DataStore dataStore = storeOptions.createDataStore(); final PersistentAdapterStore adapterStore = storeOptions.createAdapterStore(); final InternalAdapterStore internalAdapterStore = storeOptions.createInternalAdapterStore(); final GeotoolsFeatureDataAdapter adapter; if (typeName != null) { adapter = (GeotoolsFeatureDataAdapter) adapterStore.getAdapter( internalAdapterStore.getAdapterId(typeName)).getAdapter(); } else { final InternalDataAdapter[] adapters = adapterStore.getAdapters(); adapter = (GeotoolsFeatureDataAdapter) adapters[0].getAdapter(); } if (debug && (adapter != null)) { LOGGER.debug(adapter.toString()); } stopWatch.start(); final long results = delete(adapter, typeName, indexName, dataStore, debug); stopWatch.stop(); if (debug) { LOGGER.debug(results + " results remaining after delete; time = " + stopWatch.toString()); } } protected long delete( final GeotoolsFeatureDataAdapter adapter, final String typeName, final String indexName, final DataStore dataStore, final boolean debug) { long missed = 0; final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); final Query query = bldr.addTypeName(typeName).indexName(indexName).constraints( bldr.constraintsFactory().cqlConstraints(cqlStr)).build(); final boolean success = dataStore.delete(query); if (debug) { LOGGER.debug("CQL Delete " + (success ? 
"Success" : "Failure")); } // Verify delete by running the CQL query if (debug) { try (final CloseableIterator it = dataStore.query(query)) { while (it.hasNext()) { it.next(); missed++; } } } return missed; } public static class StringToByteArrayConverter implements IStringConverter { @Override public ByteArray convert(final String value) { return new ByteArray(value); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorExportMapper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.export; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.avro.mapred.AvroKey; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Mapper; import org.locationtech.geowave.adapter.vector.GeoWaveAvroFeatureUtils; import org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues; import org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection; import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class VectorExportMapper extends Mapper, NullWritable> { private static final Logger LOGGER = LoggerFactory.getLogger(Logger.class); private int batchSize; private final Map adapterIdToAvroWriterMap = new HashMap<>(); private final NullWritable outVal = NullWritable.get(); private final AvroKey outKey = new AvroKey<>(); @Override protected void map( final GeoWaveInputKey key, final SimpleFeature value, final Mapper, NullWritable>.Context context) throws IOException, InterruptedException { AvroSFCWriter avroWriter = adapterIdToAvroWriterMap.get(key.getInternalAdapterId()); if (avroWriter == null) { avroWriter = new AvroSFCWriter(value.getFeatureType(), batchSize); adapterIdToAvroWriterMap.put(key.getInternalAdapterId(), avroWriter); } final AvroSimpleFeatureCollection retVal = avroWriter.write(value); if (retVal != null) { outKey.datum(retVal); context.write(outKey, 
outVal); } } @Override protected void setup( final Mapper, NullWritable>.Context context) throws IOException, InterruptedException { super.setup(context); batchSize = context.getConfiguration().getInt( VectorMRExportJobRunner.BATCH_SIZE_KEY, VectorExportOptions.DEFAULT_BATCH_SIZE); } @Override protected void cleanup( final Mapper, NullWritable>.Context context) throws IOException, InterruptedException { super.cleanup(context); writeRemainingAvroBatches(context); } private void writeRemainingAvroBatches( final Mapper, NullWritable>.Context context) throws IOException, InterruptedException { for (final AvroSFCWriter writer : adapterIdToAvroWriterMap.values()) { if (writer.avList.size() > 0) { writer.simpleFeatureCollection.setSimpleFeatureCollection(writer.avList); outKey.datum(writer.simpleFeatureCollection); context.write(outKey, outVal); } } } private static class AvroSFCWriter { private final int batchSize; private final SimpleFeatureType sft; private AvroSimpleFeatureCollection simpleFeatureCollection = null; private List avList = null; private AvroSFCWriter(final SimpleFeatureType sft, final int batchSize) { this.sft = sft; this.batchSize = batchSize; } private AvroSimpleFeatureCollection write(final SimpleFeature feature) { AvroSimpleFeatureCollection retVal = null; if (simpleFeatureCollection == null) { newFeatureCollection(); } else if (avList.size() >= batchSize) { simpleFeatureCollection.setSimpleFeatureCollection(avList); retVal = simpleFeatureCollection; newFeatureCollection(); } final AvroAttributeValues av = GeoWaveAvroFeatureUtils.buildAttributeValue(feature, sft); avList.add(av); return retVal; } // this isn't intended to be thread safe private void newFeatureCollection() { simpleFeatureCollection = new AvroSimpleFeatureCollection(); try { simpleFeatureCollection.setFeatureType( GeoWaveAvroFeatureUtils.buildFeatureDefinition(null, sft, null, "")); } catch (final IOException e) { // this should never actually happen, deault classification is // passed 
in LOGGER.warn("Unable to find classification", e); } avList = new ArrayList<>(batchSize); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorExportOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.export; import java.util.List; import com.beust.jcommander.Parameter; public class VectorExportOptions { protected static final int DEFAULT_BATCH_SIZE = 10000; @Parameter(names = "--cqlFilter", description = "Filter exported data based on CQL filter") private String cqlFilter; @Parameter(names = "--typeNames", description = "Comma separated list of type names") private List typeNames; @Parameter(names = "--indexName", description = "The index to export from") private String indexName; @Parameter(names = "--batchSize", description = "Records to process at a time") private int batchSize = DEFAULT_BATCH_SIZE; public String getCqlFilter() { return cqlFilter; } public List getTypeNames() { return typeNames; } public String getIndexName() { return indexName; } public int getBatchSize() { return batchSize; } public void setCqlFilter(final String cqlFilter) { this.cqlFilter = cqlFilter; } public void setTypeNames(final List typeNames) { this.typeNames = typeNames; } public void setIndexName(final String indexName) { this.indexName = indexName; } public void setBatchSize(final int batchSize) { this.batchSize = batchSize; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorLocalExportCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.export;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.geotools.filter.text.cql2.CQLException;
import org.locationtech.geowave.adapter.vector.GeoWaveAvroFeatureUtils;
import org.locationtech.geowave.adapter.vector.avro.AvroAttributeValues;
import org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection;
import org.locationtech.geowave.adapter.vector.cli.VectorSection;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;
import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/**
 * CLI command that exports vector data from a store directly to a local Snappy-compressed Avro
 * file, batching features into AvroSimpleFeatureCollection records.
 */
@GeowaveOperation(name = "localexport", parentOperation = VectorSection.class)
@Parameters(commandDescription = "Export data directly to Avro file")
public class VectorLocalExportCommand extends DefaultOperation implements Command {
  // Positional argument: the name of the store to export from.
  @Parameter(description = "")
  private List parameters = new ArrayList<>();

  @ParametersDelegate
  private VectorLocalExportOptions options = new VectorLocalExportOptions();

  // Populated by execute(); exposed via getInputStoreOptions() for tests/callers.
  private DataStorePluginOptions inputStoreOptions = null;

  /**
   * Loads the store, resolves the vector adapters to export (either the requested type names or
   * every GeotoolsFeatureDataAdapter in the store), then streams each type's features into the
   * Avro output file in batches of options.getBatchSize().
   *
   * @param params operation parameters supplied by the CLI framework
   * @throws IOException if the Avro file cannot be written
   * @throws CQLException if the optional CQL filter fails to parse
   */
  @Override
  public void execute(final OperationParams params) throws IOException, CQLException {
    // Ensure we have all the required arguments
    if (parameters.size() != 1) {
      throw new ParameterException("Requires arguments: ");
    }

    final String storeName = parameters.get(0);

    // Attempt to load store.
    inputStoreOptions =
        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());

    final PersistentAdapterStore adapterStore = inputStoreOptions.createAdapterStore();
    final IndexStore indexStore = inputStoreOptions.createIndexStore();
    final DataStore dataStore = inputStoreOptions.createDataStore();
    final InternalAdapterStore internalAdapterStore =
        inputStoreOptions.createInternalAdapterStore();

    // try-with-resources guarantees the Avro writer is closed even on partial export.
    try (final DataFileWriter dfw = new DataFileWriter<>(
        new GenericDatumWriter(AvroSimpleFeatureCollection.SCHEMA$))) {
      dfw.setCodec(CodecFactory.snappyCodec());
      dfw.create(AvroSimpleFeatureCollection.SCHEMA$, options.getOutputFile());

      // get appropriate feature adapters
      final List featureAdapters = new ArrayList<>();
      if ((options.getTypeNames() != null) && (options.getTypeNames().size() > 0)) {
        // Only export the explicitly requested types; unknown or non-vector types are
        // reported to the console and skipped rather than aborting the whole export.
        for (final String typeName : options.getTypeNames()) {
          final short adapterId = internalAdapterStore.getAdapterId(typeName);
          final InternalDataAdapter internalDataAdapter = adapterStore.getAdapter(adapterId);
          if (internalDataAdapter == null) {
            params.getConsole().println("Type '" + typeName + "' not found");
            continue;
          } else if (!(internalDataAdapter.getAdapter() instanceof GeotoolsFeatureDataAdapter)) {
            params.getConsole().println(
                "Type '"
                    + typeName
                    + "' does not support vector export. Instance of "
                    + internalDataAdapter.getAdapter().getClass());
            continue;
          }
          featureAdapters.add((GeotoolsFeatureDataAdapter) internalDataAdapter.getAdapter());
        }
      } else {
        // No types requested: export every vector adapter found in the store.
        final InternalDataAdapter[] adapters = adapterStore.getAdapters();
        for (final InternalDataAdapter adapter : adapters) {
          if (adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter) {
            featureAdapters.add((GeotoolsFeatureDataAdapter) adapter.getAdapter());
          }
        }
      }
      if (featureAdapters.isEmpty()) {
        params.getConsole().println("Unable to find any vector data types in store");
      }

      // Validate the requested index up front so a bad name fails before any data is written.
      Index queryIndex = null;
      if (options.getIndexName() != null) {
        queryIndex = indexStore.getIndex(options.getIndexName());
        if (queryIndex == null) {
          params.getConsole().println(
              "Unable to find index '" + options.getIndexName() + "' in store");
          return;
        }
      }

      for (final GeotoolsFeatureDataAdapter adapter : featureAdapters) {
        params.getConsole().println("Exporting type '" + adapter.getTypeName() + "'");
        final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
        if (options.getIndexName() != null) {
          bldr.indexName(options.getIndexName());
        }
        if (options.getCqlFilter() != null) {
          bldr.constraints(bldr.constraintsFactory().cqlConstraints(options.getCqlFilter()));
        }
        bldr.addTypeName(adapter.getTypeName());
        try (final CloseableIterator it = dataStore.query(bldr.build())) {
          int iteration = 0;
          while (it.hasNext()) {
            // Each outer pass writes one AvroSimpleFeatureCollection of up to batchSize features.
            final AvroSimpleFeatureCollection simpleFeatureCollection =
                new AvroSimpleFeatureCollection();

            final SimpleFeature next = it.next();
            final SimpleFeatureType featureType = next.getFeatureType();
            simpleFeatureCollection.setFeatureType(
                GeoWaveAvroFeatureUtils.buildFeatureDefinition(null, featureType, null, ""));
            final List avList = new ArrayList<>(options.getBatchSize());
            avList.add(GeoWaveAvroFeatureUtils.buildAttributeValue(next, featureType));
            while (it.hasNext() && (avList.size() < options.getBatchSize())) {
              avList.add(GeoWaveAvroFeatureUtils.buildAttributeValue(it.next(), featureType));
            }
            params.getConsole().println(
                "Exported "
                    + (avList.size() + (iteration * options.getBatchSize()))
                    + " features from '"
                    + adapter.getTypeName()
                    + "'");
            iteration++;
            simpleFeatureCollection.setSimpleFeatureCollection(avList);
            dfw.append(simpleFeatureCollection);
            // Flush per batch so partially-complete exports are still readable.
            dfw.flush();
          }
          params.getConsole().println("Finished exporting '" + adapter.getTypeName() + "'");
        }
      }
    }
  }

  public List getParameters() {
    return parameters;
  }

  public void setParameters(final String storeName) {
    parameters = new ArrayList<>();
    parameters.add(storeName);
  }

  public DataStorePluginOptions getInputStoreOptions() {
    return inputStoreOptions;
  }

  public void setOptions(final VectorLocalExportOptions options) {
    this.options = options;
  }

  public VectorLocalExportOptions getOptions() {
    return options;
  }
}

================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorLocalExportOptions.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.export; import java.io.File; import com.beust.jcommander.Parameter; public class VectorLocalExportOptions extends VectorExportOptions { @Parameter(names = "--outputFile", required = true) private File outputFile; public File getOutputFile() { return outputFile; } public void setOutputFile(final File outputFile) { this.outputFile = outputFile; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorMRExportCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.export;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.locationtech.geowave.adapter.vector.cli.VectorSection;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/**
 * CLI command that exports vector data via MapReduce. This class only parses arguments and wires
 * up a VectorMRExportJobRunner; the actual job configuration and execution live in the runner.
 */
@GeowaveOperation(name = "mrexport", parentOperation = VectorSection.class)
@Parameters(commandDescription = "Export data using MapReduce")
public class VectorMRExportCommand extends DefaultOperation implements Command {
  // Positional arguments: the HDFS destination path followed by the store name.
  @Parameter(description = " ")
  private List parameters = new ArrayList<>();

  @ParametersDelegate
  private VectorMRExportOptions mrOptions = new VectorMRExportOptions();

  // Lazily resolved in createRunner(); may be pre-set via setStoreOptions() (e.g. by tests).
  private DataStorePluginOptions storeOptions = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    createRunner(params).runJob();
  }

  /**
   * Validates the positional arguments, loads HDFS configuration and the target store, and builds
   * the MapReduce job runner.
   *
   * @param params operation parameters supplied by the CLI framework
   * @return a configured VectorMRExportJobRunner ready for runJob()
   */
  public VectorMRExportJobRunner createRunner(final OperationParams params) {
    // Ensure we have all the required arguments
    if (parameters.size() != 2) {
      throw new ParameterException("Requires arguments: ");
    }

    final String hdfsPath = parameters.get(0);
    final String storeName = parameters.get(1);

    // Config file
    final File configFile = getGeoWaveConfigFile(params);
    final Properties configProperties = ConfigOptions.loadProperties(configFile);
    // The HDFS url comes from the persisted GeoWave config (set via the config hdfs command).
    final String hdfsHostPort = ConfigHDFSCommand.getHdfsUrl(configProperties);

    // Attempt to load store.
    if (storeOptions == null) {
      storeOptions = CLIUtils.loadStore(storeName, configFile, params.getConsole());
    }

    final VectorMRExportJobRunner vectorRunner =
        new VectorMRExportJobRunner(
            storeOptions,
            mrOptions,
            hdfsHostPort,
            hdfsPath,
            params.getConsole());
    return vectorRunner;
  }

  public List getParameters() {
    return parameters;
  }

  public void setParameters(final String hdfsPath, final String storeName) {
    parameters = new ArrayList<>();
    parameters.add(hdfsPath);
    parameters.add(storeName);
  }

  public VectorMRExportOptions getMrOptions() {
    return mrOptions;
  }

  public void setMrOptions(final VectorMRExportOptions mrOptions) {
    this.mrOptions = mrOptions;
  }

  public DataStorePluginOptions getStoreOptions() {
    return storeOptions;
  }

  public void setStoreOptions(final DataStorePluginOptions storeOptions) {
    this.storeOptions = storeOptions;
  }
}

================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorMRExportJobRunner.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.export; import java.io.IOException; import java.util.List; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapreduce.AvroJob; import org.apache.avro.mapreduce.AvroKeyOutputFormat; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.geotools.filter.text.cql2.CQLException; import org.locationtech.geowave.adapter.vector.avro.AvroSimpleFeatureCollection; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.parser.CommandLineOperationParams; import org.locationtech.geowave.core.cli.parser.OperationParser; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.mapreduce.GeoWaveConfiguratorBase; import 
org.locationtech.geowave.mapreduce.input.GeoWaveInputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.internal.Console; public class VectorMRExportJobRunner extends Configured implements Tool { private static final Logger LOGGER = LoggerFactory.getLogger(VectorMRExportCommand.class); public static final String BATCH_SIZE_KEY = "BATCH_SIZE"; private final DataStorePluginOptions storeOptions; private final VectorMRExportOptions mrOptions; private final String hdfsHostPort; private final String hdfsPath; private final Console console; public VectorMRExportJobRunner( final DataStorePluginOptions storeOptions, final VectorMRExportOptions mrOptions, final String hdfsHostPort, final String hdfsPath, final Console console) { this.storeOptions = storeOptions; this.mrOptions = mrOptions; this.hdfsHostPort = hdfsHostPort; this.hdfsPath = hdfsPath; this.console = console; } /** Main method to execute the MapReduce analytic. */ public int runJob() throws CQLException, IOException, InterruptedException, ClassNotFoundException { Configuration conf = super.getConf(); if (conf == null) { conf = new Configuration(); setConf(conf); } GeoWaveConfiguratorBase.setRemoteInvocationParams( hdfsHostPort, mrOptions.getResourceManagerHostPort(), conf); final List typeNames = mrOptions.getTypeNames(); final PersistentAdapterStore adapterStore = storeOptions.createAdapterStore(); final InternalAdapterStore internalAdapterStore = storeOptions.createInternalAdapterStore(); final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); if ((typeNames != null) && (typeNames.size() > 0)) { bldr.setTypeNames(typeNames.toArray(new String[0])); // options.setAdapters(Lists.transform( // typeNames, // new Function>() { // // @Override // public DataTypeAdapter apply( // final String input ) { // Short internalAdpaterId = // internalAdapterStore.getInternalAdapterId(new ByteArrayId( // input)); // return adapterStore.getAdapter(internalAdpaterId); // } // 
})); } conf.setInt(BATCH_SIZE_KEY, mrOptions.getBatchSize()); final IndexStore indexStore = storeOptions.createIndexStore(); if (mrOptions.getIndexName() != null) { final Index index = indexStore.getIndex(mrOptions.getIndexName()); if (index == null) { console.println("Unable to find index '" + mrOptions.getIndexName() + "' in store"); return -1; } bldr.indexName(mrOptions.getIndexName()); } if (mrOptions.getCqlFilter() != null) { if ((typeNames == null) || (typeNames.size() != 1)) { console.println("Exactly one type is expected when using CQL filter"); return -1; } final String typeName = typeNames.get(0); final Short internalAdpaterId = internalAdapterStore.getAdapterId(typeName); final InternalDataAdapter adapter = storeOptions.createAdapterStore().getAdapter(internalAdpaterId); if (adapter == null) { console.println("Type '" + typeName + "' not found"); return -1; } if (!(adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter)) { console.println("Type '" + typeName + "' does not support vector export"); return -1; } bldr.constraints(bldr.constraintsFactory().cqlConstraints(mrOptions.getCqlFilter())); } GeoWaveInputFormat.setStoreOptions(conf, storeOptions); // the above code is a temporary placeholder until this gets merged with // the new commandline options GeoWaveInputFormat.setQuery(conf, bldr.build(), adapterStore, internalAdapterStore, indexStore); final Job job = new Job(conf); job.setJarByClass(this.getClass()); job.setJobName("Exporting to " + hdfsPath); FileOutputFormat.setCompressOutput(job, true); FileOutputFormat.setOutputPath(job, new Path(hdfsPath)); job.setMapperClass(VectorExportMapper.class); job.setInputFormatClass(GeoWaveInputFormat.class); job.setOutputFormatClass(AvroKeyOutputFormat.class); job.setMapOutputKeyClass(AvroKey.class); job.setMapOutputValueClass(NullWritable.class); job.setOutputKeyClass(AvroKey.class); job.setOutputValueClass(NullWritable.class); job.setNumReduceTasks(0); AvroJob.setOutputKeySchema(job, 
AvroSimpleFeatureCollection.SCHEMA$); AvroJob.setMapOutputKeySchema(job, AvroSimpleFeatureCollection.SCHEMA$); GeoWaveInputFormat.setMinimumSplitCount(job.getConfiguration(), mrOptions.getMinSplits()); GeoWaveInputFormat.setMaximumSplitCount(job.getConfiguration(), mrOptions.getMaxSplits()); boolean retVal = false; try { retVal = job.waitForCompletion(true); } catch (final IOException ex) { LOGGER.error("Error waiting for map reduce tile resize job: ", ex); } return retVal ? 0 : 1; } public static void main(final String[] args) throws Exception { final ConfigOptions opts = new ConfigOptions(); final OperationParser parser = new OperationParser(); parser.addAdditionalObject(opts); final VectorMRExportCommand command = new VectorMRExportCommand(); final CommandLineOperationParams params = parser.parse(command, args); opts.prepare(params); final int res = ToolRunner.run(new Configuration(), command.createRunner(params), args); System.exit(res); } @Override public int run(final String[] args) throws Exception { return runJob(); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/export/VectorMRExportOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.export; import com.beust.jcommander.Parameter; public class VectorMRExportOptions extends VectorExportOptions { @Parameter(names = "--resourceManagerHostPort") private String resourceManagerHostPort; @Parameter(names = "--minSplits", description = "The min partitions for the input data") private Integer minSplits; @Parameter(names = "--maxSplits", description = "The max partitions for the input data") private Integer maxSplits; public Integer getMinSplits() { return minSplits; } public Integer getMaxSplits() { return maxSplits; } public String getResourceManagerHostPort() { return resourceManagerHostPort; } public void setResourceManagerHostPort(final String resourceManagerHostPort) { this.resourceManagerHostPort = resourceManagerHostPort; } public void setMinSplits(final Integer minSplits) { this.minSplits = minSplits; } public void setMaxSplits(final Integer maxSplits) { this.maxSplits = maxSplits; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/field/SimpleFeatureSerializationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.field; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.data.field.FieldReader; import org.locationtech.geowave.core.store.data.field.FieldUtils; import org.locationtech.geowave.core.store.data.field.FieldWriter; import org.opengis.feature.simple.SimpleFeatureType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SimpleFeatureSerializationProvider { private static final Logger LOGGER = LoggerFactory.getLogger(SimpleFeatureSerializationProvider.class); public static class WholeFeatureReader implements FieldReader { SimpleFeatureType type; public WholeFeatureReader(final SimpleFeatureType type) { super(); this.type = type; } @Override public byte[][] readField(final byte[] fieldData) { if (fieldData == null) { return null; } final ByteBuffer input = ByteBuffer.wrap(fieldData); final int attrCnt = type.getAttributeCount(); final byte[][] retVal = new byte[attrCnt][]; for (int i = 0; i < attrCnt; i++) { final int byteLength = VarintUtils.readSignedInt(input); if (byteLength < 0) { retVal[i] = null; continue; } final byte[] fieldValue = ByteArrayUtils.safeRead(input, byteLength); retVal[i] = fieldValue; } return retVal; } } public static class WholeFeatureWriter implements FieldWriter { public WholeFeatureWriter() { super(); } @Override public byte[] writeField(final Object[] fieldValue) { if (fieldValue == null) { return new 
byte[] {}; } final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final DataOutputStream output = new DataOutputStream(baos); try { for (final Object attr : fieldValue) { ByteBuffer lengthBytes; if (attr == null) { lengthBytes = ByteBuffer.allocate(VarintUtils.signedIntByteLength(-1)); VarintUtils.writeSignedInt(-1, lengthBytes); output.write(lengthBytes.array()); continue; } final FieldWriter writer = FieldUtils.getDefaultWriterForClass(attr.getClass()); final byte[] binary = writer.writeField(attr); lengthBytes = ByteBuffer.allocate(VarintUtils.signedIntByteLength(binary.length)); VarintUtils.writeSignedInt(binary.length, lengthBytes); output.write(lengthBytes.array()); output.write(binary); } output.close(); } catch (final IOException e) { LOGGER.error("Unable to write to output", e); } return baos.toByteArray(); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/ChooseBestMatchIndexQueryStrategy.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.index;

import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.locationtech.geowave.core.index.IndexUtils;
import org.locationtech.geowave.core.index.QueryRanges;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.IndexStatistic;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy;
import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy;
import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Index query strategy that selects the single index expected to scan the fewest rows, estimating
 * per-index cardinality from the row-range histogram statistic. Falls back to the first usable
 * index when every candidate would require a full table scan.
 */
public class ChooseBestMatchIndexQueryStrategy implements IndexQueryStrategySPI {
  public static final String NAME = "Best Match";
  private static final Logger LOGGER =
      LoggerFactory.getLogger(ChooseBestMatchIndexQueryStrategy.class);

  @Override
  public String toString() {
    return NAME;
  }

  @Override
  public CloseableIterator getIndices(
      final DataStatisticsStore statisticsStore,
      final AdapterIndexMappingStore mappingStore,
      final QueryConstraints query,
      final Index[] indices,
      final InternalDataAdapter adapter,
      final Map hints) {
    // Lazy, single-result iterator: hasNext() evaluates every candidate index exactly once
    // (guarded by 'done') and stashes the winner in nextIdx; next() hands it out once.
    return new CloseableIterator() {
      Index nextIdx = null;
      boolean done = false;
      int i = 0;

      @Override
      public boolean hasNext() {
        long min = Long.MAX_VALUE;
        Index bestIdx = null;
        while (!done && (i < indices.length)) {
          nextIdx = indices[i++];
          // skip indexes with no indexed dimensions (nothing to constrain against)
          if (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length == 0) {
            continue;
          }
          final List constraints = query.getIndexConstraints(nextIdx);
          // look up this index's row-range histogram; it must use the expected composite
          // (data type + partition) binning to be usable for cardinality estimation
          RowRangeHistogramStatistic rowRangeHistogramStatistic = null;
          try (CloseableIterator>> stats = statisticsStore.getIndexStatistics(
              nextIdx,
              RowRangeHistogramStatistic.STATS_TYPE,
              Statistic.INTERNAL_TAG)) {
            if (stats.hasNext()) {
              final Statistic statistic = stats.next();
              if ((statistic instanceof RowRangeHistogramStatistic)
                  && (statistic.getBinningStrategy() instanceof CompositeBinningStrategy)
                  && ((CompositeBinningStrategy) statistic.getBinningStrategy()).isOfType(
                      DataTypeBinningStrategy.class,
                      PartitionBinningStrategy.class)) {
                rowRangeHistogramStatistic = (RowRangeHistogramStatistic) statistic;
              }
            }
          }
          if (rowRangeHistogramStatistic == null) {
            LOGGER.warn(
                "Best Match Heuristic requires statistic RowRangeHistogramStatistics for each index to properly choose an index.");
          }
          if (IndexUtils.isFullTableScan(constraints)) {
            // keep this is as a default in case all indices
            // result in a full table scan
            if (bestIdx == null) {
              bestIdx = nextIdx;
            }
          } else {
            final int maxRangeDecomposition;
            if (hints.containsKey(QueryHint.MAX_RANGE_DECOMPOSITION)) {
              maxRangeDecomposition = (Integer) hints.get(QueryHint.MAX_RANGE_DECOMPOSITION);
            } else {
              LOGGER.warn(
                  "No max range decomposition hint was provided, this should be provided from the data store options");
              maxRangeDecomposition = 2000;
            }
            final QueryRanges ranges = DataStoreUtils.constraintsToQueryRanges(
                constraints,
                nextIdx,
                null,
                maxRangeDecomposition);
            // estimated row count for this index; lowest estimate wins
            final long temp = DataStoreUtils.cardinality(
                statisticsStore,
                rowRangeHistogramStatistic,
                adapter,
                nextIdx,
                ranges);
            if (temp < min) {
              bestIdx = nextIdx;
              min = temp;
            }
          }
        }
        nextIdx = bestIdx;
        done = true;
        return nextIdx != null;
      }

      @Override
      public Index next() throws NoSuchElementException {
        if (nextIdx == null) {
          throw new NoSuchElementException();
        }
        // clear nextIdx so the single result is only returned once
        final Index returnVal = nextIdx;
        nextIdx = null;
        return returnVal;
      }

      @Override
      public void remove() {}

      @Override
      public void close() {}
    };
  }

  @Override
  public boolean requiresStats() {
    return true;
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/ChooseHeuristicMatchIndexQueryStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.index;

import java.util.Map;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.base.BaseDataStoreUtils;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import com.google.common.collect.Iterators;

/**
 * This Query Strategy chooses the index that satisfies the most dimensions of the underlying query
 * first and then if multiple are found it will choose the one that most closely preserves locality.
 * It won't be optimized for a single prefix query but it will choose the index with the most
 * dimensions defined, enabling more fine-grained constraints given a larger set of indexable
 * ranges.
 */
public class ChooseHeuristicMatchIndexQueryStrategy implements IndexQueryStrategySPI {
  public static final String NAME = "Heuristic Match";

  @Override
  public String toString() {
    return NAME;
  }

  @Override
  public CloseableIterator getIndices(
      final DataStatisticsStore statisticsStore,
      final AdapterIndexMappingStore indexMappingStore,
      final QueryConstraints query,
      final Index[] indices,
      final InternalDataAdapter adapter,
      final Map hints) {
    // delegates the actual heuristic to BaseDataStoreUtils.chooseBestIndex and wraps the
    // single winning index in a one-element CloseableIterator
    return new CloseableIterator.Wrapper<>(
        Iterators.singletonIterator(
            BaseDataStoreUtils.chooseBestIndex(indices, query, adapter, indexMappingStore)));
  }

  @Override
  public boolean requiresStats() {
    // purely structural heuristic; no statistics are consulted
    return false;
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/ChooseLocalityPreservingQueryStrategy.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.index;

import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.locationtech.geowave.core.index.IndexUtils;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;

/**
 * This Query Strategy purely chooses the index that most closely preserves locality given a query.
 * It will behave the best assuming a single prefix query but because it doesn't always choose the
 * index with the most dimensions defined, it will not always have the most fine-grained constraints
 * given a larger set of indexable ranges.
 */
public class ChooseLocalityPreservingQueryStrategy implements IndexQueryStrategySPI {
  public static final String NAME = "Preserve Locality";

  @Override
  public String toString() {
    return NAME;
  }

  @Override
  public CloseableIterator getIndices(
      final DataStatisticsStore statisticsStore,
      final AdapterIndexMappingStore mappingStore,
      final QueryConstraints query,
      final Index[] indices,
      final InternalDataAdapter adapter,
      final Map hints) {
    // Lazy, single-result iterator: hasNext() scores every candidate index exactly once
    // (guarded by 'done') and keeps the highest-scoring one; next() hands it out once.
    return new CloseableIterator() {
      Index nextIdx = null;
      boolean done = false;
      int i = 0;

      @Override
      public boolean hasNext() {
        double indexMax = -1;
        Index bestIdx = null;
        while (!done && (i < indices.length)) {
          nextIdx = indices[i++];
          // skip indexes with no indexed dimensions (nothing to constrain against)
          if (nextIdx.getIndexStrategy().getOrderedDimensionDefinitions().length == 0) {
            continue;
          }
          final List queryRanges = query.getIndexConstraints(nextIdx);
          if (IndexUtils.isFullTableScan(queryRanges)) {
            // keep this is as a default in case all indices
            // result in a full table scan
            if (bestIdx == null) {
              bestIdx = nextIdx;
            }
          } else {
            // score = total bits of index precision used across all query ranges;
            // a higher score means the index constrains the query more tightly
            double totalMax = 0;
            for (final MultiDimensionalNumericData qr : queryRanges) {
              final double[] dataRangePerDimension = new double[qr.getDimensionCount()];
              for (int d = 0; d < dataRangePerDimension.length; d++) {
                dataRangePerDimension[d] =
                    qr.getMaxValuesPerDimension()[d] - qr.getMinValuesPerDimension()[d];
              }
              totalMax += IndexUtils.getDimensionalBitsUsed(
                  nextIdx.getIndexStrategy(),
                  dataRangePerDimension);
            }
            if (totalMax > indexMax) {
              indexMax = totalMax;
              bestIdx = nextIdx;
            }
          }
        }
        nextIdx = bestIdx;
        done = true;
        return nextIdx != null;
      }

      @Override
      public Index next() throws NoSuchElementException {
        if (nextIdx == null) {
          throw new NoSuchElementException();
        }
        // clear nextIdx so the single result is only returned once
        final Index returnVal = nextIdx;
        nextIdx = null;
        return returnVal;
      }

      @Override
      public void remove() {}

      @Override
      public void close() {}
    };
  }

  @Override
  public boolean requiresStats() {
    return false;
  }
}

================================================
FILE:
extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/IndexQueryStrategySPI.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.index; import java.util.Map; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; public interface IndexQueryStrategySPI { public enum QueryHint { MAX_RANGE_DECOMPOSITION } boolean requiresStats(); CloseableIterator getIndices( DataStatisticsStore statisticsStore, AdapterIndexMappingStore indexMappingStore, QueryConstraints query, Index[] indices, InternalDataAdapter adapter, Map hints); } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/SimpleFeaturePrimaryIndexConfiguration.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.index;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;
import org.locationtech.geowave.core.index.StringUtils;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Stores the list of primary index names associated with a SimpleFeatureType, persisted in the
 * type's user data as a CSV string under the key '{@value #INDEX_NAME}'.
 */
public class SimpleFeaturePrimaryIndexConfiguration implements
    SimpleFeatureUserDataConfiguration,
    java.io.Serializable {
  private static final long serialVersionUID = -7425830022998223202L;
  public static final String INDEX_NAME = "PrimaryIndexName";
  // null when not yet configured from a type
  private List indexNames = null;

  public SimpleFeaturePrimaryIndexConfiguration() {
    super();
  }

  public SimpleFeaturePrimaryIndexConfiguration(final SimpleFeatureType type) {
    super();
    configureFromType(type);
  }

  /**
   * Get all the index names associated with the SimpleFeatureType referenced.
   *
   * @param type SFT object which contains Index Names
   * @return List of index names
   */
  public static final List getIndexNames(final SimpleFeatureType type) {
    final Object obj = type.getUserData().get(INDEX_NAME);
    if (obj != null) {
      return Arrays.asList(obj.toString().split(","));
    }
    return Collections.emptyList();
  }

  /**
   * {@inheritDoc} This method updates the passed in type by adding a CSV string of all the index
   * names for this Simple Feature Primary Index Configuration. It is stored in user data as
   * '{@value #INDEX_NAME}'
   *
   * @param type SFT to be updated.
   */
  @Override
  public void updateType(final SimpleFeatureType type) {
    if (indexNames == null) {
      // nothing configured; leave the type's user data untouched
      return;
    }
    // String.join replaces the previous manual StringBuffer loop (same output)
    type.getUserData().put(INDEX_NAME, String.join(",", indexNames));
  }

  @Override
  public void configureFromType(final SimpleFeatureType type) {
    indexNames = getIndexNames(type);
  }

  public List getIndexNames() {
    return indexNames;
  }

  public void setIndexNames(final List indexNames) {
    this.indexNames = indexNames;
  }

  @Override
  public byte[] toBinary() {
    // guard against NPE when no index names have been configured
    if (indexNames == null) {
      return StringUtils.stringsToBinary(new String[0]);
    }
    return StringUtils.stringsToBinary(indexNames.toArray(new String[0]));
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    indexNames = Arrays.asList(StringUtils.stringsFromBinary(bytes));
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/SimpleFeatureSecondaryIndexConfiguration.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.index; import java.util.Set; import org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration; import com.fasterxml.jackson.annotation.JsonIgnore; public interface SimpleFeatureSecondaryIndexConfiguration extends SimpleFeatureUserDataConfiguration { @JsonIgnore public String getIndexKey(); public Set getAttributes(); } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/index/VectorTextIndexEntryConverter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.index; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.text.TextIndexEntryConverter; import org.opengis.feature.simple.SimpleFeature; public class VectorTextIndexEntryConverter implements TextIndexEntryConverter { private int attributeIndex; public VectorTextIndexEntryConverter() { super(); } public VectorTextIndexEntryConverter(final int attributeIndex) { super(); this.attributeIndex = attributeIndex; } @Override public byte[] toBinary() { return VarintUtils.writeUnsignedInt(attributeIndex); } @Override public void fromBinary(final byte[] bytes) { attributeIndex = VarintUtils.readUnsignedInt(ByteBuffer.wrap(bytes)); } @Override public String apply(final SimpleFeature t) { return (String) t.getAttribute(attributeIndex); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/AbstractSimpleFeatureIngestFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.ingest;

import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;
import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;
import org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi;
import org.locationtech.geowave.core.store.ingest.IngestFormatOptions;
import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;
import org.opengis.feature.simple.SimpleFeature;
import com.beust.jcommander.ParametersDelegate;

/**
 * Base class for simple-feature ingest format providers. A single plugin instance type backs all
 * three plugin flavors (Avro, HDFS, local file); each factory method wires the shared options
 * (CQL filter, type name, serialization format, geometry simplification) into a new instance.
 */
public abstract class AbstractSimpleFeatureIngestFormat implements IngestFormatPluginProviderSpi {
  protected final SerializableSimpleFeatureIngestOptions myOptions =
      new SerializableSimpleFeatureIngestOptions();

  // creates a plugin instance and copies all option providers from myOptions into it
  private AbstractSimpleFeatureIngestPlugin getInstance(final IngestFormatOptions options) {
    final AbstractSimpleFeatureIngestPlugin myInstance = newPluginInstance(options);
    myInstance.setFilterProvider(myOptions.getCqlFilterOptionProvider());
    myInstance.setTypeNameProvider(myOptions.getTypeNameOptionProvider());
    myInstance.setSerializationFormatProvider(myOptions.getSerializationFormatOptionProvider());
    myInstance.setGeometrySimpOptionProvider(myOptions.getGeometrySimpOptionProvider());
    return myInstance;
  }

  /** Subclasses supply the concrete plugin for their format. */
  protected abstract AbstractSimpleFeatureIngestPlugin newPluginInstance(
      IngestFormatOptions options);

  @Override
  public GeoWaveAvroFormatPlugin createAvroFormatPlugin(final IngestFormatOptions options) {
    return getInstance(options);
  }

  @Override
  public IngestFromHdfsPlugin createIngestFromHdfsPlugin(final IngestFormatOptions options) {
    return getInstance(options);
  }

  @Override
  public LocalFileIngestPlugin createLocalFileIngestPlugin(final IngestFormatOptions options) {
    return getInstance(options);
  }

  /**
   * Create an options instance. We may want to change this code from a singleton instance to
   * actually allow multiple instances per format.
   */
  @Override
  public IngestFormatOptions createOptionsInstances() {
    myOptions.setPluginOptions(internalGetIngestFormatOptionProviders());
    return myOptions;
  }

  // hook for subclasses to contribute format-specific option providers; default none
  protected Object internalGetIngestFormatOptionProviders() {
    return null;
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/AbstractSimpleFeatureIngestPlugin.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.ingest;

import java.net.URL;
import java.nio.ByteBuffer;
import java.util.Iterator;
import org.apache.commons.lang.ArrayUtils;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin;
import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin;
import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestWithMapper;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.CloseableIteratorWrapper;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.ingest.GeoWaveData;
import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;

/**
 * Base class for simple-feature ingest plugins. Handles persisting the shared option providers
 * (CQL filter, type name, serialization format, geometry simplification) and converting raw input
 * objects into filtered GeoWaveData entries; subclasses supply the feature types and the
 * per-object conversion.
 */
public abstract class AbstractSimpleFeatureIngestPlugin implements
    LocalFileIngestPlugin,
    IngestFromHdfsPlugin,
    GeoWaveAvroFormatPlugin,
    Persistable {

  protected CQLFilterOptionProvider filterOptionProvider = new CQLFilterOptionProvider();
  protected FeatureSerializationOptionProvider serializationFormatOptionProvider =
      new FeatureSerializationOptionProvider();
  protected TypeNameOptionProvider typeNameProvider = new TypeNameOptionProvider();
  protected GeometrySimpOptionProvider simpOptionProvider = new GeometrySimpOptionProvider();

  public void setFilterProvider(final CQLFilterOptionProvider filterOptionProvider) {
    this.filterOptionProvider = filterOptionProvider;
  }

  public void setSerializationFormatProvider(
      final FeatureSerializationOptionProvider serializationFormatOptionProvider) {
    this.serializationFormatOptionProvider = serializationFormatOptionProvider;
  }

  public void setTypeNameProvider(final TypeNameOptionProvider typeNameProvider) {
    this.typeNameProvider = typeNameProvider;
  }

  public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider geometryProvider) {
    this.simpOptionProvider = geometryProvider;
  }

  @Override
  public byte[] toBinary() {
    // layout: [serialization-format bytes][varint len][filter][varint len][typeName][simp]
    // the simplification options are written last without a length prefix, so only the
    // first two payloads need varint length headers
    final byte[] filterBinary = filterOptionProvider.toBinary();
    final byte[] typeNameBinary = typeNameProvider.toBinary();
    final byte[] simpBinary = simpOptionProvider.toBinary();
    final byte[] backingBuffer =
        new byte[filterBinary.length
            + typeNameBinary.length
            + simpBinary.length
            + VarintUtils.unsignedIntByteLength(filterBinary.length)
            + VarintUtils.unsignedIntByteLength(typeNameBinary.length)];
    final ByteBuffer buf = ByteBuffer.wrap(backingBuffer);
    VarintUtils.writeUnsignedInt(filterBinary.length, buf);
    buf.put(filterBinary);
    VarintUtils.writeUnsignedInt(typeNameBinary.length, buf);
    buf.put(typeNameBinary);
    buf.put(simpBinary);
    return ArrayUtils.addAll(serializationFormatOptionProvider.toBinary(), backingBuffer);
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    // mirrors toBinary(): the serialization-format provider contributes exactly the leading
    // byte (bytes[0]); the remainder is the length-prefixed payloads
    final byte[] otherBytes = new byte[bytes.length - 1];
    System.arraycopy(bytes, 1, otherBytes, 0, otherBytes.length);
    final byte[] kryoBytes = new byte[] {bytes[0]};
    final ByteBuffer buf = ByteBuffer.wrap(otherBytes);
    final int filterBinaryLength = VarintUtils.readUnsignedInt(buf);
    final byte[] filterBinary = ByteArrayUtils.safeRead(buf, filterBinaryLength);
    final int typeNameBinaryLength = VarintUtils.readUnsignedInt(buf);
    final byte[] typeNameBinary = ByteArrayUtils.safeRead(buf, typeNameBinaryLength);
    // geometry simplification options consume whatever is left (no length prefix)
    final byte[] geometrySimpBinary = new byte[buf.remaining()];
    buf.get(geometrySimpBinary);
    serializationFormatOptionProvider = new FeatureSerializationOptionProvider();
    serializationFormatOptionProvider.fromBinary(kryoBytes);
    filterOptionProvider = new CQLFilterOptionProvider();
    filterOptionProvider.fromBinary(filterBinary);
    typeNameProvider = new TypeNameOptionProvider();
    typeNameProvider.fromBinary(typeNameBinary);
    simpOptionProvider = new GeometrySimpOptionProvider();
    simpOptionProvider.fromBinary(geometrySimpBinary);
  }

  /** Subclasses may override to supply a different adapter implementation for a type. */
  protected DataTypeAdapter newAdapter(final SimpleFeatureType type) {
    return new FeatureDataAdapter(type);
  }

  /** The feature types this plugin can produce. */
  protected abstract SimpleFeatureType[] getTypes();

  @Override
  public DataTypeAdapter[] getDataAdapters() {
    final SimpleFeatureType[] types = getTypes();
    final DataTypeAdapter[] retVal = new DataTypeAdapter[types.length];
    for (int i = 0; i < types.length; i++) {
      retVal[i] = newAdapter(types[i]);
    }
    return retVal;
  }

  @Override
  public CloseableIterator> toGeoWaveData(final URL input, final String[] indexNames) {
    final CloseableIterator hdfsObjects = toAvroObjects(input);
    // Lazily flattens each avro object's per-object iterator into a single stream,
    // applying the configured filters to every element; closes exhausted sub-iterators
    // as it goes and the underlying object iterator on close().
    return new CloseableIterator>() {
      CloseableIterator> currentIterator = null;
      GeoWaveData next = null;

      // advances 'next' to the following element, pulling from the current sub-iterator
      // first and then from subsequent avro objects until one yields data
      private void computeNext() {
        if (next == null) {
          if (currentIterator != null) {
            if (currentIterator.hasNext()) {
              next = currentIterator.next();
              return;
            } else {
              currentIterator.close();
              currentIterator = null;
            }
          }
          while (hdfsObjects.hasNext()) {
            final I hdfsObject = hdfsObjects.next();
            currentIterator =
                wrapIteratorWithFilters(toGeoWaveDataInternal(hdfsObject, indexNames));
            if (currentIterator.hasNext()) {
              next = currentIterator.next();
              return;
            } else {
              currentIterator.close();
              currentIterator = null;
            }
          }
        }
      }

      @Override
      public boolean hasNext() {
        computeNext();
        return next != null;
      }

      @Override
      public GeoWaveData next() {
        computeNext();
        // consume the buffered element
        final GeoWaveData retVal = next;
        next = null;
        return retVal;
      }

      @Override
      public void close() {
        hdfsObjects.close();
      }
    };
  }

  /**
   * Wraps the raw data iterator with the configured type-name and CQL filters and applies
   * geometry simplification; returns the input unchanged when no filtering is configured.
   */
  protected CloseableIterator> wrapIteratorWithFilters(
      final CloseableIterator> geowaveData) {
    // only use each provider when it is actually configured with a non-blank value
    final CQLFilterOptionProvider internalFilterProvider;
    if ((filterOptionProvider != null)
        && (filterOptionProvider.getCqlFilterString() != null)
        && !filterOptionProvider.getCqlFilterString().trim().isEmpty()) {
      internalFilterProvider = filterOptionProvider;
    } else {
      internalFilterProvider = null;
    }
    final TypeNameOptionProvider internalTypeNameProvider;
    if ((typeNameProvider != null)
        && (typeNameProvider.getTypeName() != null)
        && !typeNameProvider.getTypeName().trim().isEmpty()) {
      internalTypeNameProvider = typeNameProvider;
    } else {
      internalTypeNameProvider = null;
    }
    final GeometrySimpOptionProvider internalSimpOptionProvider;
    if ((simpOptionProvider != null)) {
      internalSimpOptionProvider = simpOptionProvider;
    } else {
      internalSimpOptionProvider = null;
    }
    if ((internalFilterProvider != null) || (internalTypeNameProvider != null)) {
      final Iterator> it =
          Iterators.filter(geowaveData, new Predicate>() {
            @Override
            public boolean apply(final GeoWaveData input) {
              if ((internalTypeNameProvider != null)
                  && !internalTypeNameProvider.typeNameMatches(input.getTypeName())) {
                return false;
              }
              if ((internalFilterProvider != null)
                  && !internalFilterProvider.evaluate(input.getValue())) {
                return false;
              }
              if ((internalSimpOptionProvider != null)) {
                // simplify the geometry in place; drop the feature if the simplified
                // geometry fails the provider's filter
                final Geometry simpGeom = internalSimpOptionProvider.simplifyGeometry(
                    (Geometry) input.getValue().getDefaultGeometry());
                if (!internalSimpOptionProvider.filterGeometry(simpGeom)) {
                  return false;
                }
                input.getValue().setDefaultGeometry(simpGeom);
              }
              return true;
            }
          });
      return new CloseableIteratorWrapper<>(geowaveData, it);
    }
    return geowaveData;
  }

  /** Converts one raw input object into GeoWaveData entries for the given indices. */
  protected abstract CloseableIterator> toGeoWaveDataInternal(
      final I hdfsObject,
      final String[] indexNames);

  /**
   * Adapter that exposes a parent ingest plugin as an IngestWithMapper by delegating
   * conversion, adapters, serialization, and supported index types to the parent.
   */
  public abstract static class AbstractIngestSimpleFeatureWithMapper implements IngestWithMapper {
    protected AbstractSimpleFeatureIngestPlugin parentPlugin;

    public AbstractIngestSimpleFeatureWithMapper(
        final AbstractSimpleFeatureIngestPlugin parentPlugin) {
      this.parentPlugin = parentPlugin;
    }

    @Override
    public DataTypeAdapter[] getDataAdapters() {
      return parentPlugin.getDataAdapters();
    }

    @Override
    public CloseableIterator> toGeoWaveData(final I input, final String[] indexNames) {
      return parentPlugin.wrapIteratorWithFilters(
          parentPlugin.toGeoWaveDataInternal(input, indexNames));
    }

    @Override
    public byte[] toBinary() {
      return parentPlugin.toBinary();
    }

    @Override
    public void fromBinary(final byte[] bytes) {
      parentPlugin.fromBinary(bytes);
    }

    @Override
    public String[] getSupportedIndexTypes() {
      return parentPlugin.getSupportedIndexTypes();
    }
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/CQLFilterOptionProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.opengis.filter.Filter; import org.opengis.filter.FilterVisitor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; /** Supports converting the filter string to Filter object. */ public class CQLFilterOptionProvider implements Filter, Persistable { private static final Logger LOGGER = LoggerFactory.getLogger(CQLFilterOptionProvider.class); @Parameter( names = "--cql", description = "A CQL filter, only data matching this filter will be ingested", converter = ConvertCQLStrToFilterConverter.class) private FilterParameter convertedFilter = new FilterParameter(null, null); public CQLFilterOptionProvider() { super(); } public String getCqlFilterString() { return convertedFilter.getCqlFilterString(); } @Override public byte[] toBinary() { if (convertedFilter.getCqlFilterString() == null) { return new byte[] {}; } return StringUtils.stringToBinary(convertedFilter.getCqlFilterString()); } @Override public void fromBinary(final byte[] bytes) { if (bytes.length > 0) { // This has the side-effect of setting the 'filter' member // variable. 
convertedFilter = new ConvertCQLStrToFilterConverter().convert(StringUtils.stringFromBinary(bytes)); } else { convertedFilter.setCqlFilterString(null); convertedFilter.setFilter(null); } } @Override public boolean evaluate(final Object object) { if (convertedFilter.getFilter() == null) { return true; } return convertedFilter.getFilter().evaluate(object); } @Override public Object accept(final FilterVisitor visitor, final Object extraData) { if (convertedFilter.getFilter() == null) { if (visitor != null) { return visitor.visitNullFilter(extraData); } return extraData; } return convertedFilter.getFilter().accept(visitor, extraData); } private static Filter asFilter(final String cqlPredicate) throws CQLException { return ECQL.toFilter(cqlPredicate); } /** This class will ensure that as the CQLFilterString is read in and converted to a filter. */ public static class ConvertCQLStrToFilterConverter extends GeoWaveBaseConverter { public ConvertCQLStrToFilterConverter() { super(""); } public ConvertCQLStrToFilterConverter(final String optionName) { super(optionName); } @Override public FilterParameter convert(String value) { Filter convertedFilter = null; if (value != null) { try { convertedFilter = asFilter(value); } // HP Fortify "Log Forging" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway catch (final CQLException e) { LOGGER.error("Cannot parse CQL expression '" + value + "'", e); // value = null; // convertedFilter = null; throw new ParameterException("Cannot parse CQL expression '" + value + "'", e); } } else { value = null; } return new FilterParameter(value, convertedFilter); } } public static class FilterParameter { private String cqlFilterString; private Filter filter; public FilterParameter(final String cqlFilterString, final Filter filter) { super(); this.cqlFilterString = cqlFilterString; this.filter = filter; } public String getCqlFilterString() { return cqlFilterString; } public void 
setCqlFilterString(final String cqlFilterString) { this.cqlFilterString = cqlFilterString; } public Filter getFilter() { return filter; } public void setFilter(final Filter filter) { this.filter = filter; } @Override public String toString() { return cqlFilterString; } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/DataSchemaOptionProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.ingest.IngestFormatOptions; import com.beust.jcommander.Parameter; public class DataSchemaOptionProvider implements Persistable, IngestFormatOptions { @Parameter( names = "--extended", description = "A flag to indicate whether extended data format should be used") private boolean includeSupplementalFields = false; public boolean includeSupplementalFields() { return includeSupplementalFields; } @Override public byte[] toBinary() { return new byte[] {includeSupplementalFields ? (byte) 1 : (byte) 0}; } @Override public void fromBinary(final byte[] bytes) { if ((bytes != null) && (bytes.length > 0)) { if (bytes[0] == 1) { includeSupplementalFields = true; } } } /** */ public void setSupplementalFields(final boolean supplementalFields) { includeSupplementalFields = supplementalFields; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/FeatureSerializationOptionProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import org.locationtech.geowave.core.index.persist.Persistable; import com.beust.jcommander.Parameter; public class FeatureSerializationOptionProvider implements Persistable { @Parameter( names = "--avro", description = "A flag to indicate whether avro feature serialization should be used") private boolean avro = false; public boolean isAvro() { return avro; } @Override public byte[] toBinary() { return new byte[] {avro ? (byte) 1 : (byte) 0}; } @Override public void fromBinary(final byte[] bytes) { if ((bytes != null) && (bytes.length > 0)) { if (bytes[0] == 1) { avro = true; } } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/GeometrySimpOptionProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.simplify.DouglasPeuckerSimplifier; import com.beust.jcommander.Parameter; public class GeometrySimpOptionProvider implements Persistable { @Parameter( names = "--maxVertices", description = "Maximum number of vertices to allow for the feature. Features with over this vertice count will be discarded.") private int maxVertices = Integer.MAX_VALUE; @Parameter( names = "--minSimpVertices", description = "Minimum vertex count to qualify for geometry simplification.") private int simpVertMin = Integer.MAX_VALUE; @Parameter( names = "--tolerance", description = "Maximum error tolerance in geometry simplification. Should range from 0.0 to 1.0 (i.e. 
.1 = 10%)") private double tolerance = 0.02; public Geometry simplifyGeometry(final Geometry geom) { if (geom.getCoordinates().length > simpVertMin) { return DouglasPeuckerSimplifier.simplify(geom, tolerance); } return geom; } public boolean filterGeometry(final Geometry geom) { return ((geom.getCoordinates().length < maxVertices) && !geom.isEmpty() && geom.isValid()); } @Override public byte[] toBinary() { final byte[] backingBuffer = new byte[VarintUtils.unsignedIntByteLength(maxVertices) + VarintUtils.unsignedIntByteLength(simpVertMin) + Double.BYTES]; final ByteBuffer buf = ByteBuffer.wrap(backingBuffer); VarintUtils.writeUnsignedInt(maxVertices, buf); VarintUtils.writeUnsignedInt(simpVertMin, buf); buf.putDouble(tolerance); return backingBuffer; } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); maxVertices = VarintUtils.readUnsignedInt(buf); simpVertMin = VarintUtils.readUnsignedInt(buf); tolerance = buf.getDouble(); } public int getMaxVertices() { return maxVertices; } public int getSimpLimit() { return simpVertMin; } public double getTolerance() { return tolerance; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/MinimalSimpleFeatureIngestFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import org.locationtech.geowave.core.ingest.avro.AvroWholeFile; import org.locationtech.geowave.core.ingest.avro.GeoWaveAvroFormatPlugin; import org.locationtech.geowave.core.ingest.hdfs.mapreduce.IngestFromHdfsPlugin; import org.locationtech.geowave.core.ingest.spi.IngestFormatPluginProviderSpi; import org.locationtech.geowave.core.store.ingest.IngestFormatOptions; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.opengis.feature.simple.SimpleFeature; public abstract class MinimalSimpleFeatureIngestFormat implements IngestFormatPluginProviderSpi { protected final SimpleFeatureIngestOptions myOptions = new SimpleFeatureIngestOptions(); private MinimalSimpleFeatureIngestPlugin getInstance(final IngestFormatOptions options) { final MinimalSimpleFeatureIngestPlugin myInstance = newPluginInstance(options); myInstance.setFilterProvider(myOptions.getCqlFilterOptionProvider()); myInstance.setTypeNameProvider(myOptions.getTypeNameOptionProvider()); myInstance.setGeometrySimpOptionProvider(myOptions.getGeometrySimpOptionProvider()); return myInstance; } protected abstract MinimalSimpleFeatureIngestPlugin newPluginInstance( IngestFormatOptions options); @Override public GeoWaveAvroFormatPlugin createAvroFormatPlugin( final IngestFormatOptions options) { throw new UnsupportedOperationException("Avro format is unsupported for this plugin."); } @Override public IngestFromHdfsPlugin createIngestFromHdfsPlugin( final IngestFormatOptions options) { throw new UnsupportedOperationException("Ingest from HDFS is unsupported for this plugin."); } 
@Override public LocalFileIngestPlugin createLocalFileIngestPlugin( final IngestFormatOptions options) { return getInstance(options); } /** * Create an options instance. We may want to change this code from a singleton instance to * actually allow multiple instances per format. */ @Override public IngestFormatOptions createOptionsInstances() { myOptions.setPluginOptions(internalGetIngestFormatOptionProviders()); return myOptions; } protected Object internalGetIngestFormatOptionProviders() { return null; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/MinimalSimpleFeatureIngestPlugin.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import java.net.URL; import java.nio.ByteBuffer; import java.util.Iterator; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.ingest.GeoWaveData; import org.locationtech.geowave.core.store.ingest.LocalFileIngestPlugin; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Predicate; import com.google.common.collect.Iterators; /* */ public abstract class MinimalSimpleFeatureIngestPlugin implements LocalFileIngestPlugin, Persistable { private static final Logger LOGGER = LoggerFactory.getLogger(MinimalSimpleFeatureIngestPlugin.class); protected CQLFilterOptionProvider filterOptionProvider = new CQLFilterOptionProvider(); protected TypeNameOptionProvider typeNameProvider = new TypeNameOptionProvider(); protected GeometrySimpOptionProvider simpOptionProvider = new 
GeometrySimpOptionProvider(); public void setFilterProvider(final CQLFilterOptionProvider filterOptionProvider) { this.filterOptionProvider = filterOptionProvider; } public void setTypeNameProvider(final TypeNameOptionProvider typeNameProvider) { this.typeNameProvider = typeNameProvider; } public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider geometryProvider) { this.simpOptionProvider = geometryProvider; } @Override public byte[] toBinary() { final byte[] filterBinary = filterOptionProvider.toBinary(); final byte[] typeNameBinary = typeNameProvider.toBinary(); final byte[] simpBinary = simpOptionProvider.toBinary(); final ByteBuffer buf = ByteBuffer.allocate( filterBinary.length + typeNameBinary.length + simpBinary.length + VarintUtils.unsignedIntByteLength(filterBinary.length) + VarintUtils.unsignedIntByteLength(typeNameBinary.length) + VarintUtils.unsignedIntByteLength(simpBinary.length)); VarintUtils.writeUnsignedInt(filterBinary.length, buf); buf.put(filterBinary); VarintUtils.writeUnsignedInt(typeNameBinary.length, buf); buf.put(typeNameBinary); VarintUtils.writeUnsignedInt(simpBinary.length, buf); buf.put(simpBinary); return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int filterBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] filterBinary = ByteArrayUtils.safeRead(buf, filterBinaryLength); final int typeNameBinaryLength = VarintUtils.readUnsignedInt(buf); final byte[] typeNameBinary = ByteArrayUtils.safeRead(buf, typeNameBinaryLength); final int geometrySimpLength = VarintUtils.readUnsignedInt(buf); final byte[] geometrySimpBinary = ByteArrayUtils.safeRead(buf, geometrySimpLength); filterOptionProvider = new CQLFilterOptionProvider(); filterOptionProvider.fromBinary(filterBinary); typeNameProvider = new TypeNameOptionProvider(); typeNameProvider.fromBinary(typeNameBinary); simpOptionProvider = new GeometrySimpOptionProvider(); 
simpOptionProvider.fromBinary(geometrySimpBinary); } @Override public String[] getFileExtensionFilters() { return new String[0]; } @Override public void init(URL url) {} @Override public Index[] getRequiredIndices() { return new Index[] {}; } @Override public String[] getSupportedIndexTypes() { return new String[] {SpatialField.DEFAULT_GEOMETRY_FIELD_NAME, TimeField.DEFAULT_FIELD_ID}; } protected DataTypeAdapter newAdapter(final SimpleFeatureType type) { return new FeatureDataAdapter(type); } protected abstract SimpleFeatureType[] getTypes(); protected abstract CloseableIterator getFeatures(URL input); @Override public DataTypeAdapter[] getDataAdapters() { final SimpleFeatureType[] types = getTypes(); final DataTypeAdapter[] retVal = new FeatureDataAdapter[types.length]; for (int i = 0; i < types.length; i++) { retVal[i] = newAdapter(types[i]); } return retVal; } @Override public CloseableIterator> toGeoWaveData( final URL input, final String[] indexNames) { final CloseableIterator filteredFeatures = applyFilters(getFeatures(input)); return toGeoWaveDataInternal(filteredFeatures, indexNames); } private CloseableIterator applyFilters( final CloseableIterator source) { final CQLFilterOptionProvider internalFilterProvider; if ((filterOptionProvider != null) && (filterOptionProvider.getCqlFilterString() != null) && !filterOptionProvider.getCqlFilterString().trim().isEmpty()) { internalFilterProvider = filterOptionProvider; } else { internalFilterProvider = null; } final TypeNameOptionProvider internalTypeNameProvider; if ((typeNameProvider != null) && (typeNameProvider.getTypeName() != null) && !typeNameProvider.getTypeName().trim().isEmpty()) { internalTypeNameProvider = typeNameProvider; } else { internalTypeNameProvider = null; } final GeometrySimpOptionProvider internalSimpOptionProvider; if ((simpOptionProvider != null)) { internalSimpOptionProvider = simpOptionProvider; } else { internalSimpOptionProvider = null; } if ((internalFilterProvider != null) || 
(internalTypeNameProvider != null)) { final Iterator it = Iterators.filter(source, new Predicate() { @Override public boolean apply(final SimpleFeature input) { if ((internalTypeNameProvider != null) && !internalTypeNameProvider.typeNameMatches(input.getFeatureType().getTypeName())) { return false; } if ((internalFilterProvider != null) && !internalFilterProvider.evaluate(input)) { return false; } if ((internalSimpOptionProvider != null)) { final Geometry simpGeom = internalSimpOptionProvider.simplifyGeometry((Geometry) input.getDefaultGeometry()); if (!internalSimpOptionProvider.filterGeometry(simpGeom)) { return false; } input.setDefaultGeometry(simpGeom); } return true; } }); return new CloseableIteratorWrapper<>(source, it); } return source; } private CloseableIterator> toGeoWaveDataInternal( final CloseableIterator source, final String[] indexNames) { final Iterator> geowaveData = Iterators.transform(source, feature -> { return new GeoWaveData<>(feature.getFeatureType().getTypeName(), indexNames, feature); }); return new CloseableIteratorWrapper<>(source, geowaveData); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/SerializableSimpleFeatureIngestOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.ingest;

import com.beust.jcommander.ParametersDelegate;

/**
 * An extension of simple feature ingest options that provides additional serialization options to
 * be specified.
 */
public class SerializableSimpleFeatureIngestOptions extends SimpleFeatureIngestOptions {

  // delegated so JCommander exposes the provider's '--avro' flag alongside the base options
  @ParametersDelegate
  private FeatureSerializationOptionProvider serializationFormatOptionProvider =
      new FeatureSerializationOptionProvider();

  public FeatureSerializationOptionProvider getSerializationFormatOptionProvider() {
    return serializationFormatOptionProvider;
  }

  public void setSerializationFormatOptionProvider(
      final FeatureSerializationOptionProvider serializationFormatOptionProvider) {
    this.serializationFormatOptionProvider = serializationFormatOptionProvider;
  }
}


================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/SimpleFeatureIngestOptions.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.ingest;

import org.locationtech.geowave.core.store.ingest.IngestFormatOptions;
import com.beust.jcommander.ParametersDelegate;

/** This class is a holder class for options used in AbstractSimpleFeatureIngest. */
public class SimpleFeatureIngestOptions implements IngestFormatOptions {

  // each provider is a JCommander delegate so its flags appear on the ingest command line

  @ParametersDelegate
  private CQLFilterOptionProvider cqlFilterOptionProvider = new CQLFilterOptionProvider();

  @ParametersDelegate
  private TypeNameOptionProvider typeNameOptionProvider = new TypeNameOptionProvider();

  @ParametersDelegate
  private GeometrySimpOptionProvider simpOptionProvider = new GeometrySimpOptionProvider();

  // format-specific option object supplied by the concrete ingest format (may stay null)
  @ParametersDelegate
  private Object pluginOptions = null;

  public SimpleFeatureIngestOptions() {}

  public GeometrySimpOptionProvider getGeometrySimpOptionProvider() {
    return simpOptionProvider;
  }

  public void setGeometrySimpOptionProvider(final GeometrySimpOptionProvider simpOptionProvider) {
    this.simpOptionProvider = simpOptionProvider;
  }

  public CQLFilterOptionProvider getCqlFilterOptionProvider() {
    return cqlFilterOptionProvider;
  }

  public void setCqlFilterOptionProvider(final CQLFilterOptionProvider cqlFilterOptionProvider) {
    this.cqlFilterOptionProvider = cqlFilterOptionProvider;
  }

  public TypeNameOptionProvider getTypeNameOptionProvider() {
    return typeNameOptionProvider;
  }

  public void setTypeNameOptionProvider(final TypeNameOptionProvider typeNameOptionProvider) {
    this.typeNameOptionProvider = typeNameOptionProvider;
  }

  public Object getPluginOptions() {
    return pluginOptions;
  }

  public void setPluginOptions(final Object pluginOptions) {
    this.pluginOptions = pluginOptions;
  }
}
================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/ingest/TypeNameOptionProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.ingest; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.persist.Persistable; import com.beust.jcommander.Parameter; public class TypeNameOptionProvider implements Persistable { @Parameter( names = "--typename", description = "A comma-delimitted set of typenames to ingest, feature types matching the specified typenames will be ingested (optional, by default all types will be ingested)") private String typename = null; private String[] typenames = null; public String getTypeName() { return typename; } public boolean typeNameMatches(final String typeName) { String[] internalTypenames; synchronized (this) { if (typenames == null) { typenames = typename.split(","); } internalTypenames = typenames; } for (final String t : internalTypenames) { if (t.equalsIgnoreCase(typeName)) { return true; } } return false; } @Override public byte[] toBinary() { if (typename == null) { return new byte[] {}; } return StringUtils.stringToBinary(typename); } @Override public void fromBinary(final byte[] bytes) { if (bytes.length > 0) { typename = StringUtils.stringFromBinary(bytes); } else { typename = null; } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/DecimationProcess.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import org.geotools.data.Query;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.process.ProcessException;
import org.geotools.process.factory.DescribeParameter;
import org.geotools.process.factory.DescribeProcess;
import org.geotools.process.factory.DescribeResult;
import org.geotools.process.vector.VectorProcess;
import org.geotools.util.factory.Hints;
import org.opengis.coverage.grid.GridGeometry;

/**
 * This class can be used as a GeoTools Render Transform ('nga:Decimation') within an SLD on any
 * layer that uses the GeoWave Data Store. An example SLD is provided
 * (example-slds/DecimatePoints.sld). The pixel-size allows you to skip more than a single pixel.
 * For example, a pixel size of 3 would skip an estimated 3x3 pixel cell in GeoWave's row IDs. Note
 * that rows are only skipped when a feature successfully passes filters.
 */
@SuppressWarnings("deprecation")
@DescribeProcess(
    title = "DecimateToPixelResolution",
    description = "This process will enable GeoWave to decimate WMS rendering down to pixel resolution to not oversample data. This will efficiently render overlapping geometry that would otherwise be hidden but it assume an opaque style and does not take transparency into account.")
public class DecimationProcess implements VectorProcess {

  // query hints consumed by the GeoWave data store to perform the map-to-screen decimation
  public static final Hints.Key PIXEL_SIZE = new Hints.Key(Double.class);
  public static final Hints.Key OUTPUT_BBOX = new Hints.Key(ReferencedEnvelope.class);
  public static final Hints.Key OUTPUT_WIDTH = new Hints.Key(Integer.class);
  public static final Hints.Key OUTPUT_HEIGHT = new Hints.Key(Integer.class);

  @DescribeResult(
      name = "result",
      description = "This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform")
  public SimpleFeatureCollection execute(
      @DescribeParameter(
          name = "data",
          description = "Feature collection containing the data") final SimpleFeatureCollection features,
      @DescribeParameter(
          name = "outputBBOX",
          description = "Georeferenced bounding box of the output") final ReferencedEnvelope argOutputEnv,
      @DescribeParameter(
          name = "outputWidth",
          description = "Width of the output raster") final Integer argOutputWidth,
      @DescribeParameter(
          name = "outputHeight",
          description = "Height of the output raster") final Integer argOutputHeight,
      @DescribeParameter(
          name = "pixelSize",
          description = "The pixel size to decimate by") final Double pixelSize)
      throws ProcessException {
    // vector-to-vector render transform that is just a pass through - the
    // key is to add map to screen transform within invertQuery
    return features;
  }

  // called by the GeoTools render-transform machinery before data is read;
  // attaches the raster dimensions (and optional pixel size) as hints so the
  // GeoWave data store can decimate during the scan
  public Query invertQuery(
      @DescribeParameter(
          name = "outputBBOX",
          description = "Georeferenced bounding box of the output") final ReferencedEnvelope argOutputEnv,
      @DescribeParameter(
          name = "outputWidth",
          description = "Width of the output raster") final Integer argOutputWidth,
      @DescribeParameter(
          name = "outputHeight",
          description = "Height of the output raster") final Integer argOutputHeight,
      @DescribeParameter(
          name = "pixelSize",
          description = "The pixel size to decimate by") final Double pixelSize,
      final Query targetQuery,
      final GridGeometry targetGridGeometry) throws ProcessException {
    // add to the query hints
    targetQuery.getHints().put(OUTPUT_WIDTH, argOutputWidth);
    targetQuery.getHints().put(OUTPUT_HEIGHT, argOutputHeight);
    targetQuery.getHints().put(OUTPUT_BBOX, argOutputEnv);
    if (pixelSize != null) {
      targetQuery.getHints().put(PIXEL_SIZE, pixelSize);
    }
    return targetQuery;
  }
}


================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/DistributedRenderProcess.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import org.geotools.data.Query; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.process.ProcessException; import org.geotools.process.factory.DescribeParameter; import org.geotools.process.factory.DescribeProcess; import org.geotools.process.factory.DescribeResult; import org.geotools.util.factory.Hints; import org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions; import org.opengis.coverage.grid.GridGeometry; /** * This class can be used as a GeoTools Render Transform ('geowave:DistributedRender') within an SLD * on any layer that uses the GeoWave Data Store. An example SLD is provided * (example-slds/DistributedRender.sld). 
*/ @DescribeProcess( title = "DistributedRender", description = "This process will enable GeoWave to render WMS requests within the server and then this will be responsible for compositing the result client-side.") public class DistributedRenderProcess { public static final String PROCESS_NAME = "geowave:DistributedRender"; public static final Hints.Key OPTIONS = new Hints.Key(DistributedRenderOptions.class); @DescribeResult( name = "result", description = "This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform") public SimpleFeatureCollection execute( @DescribeParameter( name = "data", description = "Feature collection containing the rendered image") final SimpleFeatureCollection features) throws ProcessException { // this is a pass through, only used so that legend rendering works // appropriately // InternalDistributedRenderProcess is what actually can be used as a // render transformation to perform distributed rendering, within WMS // map request callbacks this transformation will be replaced with // InternalDistributedRenderProcess // therefore all other calls outside of WMS map requests, such as // requesting the legend will behave as expected return features; } public Query invertQuery(final Query targetQuery, final GridGeometry targetGridGeometry) throws ProcessException { return targetQuery; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveDataStoreComponents.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI.QueryHint;
import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransaction;
import org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;
import org.locationtech.geowave.adapter.vector.plugin.transaction.TransactionsAllocator;
import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter;
import org.locationtech.geowave.core.geotime.store.dimension.CustomCrsIndexModel;
import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.geotime.util.SpatialIndexUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.VisibilityHandler;
import org.locationtech.geowave.core.store.api.Writer;
import org.locationtech.geowave.core.store.data.visibility.GlobalVisibilityHandler;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import com.google.common.collect.Maps;

/**
 * Bundles the GeoWave store objects (data store, index store, statistics store, adapter-to-index
 * mapping store), the feature adapter, and the owning GeoTools data store, and exposes the
 * read/write/delete primitives used by the GeoWave GeoTools plugin.
 */
public class GeoWaveDataStoreComponents {
  private final InternalGeotoolsFeatureDataAdapter adapter;
  private final DataStore dataStore;
  private final IndexStore indexStore;
  private final DataStatisticsStore dataStatisticsStore;
  private final AdapterIndexMappingStore indexMappingStore;
  private final GeoWaveGTDataStore gtStore;
  private final TransactionsAllocator transactionAllocator;
  // CRS of the preferred indices; populated as a side effect of getPreferredIndices()
  private CoordinateReferenceSystem crs = null;
  private final SimpleFeatureType featureType;
  private final Index[] adapterIndices;

  public GeoWaveDataStoreComponents(
      final DataStore dataStore,
      final DataStatisticsStore dataStatisticsStore,
      final AdapterIndexMappingStore indexMappingStore,
      final IndexStore indexStore,
      final InternalGeotoolsFeatureDataAdapter adapter,
      final GeoWaveGTDataStore gtStore,
      final TransactionsAllocator transactionAllocator) {
    this.adapter = adapter;
    this.dataStore = dataStore;
    this.indexStore = indexStore;
    this.dataStatisticsStore = dataStatisticsStore;
    this.indexMappingStore = indexMappingStore;
    this.gtStore = gtStore;
    // NOTE: getPreferredIndices() also initializes the 'crs' field as a side effect, so it
    // must run before the CRS comparison below
    this.adapterIndices = getPreferredIndices();
    CoordinateReferenceSystem adapterCRS = adapter.getFeatureType().getCoordinateReferenceSystem();
    if (adapterCRS == null) {
      adapterCRS = GeometryUtils.getDefaultCRS();
    }
    // NOTE(review): assumes 'crs' is non-null at this point (set by getPreferredIndices());
    // if GeometryUtils.getIndexCrs() can return null this would NPE — confirm. Also, both
    // branches retype to an equal CRS when crs.equals(adapterCRS) holds, so the branch
    // looks redundant — confirm intent.
    if (crs.equals(adapterCRS)) {
      this.featureType = SimpleFeatureTypeBuilder.retype(adapter.getFeatureType(), adapterCRS);
    } else {
      this.featureType = SimpleFeatureTypeBuilder.retype(adapter.getFeatureType(), crs);
    }
    this.gtStore.setPreferredIndices(adapter, adapterIndices);
    this.transactionAllocator = transactionAllocator;
  }

  /**
   * Resolves the indices this adapter should use, preferring indices already mapped to the
   * adapter that share a single common CRS; falls back to the GeoTools store's preferred indices
   * when no suitable mapping exists. Side effect: initializes the 'crs' field.
   */
  private Index[] getPreferredIndices() {
    // For now just pick indices that match the CRS of the first spatial index we find
    final AdapterToIndexMapping[] indexMappings =
        indexMappingStore.getIndicesForAdapter(adapter.getAdapterId());
    Index[] preferredIndices = null;
    if ((indexMappings != null) && indexMappings.length > 0) {
      preferredIndices =
          Arrays.stream(indexMappings).map(mapping -> mapping.getIndex(indexStore)).filter(
              index -> {
                final CoordinateReferenceSystem indexCRS;
                if (index.getIndexModel() instanceof CustomCrsIndexModel) {
                  indexCRS = ((CustomCrsIndexModel) index.getIndexModel()).getCrs();
                } else if (SpatialIndexUtils.hasSpatialDimensions(index)) {
                  // spatial index without a custom CRS model implies the default CRS
                  indexCRS = GeometryUtils.getDefaultCRS();
                } else {
                  // non-spatial indices are never preferred here
                  return false;
                }
                // first spatial index seen pins the CRS; later indices must match it
                if (crs == null) {
                  crs = indexCRS;
                } else if (!crs.equals(indexCRS)) {
                  return false;
                }
                return true;
              }).toArray(Index[]::new);
    }
    if (preferredIndices == null || preferredIndices.length == 0) {
      preferredIndices = gtStore.getPreferredIndices(adapter);
      this.crs = GeometryUtils.getIndexCrs(preferredIndices[0]);
    }
    return preferredIndices;
  }

  @SuppressWarnings("unchecked")
  public void initForWrite() {
    // this is ensuring the adapter is properly initialized with the
    // indices and writing it to the adapterStore, in cases where the
    // featuredataadapter was created from geotools datastore's createSchema
    dataStore.addType(adapter, adapterIndices);
  }

  public CoordinateReferenceSystem getCRS() {
    return crs;
  }

  public SimpleFeatureType getFeatureType() {
    return featureType;
  }

  public IndexStore getIndexStore() {
    return indexStore;
  }

  public InternalGeotoolsFeatureDataAdapter getAdapter() {
    return adapter;
  }

  public DataStore getDataStore() {
    return dataStore;
  }

  public AdapterIndexMappingStore getAdapterIndexMappingStore() {
    return indexMappingStore;
  }

  public GeoWaveGTDataStore getGTstore() {
    return gtStore;
  }

  public Index[] getAdapterIndices() {
    return adapterIndices;
  }

  public DataStatisticsStore getStatsStore() {
    return dataStatisticsStore;
  }

  /**
   * Delegates index selection for the given query to the store's index query strategy.
   *
   * NOTE(review): the 'statisticsCache' parameter is unused in this body — confirm whether it is
   * kept for interface compatibility.
   */
  public CloseableIterator getIndices(
      final StatisticsCache statisticsCache,
      final BasicQueryByClass query,
      final boolean spatialOnly) {
    final GeoWaveGTDataStore gtStore = getGTstore();
    final Map queryHints = Maps.newHashMap();
    queryHints.put(
        QueryHint.MAX_RANGE_DECOMPOSITION,
        gtStore.getDataStoreOptions().getMaxRangeDecomposition());
    final Index[] indices = gtStore.getIndicesForAdapter(adapter, spatialOnly);
    if (spatialOnly && (indices.length == 0)) {
      throw new UnsupportedOperationException("Query required spatial index, but none were found.");
    }
    return gtStore.getIndexQueryStrategy().getIndices(
        dataStatisticsStore,
        indexMappingStore,
        query,
        indices,
        adapter,
        queryHints);
  }

  /** Deletes the given feature by its data ID under the transaction's authorizations. */
  public void remove(final SimpleFeature feature, final GeoWaveTransaction transaction)
      throws IOException {
    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
    dataStore.delete(
        bldr.setAuthorizations(transaction.composeAuthorizations()).addTypeName(
            adapter.getTypeName()).constraints(
                bldr.constraintsFactory().dataIds(adapter.getDataId(feature))).build());
  }

  /** Deletes the feature whose feature ID string maps to the given data ID. */
  public void remove(final String fid, final GeoWaveTransaction transaction) throws IOException {
    final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder();
    dataStore.delete(
        bldr.setAuthorizations(transaction.composeAuthorizations()).addTypeName(
            adapter.getTypeName()).constraints(
                bldr.constraintsFactory().dataIds(StringUtils.stringToBinary(fid))).build());
  }

  /**
   * Writes all features from the iterator under the transaction's visibility, recording each
   * written feature ID into 'fidList'.
   */
  @SuppressWarnings("unchecked")
  public void write(
      final Iterator featureIt,
      final Set fidList,
      final GeoWaveTransaction transaction) throws IOException {
    final VisibilityHandler visibilityHandler =
        new GlobalVisibilityHandler(transaction.composeVisibility());
    dataStore.addType(adapter, adapterIndices);
    try (Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) {
      while (featureIt.hasNext()) {
        final SimpleFeature feature = featureIt.next();
        fidList.add(feature.getID());
        indexWriter.write(feature, visibilityHandler);
      }
    }
  }

  /** Writes a single feature under the transaction's visibility. */
  public void writeCommit(final SimpleFeature feature, final GeoWaveTransaction transaction)
      throws IOException {
    final VisibilityHandler visibilityHandler =
        new GlobalVisibilityHandler(transaction.composeVisibility());
    dataStore.addType(adapter, adapterIndices);
    try (Writer indexWriter = dataStore.createWriter(adapter.getTypeName())) {
      indexWriter.write(feature, visibilityHandler);
    }
  }

  public String getTransaction() throws IOException {
    return transactionAllocator.getTransaction();
  }

  public void releaseTransaction(final String txID) throws IOException {
    transactionAllocator.releaseTransaction(txID);
  }
}


================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureCollection.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import java.io.IOException;
import java.util.Iterator;
import org.geotools.data.DataUtilities;
import org.geotools.data.FeatureReader;
import org.geotools.data.Query;
import org.geotools.data.store.DataFeatureCollection;
import org.geotools.feature.FeatureIterator;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions;
import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult;
import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;
import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor;
import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult;
import org.locationtech.geowave.core.geotime.util.ExtractTimeFilterVisitor;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.filter.Filter;
import org.opengis.geometry.BoundingBox;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class is a helper for the GeoWave GeoTools data store. It represents a collection of
 * feature data by encapsulating a GeoWave reader and a query object in order to open the
 * appropriate cursor to iterate over data. It uses Keys within the Query hints to determine
 * whether to perform special purpose queries such as decimation or distributed rendering.
 */
public class GeoWaveFeatureCollection extends DataFeatureCollection {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureCollection.class);
  private final GeoWaveFeatureReader reader;
  // currently open cursor, if any; nulled out by close(FeatureIterator)
  private CloseableIterator featureCursor;
  private final Query query;
  // lazily-built schema used when the query requests distributed rendering
  private static SimpleFeatureType distributedRenderFeatureType;

  public GeoWaveFeatureCollection(final GeoWaveFeatureReader reader, final Query query) {
    this.reader = reader;
    // a null query is replaced by an EXCLUDE-all query against this type
    this.query =
        validateQuery(GeoWaveFeatureCollection.getSchema(reader, query).getTypeName(), query);
  }

  /**
   * Counts matching features. Uses the count statistic for unfiltered (INCLUDE) queries, short
   * circuits EXCLUDE, and otherwise issues a count query.
   *
   * NOTE(review): on a transform/factory failure the count silently falls back to 0 — confirm
   * callers tolerate that.
   */
  @Override
  public int getCount() {
    if (query.getFilter().equals(Filter.INCLUDE)) {
      // GEOWAVE-60 optimization
      final CountValue count =
          reader.getTransaction().getDataStatistics().getAdapterStatistic(
              CountStatistic.STATS_TYPE);
      if (count != null) {
        return count.getValue().intValue();
      }
    } else if (query.getFilter().equals(Filter.EXCLUDE)) {
      return 0;
    }
    QueryConstraints constraints;
    try {
      constraints = getQueryConstraints();
      return (int) reader.getCountInternal(
          constraints.jtsBounds,
          constraints.timeBounds,
          constraints.limit);
    } catch (TransformException | FactoryException e) {
      LOGGER.warn("Unable to transform geometry, can't get count", e);
    }
    // fallback
    return 0;
  }

  /**
   * Computes the bounds of the collection, preferring the bounding-box statistic and falling back
   * to iterating all features. Returns null for an empty result; returns the whole world extent
   * in the default CRS on error.
   */
  @Override
  public ReferencedEnvelope getBounds() {
    double minx = Double.MAX_VALUE, maxx = -Double.MAX_VALUE, miny = Double.MAX_VALUE,
        maxy = -Double.MAX_VALUE;
    try {
      // GEOWAVE-60 optimization
      final BoundingBoxValue boundingBox =
          reader.getTransaction().getDataStatistics().getFieldStatistic(
              BoundingBoxStatistic.STATS_TYPE,
              reader.getFeatureType().getGeometryDescriptor().getLocalName());
      if (boundingBox != null) {
        return new ReferencedEnvelope(
            boundingBox.getMinX(),
            boundingBox.getMaxX(),
            boundingBox.getMinY(),
            boundingBox.getMaxY(),
            reader.getFeatureType().getCoordinateReferenceSystem());
      }
      // no statistic available: derive the envelope by scanning every feature
      final Iterator iterator = openIterator();
      if (!iterator.hasNext()) {
        return null;
      }
      while (iterator.hasNext()) {
        final BoundingBox bbox = iterator.next().getBounds();
        minx = Math.min(bbox.getMinX(), minx);
        maxx = Math.max(bbox.getMaxX(), maxx);
        miny = Math.min(bbox.getMinY(), miny);
        maxy = Math.max(bbox.getMaxY(), maxy);
      }
      close(iterator);
    } catch (final Exception e) {
      LOGGER.warn("Error calculating bounds", e);
      return new ReferencedEnvelope(-180, 180, -90, 90, GeometryUtils.getDefaultCRS());
    }
    return new ReferencedEnvelope(minx, maxx, miny, maxy, GeometryUtils.getDefaultCRS());
  }

  @Override
  public SimpleFeatureType getSchema() {
    if (isDistributedRenderQuery()) {
      return getDistributedRenderFeatureType();
    }
    return reader.getFeatureType();
  }

  /** Lazily builds (once) and returns the schema used for distributed-render results. */
  public static synchronized SimpleFeatureType getDistributedRenderFeatureType() {
    if (distributedRenderFeatureType == null) {
      distributedRenderFeatureType = createDistributedRenderFeatureType();
    }
    return distributedRenderFeatureType;
  }

  private static SimpleFeatureType createDistributedRenderFeatureType() {
    final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
    typeBuilder.setName("distributed_render");
    typeBuilder.add("result", DistributedRenderResult.class);
    typeBuilder.add("options", DistributedRenderOptions.class);
    return typeBuilder.buildFeatureType();
  }

  protected boolean isDistributedRenderQuery() {
    return GeoWaveFeatureCollection.isDistributedRenderQuery(query);
  }

  /** True when the query hints carry distributed-render options. */
  protected static final boolean isDistributedRenderQuery(final Query query) {
    return query.getHints().containsKey(DistributedRenderProcess.OPTIONS);
  }

  private static SimpleFeatureType getSchema(final GeoWaveFeatureReader reader, final Query query) {
    if (GeoWaveFeatureCollection.isDistributedRenderQuery(query)) {
      return getDistributedRenderFeatureType();
    }
    return reader.getComponents().getFeatureType();
  }

  /**
   * Derives the spatial/temporal constraints and effective limit for the current query. When a
   * GeoWave filter expression is available (and subsampling is not requested), spatial/temporal
   * bounds are left null because the GeoWave filter handles them.
   */
  protected QueryConstraints getQueryConstraints() throws TransformException, FactoryException {
    final ReferencedEnvelope referencedEnvelope = getEnvelope(query);
    final Geometry jtsBounds;
    final TemporalConstraintsSet timeBounds;
    if (reader.getGeoWaveFilter() == null
        || query.getHints().containsKey(SubsampleProcess.SUBSAMPLE_ENABLED)) {
      jtsBounds = getBBox(query, referencedEnvelope);
      timeBounds = getBoundedTime(query);
    } else {
      // This will be handled by the geowave filter
      jtsBounds = null;
      timeBounds = null;
    }
    Integer limit = getLimit(query);
    final Integer startIndex = getStartIndex(query);
    // limit becomes a 'soft' constraint since GeoServer will enforce
    // the limit
    final Long max =
        (limit != null)
            ? limit.longValue() + (startIndex == null ? 0 : startIndex.longValue())
            : null;
    // limit only used if less than an integer max value.
    limit = ((max != null) && (max.longValue() < Integer.MAX_VALUE)) ? max.intValue() : null;
    return new QueryConstraints(jtsBounds, timeBounds, referencedEnvelope, limit);
  }

  @Override
  protected Iterator openIterator() {
    try {
      return openIterator(getQueryConstraints());
    } catch (TransformException | FactoryException e) {
      LOGGER.warn("Unable to transform geometry", e);
    }
    // NOTE(review): on failure this returns the previously opened cursor, which may be null
    return featureCursor;
  }

  /**
   * Opens the cursor appropriate for the query: no-data for empty constraints or EXCLUDE,
   * distributed render, subsampled read, or a plain read.
   */
  private Iterator openIterator(final QueryConstraints constraints) {
    if (reader.getGeoWaveFilter() == null
        && (((constraints.jtsBounds != null) && constraints.jtsBounds.isEmpty())
            || ((constraints.timeBounds != null) && constraints.timeBounds.isEmpty()))) {
      // return nothing if either constraint is empty
      featureCursor = reader.getNoData();
    } else if (query.getFilter() == Filter.EXCLUDE) {
      featureCursor = reader.getNoData();
    } else if (isDistributedRenderQuery()) {
      featureCursor =
          reader.renderData(
              constraints.jtsBounds,
              constraints.timeBounds,
              constraints.limit,
              (DistributedRenderOptions) query.getHints().get(DistributedRenderProcess.OPTIONS));
    } else if (query.getHints().containsKey(SubsampleProcess.OUTPUT_WIDTH)
        && query.getHints().containsKey(SubsampleProcess.OUTPUT_HEIGHT)
        && query.getHints().containsKey(SubsampleProcess.OUTPUT_BBOX)) {
      // subsample/decimation path: pixel size defaults to 1 when not hinted
      double pixelSize = 1;
      if (query.getHints().containsKey(SubsampleProcess.PIXEL_SIZE)) {
        pixelSize = (Double) query.getHints().get(SubsampleProcess.PIXEL_SIZE);
      }
      featureCursor =
          reader.getData(
              constraints.jtsBounds,
              constraints.timeBounds,
              (Integer) query.getHints().get(SubsampleProcess.OUTPUT_WIDTH),
              (Integer) query.getHints().get(SubsampleProcess.OUTPUT_HEIGHT),
              pixelSize,
              constraints.referencedEnvelope,
              constraints.limit);
    } else {
      featureCursor =
          reader.getData(constraints.jtsBounds, constraints.timeBounds, constraints.limit);
    }
    return featureCursor;
  }

  /** Output bbox hint (if any) transformed into the feature type's CRS; null when absent. */
  private ReferencedEnvelope getEnvelope(final Query query)
      throws TransformException, FactoryException {
    if (query.getHints().containsKey(SubsampleProcess.OUTPUT_BBOX)) {
      return ((ReferencedEnvelope) query.getHints().get(SubsampleProcess.OUTPUT_BBOX)).transform(
          reader.getFeatureType().getCoordinateReferenceSystem(),
          true);
    }
    return null;
  }

  /**
   * Spatial bounds for the query: the supplied envelope when present, otherwise geometry
   * extracted from the query filter and clipped to the indexed BBOX constraints.
   */
  private Geometry getBBox(final Query query, final ReferencedEnvelope envelope) {
    if (envelope != null) {
      return new GeometryFactory().toGeometry(envelope);
    }
    final String geomAtrributeName =
        reader.getComponents().getFeatureType().getGeometryDescriptor().getLocalName();
    final ExtractGeometryFilterVisitorResult geoAndCompareOp =
        ExtractGeometryFilterVisitor.getConstraints(
            query.getFilter(),
            reader.getComponents().getCRS(),
            geomAtrributeName);
    if (geoAndCompareOp == null) {
      return reader.clipIndexedBBOXConstraints(null);
    } else {
      return reader.clipIndexedBBOXConstraints(geoAndCompareOp.getGeometry());
    }
  }

  /** Substitutes an EXCLUDE-all query when the caller passed null. */
  private Query validateQuery(final String typeName, final Query query) {
    return query == null ? new Query(typeName, Filter.EXCLUDE) : query;
  }

  private Integer getStartIndex(final Query query) {
    return query.getStartIndex();
  }

  /** The query's max-features limit, or null when unlimited/negative. */
  private Integer getLimit(final Query query) {
    if (!query.isMaxFeaturesUnlimited() && (query.getMaxFeatures() >= 0)) {
      return query.getMaxFeatures();
    }
    return null;
  }

  /**
   * Attempts a statistics-backed visit first; falls back to a full feature visit when the
   * statistics path cannot satisfy the visitor.
   */
  @Override
  public void accepts(
      final org.opengis.feature.FeatureVisitor visitor,
      final org.opengis.util.ProgressListener progress) throws IOException {
    if (!GeoWaveGTPluginUtils.accepts(
        reader.getComponents().getStatsStore(),
        reader.getComponents().getAdapter(),
        visitor,
        progress,
        reader.getFeatureType())) {
      DataUtilities.visit(this, visitor, progress);
    }
  }

  /**
   * @param query the query
   * @return the temporal constraints of the query, clipped to the indexed temporal constraints;
   *         null when the query is null or its time constraints are empty
   */
  protected TemporalConstraintsSet getBoundedTime(final Query query) {
    if (query == null) {
      return null;
    }
    final TemporalConstraintsSet constraints =
        new ExtractTimeFilterVisitor(
            reader.getComponents().getAdapter().getTimeDescriptors()).getConstraints(query);
    return constraints.isEmpty() ? null : reader.clipIndexedTemporalConstraints(constraints);
  }

  @Override
  public FeatureReader reader() {
    return reader;
  }

  /**
   * NOTE(review): ignores the passed iterator and closes the shared 'featureCursor'; will NPE if
   * the cursor was already nulled by close(FeatureIterator) — confirm the GeoTools call order this
   * relies on.
   */
  @Override
  protected void closeIterator(final Iterator close) {
    featureCursor.close();
  }

  public Iterator getOpenIterator() {
    return featureCursor;
  }

  @Override
  public void close(final FeatureIterator iterator) {
    featureCursor = null;
    super.close(iterator);
  }

  /** Treats a reader failure as empty (returns true on IOException). */
  @Override
  public boolean isEmpty() {
    try {
      return !reader.hasNext();
    } catch (final IOException e) {
      LOGGER.warn("Error checking reader", e);
    }
    return true;
  }

  /** Simple value holder for the derived spatial/temporal/limit constraints of a query. */
  private static class QueryConstraints {
    Geometry jtsBounds;
    TemporalConstraintsSet timeBounds;
    ReferencedEnvelope referencedEnvelope;
    Integer limit;

    public QueryConstraints(
        final Geometry jtsBounds,
        final TemporalConstraintsSet timeBounds,
        final ReferencedEnvelope referencedEnvelope,
        final Integer limit) {
      super();
      this.jtsBounds = jtsBounds;
      this.timeBounds = timeBounds;
      this.referencedEnvelope = referencedEnvelope;
      this.limit = limit;
    }
  }
}


================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureReader.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import java.awt.Rectangle; import java.awt.geom.AffineTransform; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.Set; import org.geotools.data.FeatureReader; import org.geotools.data.Query; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.filter.AttributeExpressionImpl; import org.geotools.filter.FidFilterImpl; import org.geotools.filter.spatial.BBOXImpl; import org.geotools.geometry.jts.Decimator; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.operation.transform.ProjectiveTransform; import org.geotools.renderer.lite.RendererUtilities; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransaction; import org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache; import org.locationtech.geowave.adapter.vector.render.DistributedRenderAggregation; import org.locationtech.geowave.adapter.vector.render.DistributedRenderOptions; import org.locationtech.geowave.adapter.vector.render.DistributedRenderResult; import org.locationtech.geowave.adapter.vector.util.QueryIndexHelper; import org.locationtech.geowave.core.geotime.index.SpatialIndexFilter; import org.locationtech.geowave.core.geotime.index.dimension.SimpleTimeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialQuery; import 
org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet; import org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.filter.expression.CQLToGeoWaveConversionException; import org.locationtech.geowave.core.geotime.store.query.filter.expression.CQLToGeoWaveFilterVisitor; import org.locationtech.geowave.core.geotime.util.ExtractAttributesFilter; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils.GeoConstraintsWrapper; import org.locationtech.geowave.core.geotime.util.SpatialIndexUtils; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.CloseableIteratorWrapper; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.query.constraints.OptimalExpressionQuery; import org.locationtech.geowave.core.store.query.constraints.QueryConstraints; import org.locationtech.geowave.core.store.query.filter.expression.InvalidFilterException; import org.locationtech.geowave.core.store.util.DataStoreUtils; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.filter.Filter; 
import org.opengis.filter.expression.Expression;
import org.opengis.filter.expression.PropertyName;
import org.opengis.geometry.MismatchedDimensionException;
import org.opengis.referencing.operation.MathTransform2D;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;

/**
 * This class wraps a geotools data store as well as one for statistics (for example to display
 * Heatmaps) into a GeoTools FeatureReader for simple feature data. It acts as a helper for
 * GeoWave's GeoTools data store.
 */
public class GeoWaveFeatureReader implements FeatureReader {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureReader.class);

  // Bundles the underlying GeoWave DataStore, adapter, index mapping, and stats stores.
  private final GeoWaveDataStoreComponents components;
  // Lazily-iterated collection backing hasNext()/next().
  private final GeoWaveFeatureCollection featureCollection;
  private final GeoWaveTransaction transaction;
  private final Query query;
  // The (possibly adjusted) GeoTools filter derived from the query; see getFilter(Query).
  private final Filter filter;
  // GeoWave filter-expression translation of `filter`, or null when the CQL filter could not be
  // converted (held as Object; cast in getGeoWaveFilter()).
  private final Object geoWaveFilter;

  /**
   * Creates a reader for the given query within the given transaction.
   *
   * @param query the GeoTools query to execute
   * @param transaction the transaction whose uncommitted state is interwoven with results
   * @param components shared GeoWave data store components
   * @throws IOException declared for FeatureReader compatibility
   */
  public GeoWaveFeatureReader(
      final Query query,
      final GeoWaveTransaction transaction,
      final GeoWaveDataStoreComponents components) throws IOException {
    this.components = components;
    this.transaction = transaction;
    featureCollection = new GeoWaveFeatureCollection(this, query);
    this.query = query;
    this.filter = getFilter(query);
    Object gwfilter = null;
    try {
      // Attempt to translate the CQL filter into a native GeoWave filter expression.
      gwfilter = this.filter.accept(new CQLToGeoWaveFilterVisitor(components.getAdapter()), null);
    } catch (CQLToGeoWaveConversionException | InvalidFilterException e) {
      // Incompatible with GeoWave filter expressions, fall back to regular optimal CQL query
    }
    geoWaveFilter = gwfilter;
  }

  public GeoWaveTransaction getTransaction() {
    return transaction;
  }

  public GeoWaveDataStoreComponents getComponents() {
    return components;
  }

  /** @return the native GeoWave filter expression, or null if the CQL filter was not convertible */
  public org.locationtech.geowave.core.store.query.filter.expression.Filter getGeoWaveFilter() {
    return (org.locationtech.geowave.core.store.query.filter.expression.Filter) geoWaveFilter;
  }

  /** Closes any iterator the backing feature collection still has open. */
  @Override
  public void close() throws IOException {
    if (featureCollection.getOpenIterator() != null) {
      featureCollection.closeIterator(featureCollection.getOpenIterator());
    }
  }

  @Override
  public SimpleFeatureType getFeatureType() {
    return components.getFeatureType();
  }

  @Override
  public boolean hasNext() throws IOException {
    Iterator it = featureCollection.getOpenIterator();
    if (it != null) {
      // protect against GeoTools forgetting to call close()
      // on this FeatureReader, which causes a resource leak
      if (!it.hasNext()) {
        ((CloseableIterator) it).close();
      }
      return it.hasNext();
    }
    it = featureCollection.openIterator();
    return it.hasNext();
  }

  @Override
  public SimpleFeature next() throws IOException, IllegalArgumentException, NoSuchElementException {
    Iterator it = featureCollection.getOpenIterator();
    if (it != null) {
      return it.next();
    }
    it = featureCollection.openIterator();
    return it.next();
  }

  /** @return an empty iterator (used when a query matches no index) */
  public CloseableIterator getNoData() {
    return new CloseableIterator.Empty<>();
  }

  public long getCount() {
    return featureCollection.getCount();
  }

  /**
   * Runs a count aggregation constrained by the given bounds.
   *
   * @param jtsBounds spatial constraint (may be null)
   * @param timeBounds temporal constraint (may be null)
   * @param limit maximum features to count (may be null)
   * @return the aggregated count
   */
  protected long getCountInternal(
      final Geometry jtsBounds,
      final TemporalConstraintsSet timeBounds,
      final Integer limit) {
    final CountQueryIssuer countIssuer = new CountQueryIssuer(limit);
    issueQuery(jtsBounds, timeBounds, countIssuer);
    return countIssuer.count;
  }

  /**
   * Builds a spatial (and optionally temporal) constraint query from the given bounds, using the
   * CRS of the underlying components.
   */
  private BasicQueryByClass getQuery(
      final Geometry jtsBounds,
      final TemporalConstraintsSet timeBounds) {
    final GeoConstraintsWrapper geoConstraints =
        QueryIndexHelper.composeGeometricConstraints(getFeatureType(), jtsBounds);
    if (timeBounds == null) {
      // if timeBounds are unspecified just use the geoConstraints
      return new ExplicitSpatialQuery(
          geoConstraints.getConstraints(),
          geoConstraints.getGeometry(),
          GeometryUtils.getCrsCode(components.getCRS()));
    } else {
      final ConstraintsByClass timeConstraints =
          QueryIndexHelper.composeTimeBoundedConstraints(
              components.getFeatureType(),
              components.getAdapter().getTimeDescriptors(),
              timeBounds);
      /**
       * NOTE: query to an index that requires a constraint and the constraint is missing equates to
       * a full table scan. @see BasicQuery
       */
      final BasicQueryByClass query =
          new ExplicitSpatialQuery(
              geoConstraints.getConstraints().merge(timeConstraints),
              geoConstraints.getGeometry(),
              GeometryUtils.getCrsCode(components.getCRS()));
      query.setExact(timeBounds.isExact());
      return query;
    }
  }

  /**
   * Issues the query against every index selected by the index-query strategy and concatenates the
   * per-index results, interwoven with the transaction's uncommitted state.
   *
   * <p>When a native GeoWave filter is available (and subsampling is not requested), index
   * selection is delegated to the filter-expression machinery with a single issuer call.
   *
   * @param jtsBounds spatial constraint (may be null)
   * @param timeBounds temporal constraint (may be null)
   * @param issuer strategy that actually performs each per-index query
   * @return a closeable iterator over all results; closing it closes every per-index iterator
   */
  public CloseableIterator issueQuery(
      final Geometry jtsBounds,
      final TemporalConstraintsSet timeBounds,
      final QueryIssuer issuer) {
    final List> results = new ArrayList<>();
    boolean spatialOnly = false;
    // Subsampling restricts candidate indices to spatial-only ones.
    if (this.query.getHints().containsKey(SubsampleProcess.SUBSAMPLE_ENABLED)
        && (Boolean) this.query.getHints().get(SubsampleProcess.SUBSAMPLE_ENABLED)) {
      spatialOnly = true;
    }
    if (!spatialOnly && getGeoWaveFilter() != null) {
      // Native filter expression path: one issuer call, index chosen internally.
      results.add(issuer.query(null, null, spatialOnly));
    } else {
      final BasicQueryByClass query = getQuery(jtsBounds, timeBounds);
      // Only fetch statistics when the configured index-query strategy needs them.
      final StatisticsCache statsCache =
          getComponents().getGTstore().getIndexQueryStrategy().requiresStats()
              ? transaction.getDataStatistics()
              : null;
      try (CloseableIterator indexIt = getComponents().getIndices(statsCache, query, spatialOnly)) {
        while (indexIt.hasNext()) {
          final Index index = indexIt.next();
          final CloseableIterator it = issuer.query(index, query, spatialOnly);
          if (it != null) {
            results.add(it);
          }
        }
      }
    }
    if (results.isEmpty()) {
      return getNoData();
    }
    // Wrap so that closing the concatenated iterator closes every underlying iterator.
    return interweaveTransaction(
        issuer.getLimit(),
        issuer.getFilter(),
        new CloseableIteratorWrapper<>(new Closeable() {
          @Override
          public void close() throws IOException {
            for (final CloseableIterator result : results) {
              result.close();
            }
          }
        }, Iterators.concat(results.iterator())));
  }

  /** @return true when the index has a time (or simple-time) dimension definition */
  protected static boolean hasTime(final Index index) {
    if ((index == null)
        || (index.getIndexStrategy() == null)
        || (index.getIndexStrategy().getOrderedDimensionDefinitions() == null)) {
      return false;
    }
    for (final NumericDimensionDefinition dimension : index.getIndexStrategy().getOrderedDimensionDefinitions()) {
      if ((dimension instanceof TimeDefinition) || (dimension instanceof SimpleTimeDefinition)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Builds the query constraints for one index: native filter-expression constraints when
   * available, otherwise the optimal CQL query for this adapter/index mapping.
   */
  private QueryConstraints createQueryConstraints(
      final Index index,
      final BasicQueryByClass baseQuery,
      final boolean spatialOnly) {
    if (getGeoWaveFilter() != null) {
      return new OptimalExpressionQuery(
          getGeoWaveFilter(),
          spatialOnly ? new SpatialIndexFilter() : null);
    }
    final AdapterToIndexMapping indexMapping =
        components.getAdapterIndexMappingStore().getMapping(
            components.getAdapter().getAdapterId(),
            index.getName());
    return OptimalCQLQuery.createOptimalQuery(
        filter,
        components.getAdapter(),
        index,
        indexMapping,
        baseQuery);
  }

  /**
   * Extracts the query's filter, patching a BBOX filter whose property name is empty so it targets
   * the adapter's default geometry attribute.
   */
  public Filter getFilter(final Query query) {
    final Filter filter = query.getFilter();
    if (filter instanceof BBOXImpl) {
      final BBOXImpl bbox = ((BBOXImpl) filter);
      final Expression exp1 = bbox.getExpression1();
      if (exp1 instanceof PropertyName) {
        final String propName = ((PropertyName) exp1).getPropertyName();
        if ((propName == null) || propName.isEmpty()) {
          bbox.setExpression1(
              new AttributeExpressionImpl(
                  components.getAdapter().getFeatureType().getGeometryDescriptor().getLocalName()));
        }
      }
    }
    return filter;
  }

  /** Default issuer: plain vector query with optional limit and attribute subsetting. */
  private class BaseIssuer implements QueryIssuer {
    final Integer limit;

    public BaseIssuer(final Integer limit) {
      super();
      this.limit = limit;
    }

    @Override
    public CloseableIterator query(
        final Index index,
        final BasicQueryByClass query,
        final boolean spatialOnly) {
      VectorQueryBuilder bldr =
          VectorQueryBuilder.newBuilder().addTypeName(
              components.getAdapter().getTypeName()).setAuthorizations(
                  transaction.composeAuthorizations()).constraints(
                      createQueryConstraints(index, query, spatialOnly));
      if (index != null) {
        bldr.indexName(index.getName());
      }
      if (limit != null) {
        bldr = bldr.limit(limit);
      }
      if (subsetRequested()) {
        bldr = bldr.subsetFields(components.getAdapter().getTypeName(), getSubset());
      }
      return components.getDataStore().query(bldr.build());
    }

    @Override
    public Filter getFilter() {
      return filter;
    }

    @Override
    public Integer getLimit() {
      return limit;
    }
  }

  /** Issuer that runs a server-side count aggregation instead of fetching features. */
  private class CountQueryIssuer extends BaseIssuer implements QueryIssuer {
    // Populated by query(); read back by getCountInternal().
    private long count = 0;

    public CountQueryIssuer(final Integer limit) {
      super(limit);
    }

    @Override
    public CloseableIterator query(
        final Index index,
        final BasicQueryByClass query,
        final boolean spatialOnly) {
      VectorAggregationQueryBuilder bldr =
          (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().count(
              components.getAdapter().getTypeName()).setAuthorizations(
                  transaction.composeAuthorizations()).constraints(
                      createQueryConstraints(index, query, spatialOnly));
      if (index != null) {
        bldr.indexName(index.getName());
      }
      if (limit != null) {
        bldr = bldr.limit(limit);
      }
      final Long count = components.getDataStore().aggregate(bldr.build());
      if (count != null) {
        this.count = count;
      }
      // Aggregation produces no feature iterator.
      return null;
    }
  }

  /**
   * Issuer that adds a per-dimension max-resolution subsampling hint derived from the rendered
   * envelope's screen transform (used for decimated rendering).
   */
  private class EnvelopeQueryIssuer extends BaseIssuer implements QueryIssuer {
    final ReferencedEnvelope envelope;
    final int width;
    final int height;
    final double pixelSize;

    public EnvelopeQueryIssuer(
        final int width,
        final int height,
        final double pixelSize,
        final Integer limit,
        final ReferencedEnvelope envelope) {
      super(limit);
      this.width = width;
      this.height = height;
      this.pixelSize = pixelSize;
      this.envelope = envelope;
    }

    @Override
    public CloseableIterator query(
        final Index index,
        final BasicQueryByClass query,
        final boolean spatialOnly) {
      VectorQueryBuilder bldr =
          VectorQueryBuilder.newBuilder().addTypeName(
              components.getAdapter().getTypeName()).setAuthorizations(
                  transaction.composeAuthorizations()).constraints(
                      createQueryConstraints(index, query, spatialOnly));
      if (index != null) {
        bldr.indexName(index.getName());
      }
      if (limit != null) {
        bldr = bldr.limit(limit);
      }
      if (subsetRequested()) {
        bldr = bldr.subsetFields(components.getAdapter().getTypeName(), getSubset());
      }
      final double east = envelope.getMaxX();
      final double west = envelope.getMinX();
      final double north = envelope.getMaxY();
      final double south = envelope.getMinY();
      try {
        final AffineTransform worldToScreen =
            RendererUtilities.worldToScreenTransform(
                new ReferencedEnvelope(
                    new Envelope(west, east, south, north),
                    envelope.getCoordinateReferenceSystem()),
                new Rectangle(width, height));
        final MathTransform2D fullTransform =
            (MathTransform2D) ProjectiveTransform.create(worldToScreen);
        // calculate spans
        try {
          if (index != null) {
            final double[] spans =
                Decimator.computeGeneralizationDistances(
                    fullTransform.inverse(),
                    new Rectangle(width, height),
                    pixelSize);
            final NumericDimensionDefinition[] dimensions =
                index.getIndexStrategy().getOrderedDimensionDefinitions();
            // Map the screen-space spans onto the index's lon/lat dimensions.
            final double[] maxResolutionSubsampling = new double[dimensions.length];
            for (int i = 0; i < dimensions.length; i++) {
              if (SpatialIndexUtils.isLongitudeDimension(dimensions[i])) {
                maxResolutionSubsampling[i] = spans[0];
              } else if (SpatialIndexUtils.isLatitudeDimension(dimensions[i])) {
                maxResolutionSubsampling[i] = spans[1];
              } else {
                // Ignore all other dimensions
                maxResolutionSubsampling[i] = 0;
              }
            }
            bldr =
                bldr.addHint(
                    DataStoreUtils.MAX_RESOLUTION_SUBSAMPLING_PER_DIMENSION,
                    maxResolutionSubsampling);
          }
          return components.getDataStore().query(bldr.build());
        } catch (final TransformException e) {
          throw new IllegalArgumentException("Unable to compute generalization distance", e);
        }
      } catch (final MismatchedDimensionException e) {
        throw new IllegalArgumentException("Unable to create Reference Envelope", e);
      }
    }
  }

  /**
   * Issuer that performs distributed rendering server-side and returns a single synthetic feature
   * holding the render result.
   */
  private class RenderQueryIssuer extends BaseIssuer implements QueryIssuer {
    final DistributedRenderOptions renderOptions;

    public RenderQueryIssuer(final Integer limit, final DistributedRenderOptions renderOptions) {
      super(limit);
      this.renderOptions = renderOptions;
    }

    @Override
    public CloseableIterator query(
        final Index index,
        final BasicQueryByClass query,
        final boolean spatialOnly) {
      final VectorAggregationQueryBuilder bldr =
          (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().setAuthorizations(
              transaction.composeAuthorizations());
      if (index != null) {
        bldr.indexName(index.getName());
      }
      bldr.aggregate(
          components.getAdapter().getTypeName(),
          new DistributedRenderAggregation(renderOptions)).constraints(
              createQueryConstraints(index, query, spatialOnly));
      final DistributedRenderResult result = components.getDataStore().aggregate(bldr.build());
      // Wrap the single aggregated render result as a one-element feature iterator.
      return new CloseableIterator.Wrapper<>(
          Iterators.singletonIterator(
              SimpleFeatureBuilder.build(
                  GeoWaveFeatureCollection.getDistributedRenderFeatureType(),
                  new Object[] {result, renderOptions},
                  "render")));
    }
  }

  /** Issues a distributed-render query; see {@link RenderQueryIssuer}. */
  public CloseableIterator renderData(
      final Geometry jtsBounds,
      final TemporalConstraintsSet timeBounds,
      final Integer limit,
      final DistributedRenderOptions renderOptions) {
    return issueQuery(jtsBounds, timeBounds, new RenderQueryIssuer(limit, renderOptions));
  }

  /** Issues a query with envelope-based subsampling; see {@link EnvelopeQueryIssuer}. */
  public CloseableIterator getData(
      final Geometry jtsBounds,
      final TemporalConstraintsSet timeBounds,
      final int width,
      final int height,
      final double pixelSize,
      final ReferencedEnvelope envelope,
      final Integer limit) {
    return issueQuery(
        jtsBounds,
        timeBounds,
        new EnvelopeQueryIssuer(width, height, pixelSize, limit, envelope));
  }

  /**
   * Issues a plain data query. A feature-ID filter short-circuits to a direct data-ID lookup
   * against the first write index instead of a constraint-based scan.
   */
  public CloseableIterator getData(
      final Geometry jtsBounds,
      final TemporalConstraintsSet timeBounds,
      final Integer limit) {
    if (filter instanceof FidFilterImpl) {
      final Set fids = ((FidFilterImpl) filter).getFidsSet();
      final byte[][] ids = new byte[fids.size()][];
      int i = 0;
      for (final String fid : fids) {
        ids[i++] = StringUtils.stringToBinary(fid);
      }
      final Index[] writeIndices = components.getAdapterIndices();
      final String queryIndexName =
          ((writeIndices != null) && (writeIndices.length > 0)) ? writeIndices[0].getName() : null;
      VectorQueryBuilder bldr =
          VectorQueryBuilder.newBuilder().addTypeName(
              components.getAdapter().getTypeName()).indexName(queryIndexName).setAuthorizations(
                  transaction.composeAuthorizations());
      if (limit != null) {
        bldr = bldr.limit(limit);
      }
      if (subsetRequested()) {
        bldr = bldr.subsetFields(components.getAdapter().getTypeName(), getSubset());
      }
      return components.getDataStore().query(
          bldr.constraints(bldr.constraintsFactory().dataIds(ids)).build());
    }
    return issueQuery(jtsBounds, timeBounds, new BaseIssuer(limit));
  }

  public GeoWaveFeatureCollection getFeatureCollection() {
    return featureCollection;
  }

  /** Delegates to the transaction so uncommitted adds/modifies/deletes are reflected in results. */
  private CloseableIterator interweaveTransaction(
      final Integer limit,
      final Filter filter,
      final CloseableIterator it) {
    return transaction.interweaveTransaction(limit, filter, it);
  }

  protected TemporalConstraintsSet clipIndexedTemporalConstraints(
      final TemporalConstraintsSet constraintsSet) {
    return QueryIndexHelper.clipIndexedTemporalConstraints(
        transaction.getDataStatistics(),
        components.getAdapter().getTimeDescriptors(),
        constraintsSet);
  }

  protected Geometry clipIndexedBBOXConstraints(final Geometry bbox) {
    return QueryIndexHelper.clipIndexedBBOXConstraints(
        transaction.getDataStatistics(),
        components.getAdapter().getFeatureType(),
        components.getCRS(),
        bbox);
  }

  /** @return true when the query requests a subset of attributes rather than ALL_NAMES */
  private boolean subsetRequested() {
    if (query == null) {
      return false;
    }
    return !(query.getPropertyNames() == Query.ALL_NAMES);
  }

  /**
   * Computes the attribute subset to fetch: the query's requested properties plus any attributes
   * referenced by the filter (needed for client-side filtering when the server-side library is
   * disabled).
   */
  private String[] getSubset() {
    if (query == null) {
      return new String[0];
    }
    if ((query.getFilter() != null)
        && !components.getGTstore().getDataStoreOptions().isServerSideLibraryEnabled()) {
      final ExtractAttributesFilter attributesVisitor = new ExtractAttributesFilter();
      final Object obj = query.getFilter().accept(attributesVisitor, null);
      if ((obj != null) && (obj instanceof Collection)) {
        final Set properties = Sets.newHashSet(query.getPropertyNames());
        for (final String prop : (Collection) obj) {
          properties.add(prop);
        }
        return properties.toArray(new String[0]);
      }
    }
    return query.getPropertyNames();
  }
}


================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureSource.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import java.io.IOException; import org.geotools.data.FeatureReader; import org.geotools.data.FeatureWriter; import org.geotools.data.Query; import org.geotools.data.store.ContentEntry; import org.geotools.data.store.ContentFeatureStore; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveEmptyTransaction; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransactionState; import org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache; import org.locationtech.geowave.adapter.vector.plugin.transaction.TransactionsAllocator; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue; import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic; import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue; import org.opengis.feature.FeatureVisitor; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.filter.Filter; import org.opengis.geometry.BoundingBox; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.TransformException; import org.opengis.util.ProgressListener; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; public class GeoWaveFeatureSource extends ContentFeatureStore { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureSource.class); private final GeoWaveDataStoreComponents components; public GeoWaveFeatureSource( final ContentEntry entry, final Query query, final InternalGeotoolsFeatureDataAdapter adapter, final TransactionsAllocator transactionAllocator) { super(entry, query); components = new GeoWaveDataStoreComponents( getDataStore().getDataStore(), getDataStore().getDataStatisticsStore(), getDataStore().getAdapterIndexMappingStore(), getDataStore().getIndexStore(), adapter, getDataStore(), transactionAllocator); } public GeoWaveDataStoreComponents getComponents() { return components; } @Override protected ReferencedEnvelope getBoundsInternal(final Query query) throws IOException { double minx = -90.0, maxx = 90.0, miny = -180.0, maxy = 180.0; BoundingBoxValue bboxStats = null; if (query.getFilter().equals(Filter.INCLUDE)) { final StatisticsCache statsCache = new GeoWaveEmptyTransaction(components).getDataStatistics(); bboxStats = statsCache.getFieldStatistic( BoundingBoxStatistic.STATS_TYPE, getFeatureType().getGeometryDescriptor().getLocalName()); } CoordinateReferenceSystem bboxCRS = DefaultGeographicCRS.WGS84; if (bboxStats != null) { minx = bboxStats.getMinX(); maxx = bboxStats.getMaxX(); miny = bboxStats.getMinY(); maxy = bboxStats.getMaxY(); BoundingBoxStatistic statistic = (BoundingBoxStatistic) bboxStats.getStatistic(); if (statistic.getDestinationCrs() != null) { bboxCRS = statistic.getDestinationCrs(); } else { bboxCRS = components.getAdapter().getFeatureType().getCoordinateReferenceSystem(); } } else { final FeatureReader reader = new GeoWaveFeatureReader(query, new GeoWaveEmptyTransaction(components), components); if (reader.hasNext()) { bboxCRS = components.getCRS(); BoundingBox featureBounds = reader.next().getBounds(); minx = featureBounds.getMinX(); maxx = 
featureBounds.getMaxX(); miny = featureBounds.getMinY(); maxy = featureBounds.getMaxY(); while (reader.hasNext()) { featureBounds = reader.next().getBounds(); minx = Math.min(featureBounds.getMinX(), minx); maxx = Math.max(featureBounds.getMaxX(), maxx); miny = Math.min(featureBounds.getMinY(), miny); maxy = Math.max(featureBounds.getMaxY(), maxy); } } reader.close(); } ReferencedEnvelope retVal = new ReferencedEnvelope(minx, maxx, miny, maxy, bboxCRS); if (!bboxCRS.equals(components.getCRS())) { try { retVal = retVal.transform(components.getCRS(), true); } catch (FactoryException | TransformException e) { LOGGER.warn("Unable to transform bounding box for feature source."); } } return retVal; } @Override protected int getCountInternal(final Query query) throws IOException { final CountValue count = new GeoWaveEmptyTransaction(components).getDataStatistics().getAdapterStatistic( CountStatistic.STATS_TYPE); if ((count != null) && query.getFilter().equals(Filter.INCLUDE)) { return count.getValue().intValue(); } else { try (GeoWaveFeatureReader reader = new GeoWaveFeatureReader(query, new GeoWaveEmptyTransaction(components), components)) { return (int) reader.getCount(); } } } public SimpleFeatureType getFeatureType() { return components.getFeatureType(); } @Override protected FeatureReader getReaderInternal(final Query query) throws IOException { final GeoWaveTransactionState state = getDataStore().getMyTransactionState(transaction, this); return new GeoWaveFeatureReader( query, state.getGeoWaveTransaction(query.getTypeName()), components); } @Override protected FeatureWriter getWriterInternal( final Query query, final int flags) throws IOException { final GeoWaveTransactionState state = getDataStore().getMyTransactionState(transaction, this); return new GeoWaveFeatureWriter( components, state.getGeoWaveTransaction(query.getTypeName()), (GeoWaveFeatureReader) getReaderInternal(query)); } @Override public void accepts( final Query query, final FeatureVisitor visitor, 
final ProgressListener progress) throws IOException { if (!GeoWaveGTPluginUtils.accepts( components.getStatsStore(), components.getAdapter(), visitor, progress, getFeatureType())) { super.accepts(query, visitor, progress); } } @Override protected SimpleFeatureType buildFeatureType() throws IOException { return getFeatureType(); } @Override public GeoWaveGTDataStore getDataStore() { // type narrow this method to prevent a lot of casts resulting in more // readable code. return (GeoWaveGTDataStore) super.getDataStore(); } @Override protected boolean canTransact() { // tell GeoTools that we natively handle this return true; } @Override protected boolean canLock() { // tell GeoTools that we natively handle this return true; } @Override protected boolean canFilter() { return true; } @Override protected void doLockInternal(final String typeName, final SimpleFeature feature) throws IOException { getDataStore().getLockingManager().lockFeatureID(typeName, feature.getID(), transaction, lock); } @Override protected void doUnlockInternal(final String typeName, final SimpleFeature feature) throws IOException { getDataStore().getLockingManager().unLockFeatureID( typeName, feature.getID(), transaction, lock); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureWriter.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import java.io.IOException; import java.util.List; import java.util.NoSuchElementException; import java.util.UUID; import org.geotools.data.FeatureWriter; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.util.Utilities; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransaction; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class wraps a geotools data store as well as one for statistics (for example to display * Heatmaps) into a GeoTools FeatureReader for simple feature data. It acts as a helper for * GeoWave's GeoTools data store. 
*/ public class GeoWaveFeatureWriter implements FeatureWriter { private SimpleFeature original = null; private SimpleFeature live = null; private final GeoWaveTransaction transaction; private final GeoWaveFeatureReader myReader; private final SimpleFeatureType featureType; public GeoWaveFeatureWriter( final GeoWaveDataStoreComponents components, final GeoWaveTransaction transaction, final GeoWaveFeatureReader reader) { components.initForWrite(); this.transaction = transaction; myReader = reader; featureType = components.getFeatureType(); } @Override public void close() throws IOException {} @Override public SimpleFeatureType getFeatureType() { return featureType; } @Override public boolean hasNext() throws IOException { return ((myReader != null) && myReader.hasNext()); } @Override public SimpleFeature next() throws IOException, IllegalArgumentException, NoSuchElementException { if (hasNext()) { original = myReader.next(); final List descriptors = featureType.getAttributeDescriptors(); final Object[] defaults = new Object[descriptors.size()]; int p = 0; for (final AttributeDescriptor descriptor : descriptors) { defaults[p++] = original.getAttribute(descriptor.getName()); } live = SimpleFeatureBuilder.build(featureType, defaults, original.getID()); } else { original = null; final List descriptors = featureType.getAttributeDescriptors(); final Object[] defaults = new Object[descriptors.size()]; int p = 0; for (final AttributeDescriptor descriptor : descriptors) { defaults[p++] = descriptor.getDefaultValue(); } live = SimpleFeatureBuilder.build(featureType, defaults, UUID.randomUUID().toString()); } return live; } @Override public void remove() throws IOException { transaction.remove(live.getID(), live); } private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveFeatureWriter.class); @Override public void write() throws IOException { if (live == null) { LOGGER.error("Unable to process transaction " + transaction.toString()); throw new IOException("No 
current feature to write"); } if (original == null) { transaction.add(live.getID(), live); } else if (!Utilities.deepEquals(live, original)) { transaction.modify(live.getID(), original, live); } original = null; live = null; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGSProcessFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import org.geotools.process.factory.AnnotatedBeanProcessFactory; import org.geotools.text.Text; /** * This is the GeoTools Factory for introducing the nga:Decimation rendering transform. GeoTools * uses Java SPI to inject the WPS process (see * META-INF/services/org.geotools.process.ProcessFactory). */ public class GeoWaveGSProcessFactory extends AnnotatedBeanProcessFactory { public GeoWaveGSProcessFactory() { super( Text.text("GeoWave Process Factory"), "geowave", SubsampleProcess.class, DistributedRenderProcess.class); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGTDataStore.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import java.io.Closeable; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.geotools.data.FeatureListenerManager; import org.geotools.data.Query; import org.geotools.data.Transaction; import org.geotools.data.store.ContentDataStore; import org.geotools.data.store.ContentEntry; import org.geotools.data.store.ContentFeatureSource; import org.geotools.feature.NameImpl; import org.locationtech.geowave.adapter.auth.AuthorizationSPI; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI; import org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration; import org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagement; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveAutoCommitTransactionState; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransactionManagementState; import org.locationtech.geowave.adapter.vector.plugin.transaction.GeoWaveTransactionState; import org.locationtech.geowave.adapter.vector.plugin.transaction.MemoryTransactionsAllocator; import org.locationtech.geowave.adapter.vector.plugin.transaction.TransactionsAllocator; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import 
org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.SpatialIndexUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.DataStoreOptions; import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.dimension.NumericDimensionField; import org.locationtech.geowave.core.store.index.IndexStore; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.Name; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; public class GeoWaveGTDataStore extends ContentDataStore { /** Package logger */ private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGTDataStore.class); private FeatureListenerManager listenerManager = null; protected PersistentAdapterStore adapterStore; protected InternalAdapterStore internalAdapterStore; protected IndexStore indexStore; protected DataStatisticsStore dataStatisticsStore; protected DataStore dataStore; protected DataStoreOptions dataStoreOptions; protected AdapterIndexMappingStore adapterIndexMappingStore; private final Map 
preferredIndexes = new ConcurrentHashMap<>();
  private final AuthorizationSPI authorizationSPI;
  private final IndexQueryStrategySPI indexQueryStrategy;
  // Optional namespace applied to adapters/feature types (null when unconfigured).
  private final URI featureNameSpaceURI;
  private int transactionBufferSize = 10000;
  private final TransactionsAllocator transactionsAllocator;

  /**
   * Builds the GeoTools-facing data store from the plugin configuration: locking, authorization,
   * backing stores, namespace, index-query strategy, and transaction settings.
   *
   * @param config the plugin configuration
   * @throws IOException declared for ContentDataStore compatibility
   */
  public GeoWaveGTDataStore(final GeoWavePluginConfig config) throws IOException {
    listenerManager = new FeatureListenerManager();
    lockingManager = config.getLockingManagementFactory().createLockingManager(config);
    authorizationSPI = config.getAuthorizationFactory().create(config.getAuthorizationURL());
    init(config);
    featureNameSpaceURI = config.getFeatureNamespace();
    indexQueryStrategy = config.getIndexQueryStrategy();
    transactionBufferSize = config.getTransactionBufferSize();
    transactionsAllocator = new MemoryTransactionsAllocator();
  }

  // Pulls the individual backing stores out of the plugin config.
  private void init(final GeoWavePluginConfig config) {
    dataStore = config.getDataStore();
    dataStoreOptions = config.getDataStoreOptions();
    dataStatisticsStore = config.getDataStatisticsStore();
    indexStore = config.getIndexStore();
    adapterStore = config.getAdapterStore();
    adapterIndexMappingStore = config.getAdapterIndexMappingStore();
    internalAdapterStore = config.getInternalAdapterStore();
  }

  public AuthorizationSPI getAuthorizationSPI() {
    return authorizationSPI;
  }

  public FeatureListenerManager getListenerManager() {
    return listenerManager;
  }

  public IndexQueryStrategySPI getIndexQueryStrategy() {
    return indexQueryStrategy;
  }

  public DataStore getDataStore() {
    return dataStore;
  }

  public DataStoreOptions getDataStoreOptions() {
    return dataStoreOptions;
  }

  public PersistentAdapterStore getAdapterStore() {
    return adapterStore;
  }

  public InternalAdapterStore getInternalAdapterStore() {
    return internalAdapterStore;
  }

  public AdapterIndexMappingStore getAdapterIndexMappingStore() {
    return adapterIndexMappingStore;
  }

  public IndexStore getIndexStore() {
    return indexStore;
  }

  public DataStatisticsStore getDataStatisticsStore() {
    return dataStatisticsStore;
  }

  // When spatialOnly is set, keeps only indices that have spatial dimensions.
  private Index[] filterIndices(final Index[] unfiltered, final boolean spatialOnly) {
    if (spatialOnly) {
      final List filtered = Lists.newArrayList();
      for (int i = 0; i < unfiltered.length; i++) {
        if (SpatialIndexUtils.hasSpatialDimensions(unfiltered[i])) {
          filtered.add(unfiltered[i]);
        }
      }
      return filtered.toArray(new Index[filtered.size()]);
    }
    return unfiltered;
  }

  /** Caches the given indices as the preferred indices for the adapter's feature type name. */
  public void setPreferredIndices(final GeotoolsFeatureDataAdapter adapter, final Index[] indices) {
    preferredIndexes.put(adapter.getFeatureType().getName().toString(), indices);
  }

  /**
   * Resolves the indices to use for an adapter: the cached preference if present, otherwise the
   * indices from the adapter/index mapping store, otherwise the computed preferred indices. The
   * resolution is cached before spatial-only filtering is applied.
   *
   * @param adapter the adapter whose indices are needed
   * @param spatialOnly when true, the result is restricted to indices with spatial dimensions
   * @return the (possibly filtered) indices for the adapter
   */
  protected Index[] getIndicesForAdapter(
      final GeotoolsFeatureDataAdapter adapter,
      final boolean spatialOnly) {
    Index[] currentSelections =
        preferredIndexes.get(adapter.getFeatureType().getName().toString());
    if (currentSelections != null) {
      return filterIndices(currentSelections, spatialOnly);
    }
    final short internalAdapterId = internalAdapterStore.getAdapterId(adapter.getTypeName());
    final AdapterToIndexMapping[] adapterIndexMappings =
        adapterIndexMappingStore.getIndicesForAdapter(internalAdapterId);
    if ((adapterIndexMappings != null) && (adapterIndexMappings.length > 0)) {
      currentSelections =
          Arrays.stream(adapterIndexMappings).map(mapping -> mapping.getIndex(indexStore)).toArray(
              Index[]::new);
    } else {
      currentSelections = getPreferredIndices(adapter);
    }
    preferredIndexes.put(adapter.getFeatureType().getName().toString(), currentSelections);
    return filterIndices(currentSelections, spatialOnly);
  }

  /**
   * Registers a new feature type as a GeoWave adapter. Requires a geometry descriptor; applies the
   * configured feature namespace (if any) before adding the type.
   *
   * @throws UnsupportedOperationException when the schema has no geometry
   */
  @Override
  public void createSchema(final SimpleFeatureType featureType) {
    if (featureType.getGeometryDescriptor() == null) {
      throw new UnsupportedOperationException("Schema missing geometry");
    }
    final FeatureDataAdapter adapter = new FeatureDataAdapter(featureType);
    final short adapterId = internalAdapterStore.addTypeName(adapter.getTypeName());
    if (!adapterStore.adapterExists(adapterId)) {
      if (featureNameSpaceURI != null) {
        adapter.setNamespace(featureNameSpaceURI.toString());
      }
      dataStore.addType(adapter);
    }
  }

  private
InternalGeotoolsFeatureDataAdapter getAdapter(final String typeName) { final InternalGeotoolsFeatureDataAdapter featureAdapter; final Short adapterId = internalAdapterStore.getAdapterId(typeName); if (adapterId == null) { return null; } final InternalDataAdapter adapter = adapterStore.getAdapter(adapterId); if ((adapter == null) || !(adapter instanceof InternalGeotoolsFeatureDataAdapter)) { return null; } featureAdapter = (InternalGeotoolsFeatureDataAdapter) adapter; if (featureNameSpaceURI != null) { featureAdapter.setNamespace(featureNameSpaceURI.toString()); } return featureAdapter; } @Override protected List createTypeNames() throws IOException { final List names = new ArrayList<>(); final InternalDataAdapter[] adapters = adapterStore.getAdapters(); for (final InternalDataAdapter adapter : adapters) { if (adapter.getAdapter() instanceof GeotoolsFeatureDataAdapter) { names.add(((GeotoolsFeatureDataAdapter) adapter.getAdapter()).getFeatureType().getName()); } } return names; } @Override public ContentFeatureSource getFeatureSource(final String typeName) throws IOException { return getFeatureSource(typeName, Transaction.AUTO_COMMIT); } @Override public ContentFeatureSource getFeatureSource(final String typeName, final Transaction tx) throws IOException { return super.getFeatureSource(new NameImpl(null, typeName), tx); } @Override public ContentFeatureSource getFeatureSource(final Name typeName, final Transaction tx) throws IOException { return getFeatureSource(typeName.getLocalPart(), tx); } @Override public ContentFeatureSource getFeatureSource(final Name typeName) throws IOException { return getFeatureSource(typeName.getLocalPart(), Transaction.AUTO_COMMIT); } @Override public void dispose() { if (dataStore instanceof Closeable) { try { ((Closeable) dataStore).close(); } catch (final IOException e) { LOGGER.error("Unable to close geowave datastore", e); } } } @Override protected ContentFeatureSource createFeatureSource(final ContentEntry entry) throws 
IOException { return new GeoWaveFeatureSource( entry, Query.ALL, getAdapter(entry.getTypeName()), transactionsAllocator); } @Override public void removeSchema(final Name typeName) throws IOException { this.removeSchema(typeName.getLocalPart()); } @Override public void removeSchema(final String typeName) throws IOException { dataStore.removeType(typeName); } /** * Used to retrieve the TransactionStateDiff for this transaction. * *

* * @param transaction * @return GeoWaveTransactionState or null if subclass is handling differences * @throws IOException */ protected GeoWaveTransactionState getMyTransactionState( final Transaction transaction, final GeoWaveFeatureSource source) throws IOException { synchronized (transaction) { GeoWaveTransactionState state = null; if (transaction == Transaction.AUTO_COMMIT) { state = new GeoWaveAutoCommitTransactionState(source); } else { state = (GeoWaveTransactionState) transaction.getState(this); if (state == null) { state = new GeoWaveTransactionManagementState( transactionBufferSize, source.getComponents(), transaction, (LockingManagement) lockingManager); transaction.putState(this, state); } } return state; } } public Index[] getPreferredIndices(final GeotoolsFeatureDataAdapter adapter) { final List currentSelectionsList = new ArrayList<>(2); final List indexNames = SimpleFeaturePrimaryIndexConfiguration.getIndexNames(adapter.getFeatureType()); final boolean canUseTime = adapter.hasTemporalConstraints(); /** * Requires the indices to EXIST prior to set up of the adapter. Otherwise, only Geospatial is * chosen and the index Names are ignored. 
*/ CoordinateReferenceSystem selectedCRS = null; try (CloseableIterator indices = indexStore.getIndices()) { while (indices.hasNext()) { final Index index = indices.next(); final CoordinateReferenceSystem indexCRS = GeometryUtils.getIndexCrs(index); if ((selectedCRS != null) && !selectedCRS.equals(indexCRS)) { continue; } if (!indexNames.isEmpty()) { // Only used selected preferred indices if (indexNames.contains(index.getName())) { selectedCRS = indexCRS; currentSelectionsList.add(index); } } final NumericDimensionField[] dims = index.getIndexModel().getDimensions(); boolean hasLat = false; boolean hasLong = false; boolean hasTime = false; for (final NumericDimensionField dim : dims) { hasLat |= SpatialIndexUtils.isLatitudeDimension(dim); hasLong |= SpatialIndexUtils.isLongitudeDimension(dim); hasTime |= dim instanceof TimeField; } if (hasLat && hasLong) { // If not requiring time OR (requires time AND has time // constraints) if (!hasTime || canUseTime) { selectedCRS = indexCRS; currentSelectionsList.add(index); } } } } if (currentSelectionsList.isEmpty()) { currentSelectionsList.add( SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions())); } return currentSelectionsList.toArray(new Index[currentSelectionsList.size()]); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGTDataStoreFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import java.awt.RenderingHints.Key; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.geotools.data.DataStore; import org.geotools.data.DataStoreFactorySpi; import org.geotools.util.factory.FactoryIteratorProvider; import org.geotools.util.factory.GeoTools; import org.locationtech.geowave.core.store.GeoWaveStoreFinder; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Function; import com.google.common.collect.Iterators; /** * This factory is injected by GeoTools using Java SPI and is used to expose GeoWave as a DataStore * to GeoTools. It should be defined within a file * META-INF/services/org.geotools.data.DataStoreFactorySpi to inject this into GeoTools. 
*/ public class GeoWaveGTDataStoreFactory implements DataStoreFactorySpi { private static class DataStoreCacheEntry { private final Map params; private final DataStore dataStore; public DataStoreCacheEntry(final Map params, final DataStore dataStore) { this.params = params; this.dataStore = dataStore; } } public static final String DISPLAY_NAME_PREFIX = "GeoWave Datastore - "; private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveGTDataStoreFactory.class); private final List dataStoreCache = new ArrayList<>(); private final StoreFactoryFamilySpi geowaveStoreFactoryFamily; private static Boolean isAvailable = null; /** * Public "no argument" constructor called by Factory Service Provider (SPI) entry listed in * META-INF/services/org.geotools.data.DataStoreFactorySPI */ public GeoWaveGTDataStoreFactory() { final Collection dataStoreFactories = GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().values(); if (dataStoreFactories.isEmpty()) { LOGGER.error("No GeoWave DataStore found! 
Geotools datastore for GeoWave is unavailable"); geowaveStoreFactoryFamily = null; } else { final Iterator it = dataStoreFactories.iterator(); geowaveStoreFactoryFamily = it.next(); if (it.hasNext()) { GeoTools.addFactoryIteratorProvider(new GeoWaveGTDataStoreFactoryIteratorProvider()); } } } public GeoWaveGTDataStoreFactory(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { this.geowaveStoreFactoryFamily = geowaveStoreFactoryFamily; } // GeoServer seems to call this several times so we should cache a // connection if the parameters are the same, I'm not sure this is entirely // correct but it keeps us from making several connections for the same data // store @Override public DataStore createDataStore(final Map params) throws IOException { // iterate in reverse over the cache so the most recently added is // accessed first for (int index = dataStoreCache.size() - 1; index >= 0; index--) { final DataStoreCacheEntry cacheEntry = dataStoreCache.get(index); if (paramsEqual(params, cacheEntry.params)) { return cacheEntry.dataStore; } } return createNewDataStore(params); } private boolean paramsEqual(final Map params1, final Map params2) { if (params1.size() == params2.size()) { for (final Entry entry : params1.entrySet()) { final Object value = params2.get(entry.getKey()); if (value == null) { if (entry.getValue() == null) { continue; } return false; } else if (!value.equals(entry.getValue())) { return false; } } return true; } return false; } @Override public DataStore createNewDataStore(final Map params) throws IOException { final GeoWaveGTDataStore dataStore; try { dataStore = new GeoWaveGTDataStore(new GeoWavePluginConfig(geowaveStoreFactoryFamily, params)); dataStoreCache.add(new DataStoreCacheEntry(params, dataStore)); } catch (final Exception ex) { throw new IOException("Error initializing datastore", ex); } return dataStore; } @Override public String getDisplayName() { return DISPLAY_NAME_PREFIX + geowaveStoreFactoryFamily.getType().toUpperCase(); } 
@Override public String getDescription() { return "A datastore that uses the GeoWave API for spatial data persistence in " + geowaveStoreFactoryFamily.getType() + ". " + geowaveStoreFactoryFamily.getDescription(); } @Override public Param[] getParametersInfo() { final List params = GeoWavePluginConfig.getPluginParams(geowaveStoreFactoryFamily); return params.toArray(new Param[params.size()]); } @Override public boolean canProcess(final Map params) { try { final Map dataStoreParams = params.entrySet().stream().filter( e -> !GeoWavePluginConfig.BASE_GEOWAVE_PLUGIN_PARAM_KEYS.contains( e.getKey())).collect( HashMap::new, (m, e) -> m.put( e.getKey() == null ? null : e.getKey().toString(), e.getValue() == null ? null : e.getValue().toString()), HashMap::putAll); final Map originalParams = params.entrySet().stream().collect( HashMap::new, (m, e) -> m.put( e.getKey() == null ? null : e.getKey().toString(), e.getValue() == null ? null : e.getValue().toString()), HashMap::putAll); return GeoWaveStoreFinder.exactMatch( geowaveStoreFactoryFamily, dataStoreParams, originalParams); } catch (final Exception e) { LOGGER.info("unable to process params as GeoWave datastore", e); return false; } } @Override public synchronized boolean isAvailable() { if (isAvailable == null) { if (geowaveStoreFactoryFamily == null) { isAvailable = false; } else { try { Class.forName("org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStore"); isAvailable = true; } catch (final ClassNotFoundException e) { isAvailable = false; } } } return isAvailable; } @Override public Map getImplementationHints() { // No implementation hints required at this time return Collections.emptyMap(); } private static class GeoWaveGTDataStoreFactoryIteratorProvider implements FactoryIteratorProvider { @Override public Iterator iterator(final Class cls) { if ((cls != null) && cls.isAssignableFrom(DataStoreFactorySpi.class)) { return (Iterator) new GeoWaveGTDataStoreFactoryIterator(); } return null; } private static 
class GeoWaveGTDataStoreFactoryIterator implements Iterator<DataStoreFactorySpi> {
      private final Iterator<DataStoreFactorySpi> it;

      private GeoWaveGTDataStoreFactoryIterator() {
        final Iterator<StoreFactoryFamilySpi> geowaveDataStoreIt =
            GeoWaveStoreFinder.getRegisteredStoreFactoryFamilies().values().iterator();
        // Skip the first family: the primary SPI-registered factory already represents it.
        geowaveDataStoreIt.next();
        it = Iterators.transform(geowaveDataStoreIt, new GeoWaveStoreToGeoToolsDataStore());
      }

      @Override
      public boolean hasNext() {
        return it.hasNext();
      }

      @Override
      public DataStoreFactorySpi next() {
        return it.next();
      }

      @Override
      public void remove() {}
    }
  }

  /**
   * Below is a set of 9 additional GeoWaveGTDataStoreFactory's, its a bit of a hack, but must be
   * done because the geotools factory registry will re-use instances of the same class, so each
   * individual geowave data store must be registered as a different class (the alternative is
   * dynamic compilation of classes to add to the classloader).
   */
  private static class GeoWaveStoreToGeoToolsDataStore implements
      Function<StoreFactoryFamilySpi, DataStoreFactorySpi> {
    // Counts the families converted so far; selects a distinct factory subclass for each.
    private int i = 0;

    public GeoWaveStoreToGeoToolsDataStore() {}

    @Override
    public DataStoreFactorySpi apply(final StoreFactoryFamilySpi input) {
      i++;
      switch (i) {
        case 1:
          return new GeoWaveGTDataStoreFactory1(input);
        case 2:
          return new GeoWaveGTDataStoreFactory2(input);
        case 3:
          return new GeoWaveGTDataStoreFactory3(input);
        case 4:
          return new GeoWaveGTDataStoreFactory4(input);
        case 5:
          return new GeoWaveGTDataStoreFactory5(input);
        case 6:
          return new GeoWaveGTDataStoreFactory6(input);
        case 7:
          return new GeoWaveGTDataStoreFactory7(input);
        case 8:
          return new GeoWaveGTDataStoreFactory8(input);
        case 9:
          return new GeoWaveGTDataStoreFactory9(input);
      }
      LOGGER.error("Too many GeoWave Datastores registered for GeoTools data store");
      return new GeoWaveGTDataStoreFactory(input);
    }
  }

  private static class GeoWaveGTDataStoreFactory1 extends GeoWaveGTDataStoreFactory {
    public GeoWaveGTDataStoreFactory1(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) {
      super(geowaveStoreFactoryFamily);
    }
  }

  private static class GeoWaveGTDataStoreFactory2 extends
GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory2(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory3 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory3(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory4 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory4(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory5 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory5(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory6 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory6(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory7 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory7(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory8 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory8(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } private static class GeoWaveGTDataStoreFactory9 extends GeoWaveGTDataStoreFactory { public GeoWaveGTDataStoreFactory9(final StoreFactoryFamilySpi geowaveStoreFactoryFamily) { super(geowaveStoreFactoryFamily); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveGTPluginUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Timestamp; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Map; import java.util.TimeZone; import org.geotools.feature.visitor.MaxVisitor; import org.geotools.feature.visitor.MinVisitor; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.geowave.core.geotime.util.ExtractAttributesFilter; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic; import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import com.beust.jcommander.internal.Lists; import com.beust.jcommander.internal.Maps; class GeoWaveGTPluginUtils { protected static Map>> getFieldStats( final DataStatisticsStore statisticsStore, final DataTypeAdapter adapter) { final Map>> adapterFieldStatistics = Maps.newHashMap(); try 
(CloseableIterator>> statistics = statisticsStore.getFieldStatistics(adapter, null, null, null)) { while (statistics.hasNext()) { final FieldStatistic next = (FieldStatistic) statistics.next(); List> fieldStats = adapterFieldStatistics.get(next.getFieldName()); if (fieldStats == null) { fieldStats = Lists.newArrayList(); adapterFieldStatistics.put(next.getFieldName(), fieldStats); } fieldStats.add(next); } } return adapterFieldStatistics; } protected static boolean accepts( final DataStatisticsStore statisticsStore, final DataTypeAdapter adapter, final org.opengis.feature.FeatureVisitor visitor, final org.opengis.util.ProgressListener progress, final SimpleFeatureType featureType) throws IOException { if ((visitor instanceof MinVisitor)) { final ExtractAttributesFilter filter = new ExtractAttributesFilter(); final MinVisitor minVisitor = (MinVisitor) visitor; final Collection attrs = (Collection) minVisitor.getExpression().accept(filter, null); int acceptedCount = 0; final Map>> adapterFieldStatistics = getFieldStats(statisticsStore, adapter); for (final String attr : attrs) { if (!adapterFieldStatistics.containsKey(attr)) { continue; } for (final FieldStatistic stat : adapterFieldStatistics.get(attr)) { if ((stat instanceof TimeRangeStatistic) && (stat.getBinningStrategy() == null)) { final TimeRangeValue statValue = statisticsStore.getStatisticValue((TimeRangeStatistic) stat); if (statValue != null) { minVisitor.setValue(convertToType(attr, new Date(statValue.getMin()), featureType)); acceptedCount++; } } else if (stat instanceof NumericRangeStatistic) { try (CloseableIterator values = statisticsStore.getStatisticValues((NumericRangeStatistic) stat)) { NumericRangeValue statValue = ((NumericRangeStatistic) stat).createEmpty(); while (values.hasNext()) { statValue.merge(values.next()); } if (statValue.isSet()) { minVisitor.setValue(convertToType(attr, statValue.getMin(), featureType)); acceptedCount++; } } } } } if (acceptedCount > 0) { if (progress != null) { 
progress.complete(); } return true; } } else if ((visitor instanceof MaxVisitor)) { final ExtractAttributesFilter filter = new ExtractAttributesFilter(); final MaxVisitor maxVisitor = (MaxVisitor) visitor; final Collection attrs = (Collection) maxVisitor.getExpression().accept(filter, null); int acceptedCount = 0; final Map>> adapterFieldStatistics = getFieldStats(statisticsStore, adapter); for (final String attr : attrs) { for (final FieldStatistic stat : adapterFieldStatistics.get(attr)) { if ((stat instanceof TimeRangeStatistic) && (stat.getBinningStrategy() == null)) { final TimeRangeValue statValue = statisticsStore.getStatisticValue((TimeRangeStatistic) stat); if (statValue != null) { maxVisitor.setValue(convertToType(attr, new Date(statValue.getMax()), featureType)); acceptedCount++; } } else if (stat instanceof NumericRangeStatistic) { try (CloseableIterator values = statisticsStore.getStatisticValues((NumericRangeStatistic) stat)) { NumericRangeValue statValue = ((NumericRangeStatistic) stat).createEmpty(); while (values.hasNext()) { statValue.merge(values.next()); } if (statValue.isSet()) { maxVisitor.setValue(convertToType(attr, statValue.getMax(), featureType)); acceptedCount++; } } } } } if (acceptedCount > 0) { if (progress != null) { progress.complete(); } return true; } } return false; } protected static Object convertToType( final String attrName, final Object value, final SimpleFeatureType featureType) { final AttributeDescriptor descriptor = featureType.getDescriptor(attrName); if (descriptor == null) { return value; } final Class attrClass = descriptor.getType().getBinding(); if (attrClass.isInstance(value)) { return value; } if (Number.class.isAssignableFrom(attrClass) && Number.class.isInstance(value)) { if (Double.class.isAssignableFrom(attrClass)) { return ((Number) value).doubleValue(); } if (Float.class.isAssignableFrom(attrClass)) { return ((Number) value).floatValue(); } if (Long.class.isAssignableFrom(attrClass)) { return ((Number) 
value).longValue(); } if (Integer.class.isAssignableFrom(attrClass)) { return ((Number) value).intValue(); } if (Short.class.isAssignableFrom(attrClass)) { return ((Number) value).shortValue(); } if (Byte.class.isAssignableFrom(attrClass)) { return ((Number) value).byteValue(); } if (BigInteger.class.isAssignableFrom(attrClass)) { return BigInteger.valueOf(((Number) value).longValue()); } if (BigDecimal.class.isAssignableFrom(attrClass)) { return BigDecimal.valueOf(((Number) value).doubleValue()); } } if (Calendar.class.isAssignableFrom(attrClass)) { if (Date.class.isInstance(value)) { final Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC")); c.setTime((Date) value); return c; } } if (Timestamp.class.isAssignableFrom(attrClass)) { if (Date.class.isInstance(value)) { final Timestamp ts = new Timestamp(((Date) value).getTime()); return ts; } } return value; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWavePluginConfig.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import java.io.Serializable;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.geotools.data.DataAccessFactory.Param;
import org.geotools.data.Parameter;
import org.locationtech.geowave.adapter.auth.AuthorizationFactorySPI;
import org.locationtech.geowave.adapter.auth.EmptyAuthorizationFactory;
import org.locationtech.geowave.adapter.vector.index.ChooseHeuristicMatchIndexQueryStrategy;
import org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI;
import org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagementFactory;
import org.locationtech.geowave.core.index.SPIServiceRegistry;
import org.locationtech.geowave.core.store.DataStoreOptions;
import org.locationtech.geowave.core.store.GeoWaveStoreFinder;
import org.locationtech.geowave.core.store.StoreFactoryFamilySpi;
import org.locationtech.geowave.core.store.StoreFactoryOptions;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.config.ConfigOption;
import org.locationtech.geowave.core.store.config.ConfigUtils;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class encapsulates the parameterized configuration that can be provided per GeoWave data
 * store within GeoTools. For GeoServer this configuration can be provided within the data store
 * definition workflow.
 */
public class GeoWavePluginConfig {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWavePluginConfig.class);

  public static final String GEOWAVE_NAMESPACE_KEY = StoreFactoryOptions.GEOWAVE_NAMESPACE_OPTION;
  // name matches the workspace parameter provided to the factory
  protected static final String FEATURE_NAMESPACE_KEY = "namespace";
  protected static final String LOCK_MGT_KEY = "Lock Management";
  protected static final String AUTH_MGT_KEY = "Authorization Management Provider";
  protected static final String AUTH_URL_KEY = "Authorization Data URL";
  protected static final String TRANSACTION_BUFFER_SIZE = "Transaction Buffer Size";
  public static final String QUERY_INDEX_STRATEGY_KEY = "Query Index Strategy";
  public static final String DEFAULT_QUERY_INDEX_STRATEGY =
      ChooseHeuristicMatchIndexQueryStrategy.NAME;

  private static final Param GEOWAVE_NAMESPACE =
      new Param(
          GEOWAVE_NAMESPACE_KEY,
          String.class,
          "The table namespace associated with this data store",
          false);
  private static final Param TRANSACTION_BUFFER_SIZE_PARAM =
      new Param(
          TRANSACTION_BUFFER_SIZE,
          Integer.class,
          "Number of buffered feature insertions before flushing to the datastore when writing using WFS-T (advanced option, for basic usage leave as default).",
          false);
  private static final Param FEATURE_NAMESPACE =
      new Param(
          FEATURE_NAMESPACE_KEY,
          String.class,
          "The overriding namespace for all feature types maintained within this data store",
          false);
  private static final Param LOCK_MGT =
      new Param(
          LOCK_MGT_KEY,
          String.class,
          "WFS-T Locking Support (advanced option, for basic usage leave as default).",
          false,
          null,
          getLockMgtOptions());
  private static final Param AUTH_MGT =
      new Param(
          AUTH_MGT_KEY,
          String.class,
          "The provider to obtain authorization given a user (advanced option, for basic usage leave as default).",
          true,
          null,
          getAuthSPIOptions());
  private static final Param AUTH_URL =
      new Param(
          AUTH_URL_KEY,
          String.class,
          "The providers data URL (advanced option, for basic usage leave as default).",
          false);
  private static final Param QUERY_INDEX_STRATEGY =
      new Param(
          QUERY_INDEX_STRATEGY_KEY,
          String.class,
          "Strategy to choose an index during query processing (advanced option, for basic usage leave as default).",
          false,
          null,
          getIndexQueryStrategyOptions());

  private static final List<Param> BASE_GEOWAVE_PLUGIN_PARAMS =
      Arrays.asList(
          new Param[] {
              FEATURE_NAMESPACE,
              GEOWAVE_NAMESPACE,
              LOCK_MGT,
              AUTH_MGT,
              AUTH_URL,
              TRANSACTION_BUFFER_SIZE_PARAM,
              QUERY_INDEX_STRATEGY});

  public static final List<String> BASE_GEOWAVE_PLUGIN_PARAM_KEYS =
      Arrays.asList(
          BASE_GEOWAVE_PLUGIN_PARAMS.stream().map(p -> p.key).toArray(size -> new String[size]));

  private final PersistentAdapterStore adapterStore;
  private final InternalAdapterStore internalAdapterStore;
  private final DataStore dataStore;
  private final DataStoreOptions dataStoreOptions;
  private final IndexStore indexStore;
  private final DataStatisticsStore dataStatisticsStore;
  private final String name;
  private final URI featureNameSpaceURI;
  private final LockingManagementFactory lockingManagementFactory;
  private final AuthorizationFactorySPI authorizationFactory;
  private final URL authorizationURL;
  private final Integer transactionBufferSize;
  private final IndexQueryStrategySPI indexQueryStrategy;
  private final AdapterIndexMappingStore adapterIndexMappingStore;

  // cache of GeoTools params per store type; built lazily under the class lock
  private static final Map<String, List<Param>> paramMap = new HashMap<>();

  /**
   * Returns the full set of GeoTools {@link Param}s for the given store family: the store's own
   * configuration options plus the base GeoWave plugin params. Results are cached per store type.
   *
   * @param storeFactoryFamily the store family to describe
   * @return the GeoTools parameter descriptors for that family
   */
  public static synchronized List<Param> getPluginParams(
      final StoreFactoryFamilySpi storeFactoryFamily) {
    List<Param> params = paramMap.get(storeFactoryFamily.getType());
    if (params == null) {
      final ConfigOption[] configOptions =
          GeoWaveStoreFinder.getAllOptions(storeFactoryFamily, false);
      params =
          Arrays.stream(configOptions).map(new GeoWaveConfigOptionToGeoToolsConfigOption()).collect(
              Collectors.toList());
      params.addAll(BASE_GEOWAVE_PLUGIN_PARAMS);
      paramMap.put(storeFactoryFamily.getType(), params);
    }
    return params;
  }

  /**
   * Builds a plugin config from CLI-style data store plugin options.
   *
   * @param params the data store plugin options
   * @throws GeoWavePluginException if any option is malformed
   */
  public GeoWavePluginConfig(final DataStorePluginOptions params) throws GeoWavePluginException {
    this(
        params.getFactoryFamily(),
        // converting Map<String, String> to Map<String, Serializable>
        params.getOptionsAsMap().entrySet().stream().collect(
            Collectors.toMap(Map.Entry::getKey, e -> (Serializable) e.getValue())));
  }

  /**
   * Builds a plugin config from a raw parameter map (as provided by GeoTools/GeoServer).
   *
   * @param storeFactoryFamily the store family backing this plugin
   * @param params raw parameter values keyed by the param keys declared above
   * @throws GeoWavePluginException if the authorization URL is malformed
   */
  public GeoWavePluginConfig(
      final StoreFactoryFamilySpi storeFactoryFamily,
      final Map<String, Serializable> params) throws GeoWavePluginException {
    Object param = params.get(GEOWAVE_NAMESPACE_KEY);
    name = storeFactoryFamily.getType() + (param == null ? "" : ("_" + param));
    final Map<String, String> paramStrs = new HashMap<>();
    // first converts serializable objects to String to avoid any issue if
    // there's a difference how geotools is converting objects to how
    // geowave intends to convert objects
    for (final Entry<String, Serializable> e : params.entrySet()) {
      paramStrs.put(e.getKey(), e.getValue() == null ? null : e.getValue().toString());
    }
    param = params.get(FEATURE_NAMESPACE_KEY);
    URI namespaceURI = null;
    if ((param != null) && !param.toString().trim().isEmpty()) {
      try {
        namespaceURI = param instanceof String ? new URI(param.toString()) : (URI) param;
      } catch (final URISyntaxException e) {
        // invalid URI is logged and ignored; the namespace simply stays null
        LOGGER.error("Malformed Feature Namespace URI : " + param, e);
      }
    }
    featureNameSpaceURI = namespaceURI;

    param = params.get(TRANSACTION_BUFFER_SIZE);
    // default WFS-T buffer size when unset or unparseable
    Integer bufferSizeFromParam = 10000;
    if ((param != null) && !param.toString().trim().isEmpty()) {
      try {
        bufferSizeFromParam =
            param instanceof Integer ? (Integer) param : Integer.parseInt(param.toString());
      } catch (final Exception e) {
        LOGGER.error("Malformed buffer size : " + param, e);
      }
    }
    transactionBufferSize = bufferSizeFromParam;

    // pick the first SPI-provided locking factory, or the one matching the param value
    param = params.get(LOCK_MGT_KEY);
    final Iterator<LockingManagementFactory> it = getLockManagementFactoryList();
    LockingManagementFactory factory = null;
    while (it.hasNext()) {
      factory = it.next();
      if ((param == null)
          || param.toString().trim().isEmpty()
          || param.toString().equals(factory.toString())) {
        break;
      }
    }

    final StoreFactoryOptions options =
        ConfigUtils.populateOptionsFromList(
            storeFactoryFamily.getAdapterStoreFactory().createOptionsInstance(),
            paramStrs);
    adapterStore = storeFactoryFamily.getAdapterStoreFactory().createStore(options);
    internalAdapterStore =
        storeFactoryFamily.getInternalAdapterStoreFactory().createStore(options);
    dataStore = storeFactoryFamily.getDataStoreFactory().createStore(options);
    dataStoreOptions = options.getStoreOptions();
    dataStatisticsStore =
        storeFactoryFamily.getDataStatisticsStoreFactory().createStore(options);
    indexStore = storeFactoryFamily.getIndexStoreFactory().createStore(options);
    adapterIndexMappingStore =
        storeFactoryFamily.getAdapterIndexMappingStoreFactory().createStore(options);
    lockingManagementFactory = factory;
    authorizationFactory = getAuthorizationFactory(params);
    authorizationURL = getAuthorizationURL(params);
    indexQueryStrategy = getIndexQueryStrategy(params);
  }

  public String getName() {
    return name;
  }

  /**
   * Resolves the authorization provider named by {@code AUTH_MGT_KEY}, falling back to
   * {@link EmptyAuthorizationFactory} when unset or unmatched.
   */
  public static AuthorizationFactorySPI getAuthorizationFactory(
      final Map<String, Serializable> params) throws GeoWavePluginException {
    final Object param = params.get(AUTH_MGT_KEY);
    final Iterator<AuthorizationFactorySPI> authIt = getAuthorizationFactoryList();
    AuthorizationFactorySPI authFactory = new EmptyAuthorizationFactory();
    while (authIt.hasNext()) {
      authFactory = authIt.next();
      if ((param == null)
          || param.toString().trim().isEmpty()
          || param.toString().equals(authFactory.toString())) {
        break;
      }
    }
    return authFactory;
  }

  public IndexQueryStrategySPI getIndexQueryStrategy() {
    return indexQueryStrategy;
  }

  public PersistentAdapterStore getAdapterStore() {
    return adapterStore;
  }

  public InternalAdapterStore getInternalAdapterStore() {
    return internalAdapterStore;
  }

  public DataStore getDataStore() {
    return dataStore;
  }

  public DataStoreOptions getDataStoreOptions() {
    return dataStoreOptions;
  }

  public AdapterIndexMappingStore getAdapterIndexMappingStore() {
    return adapterIndexMappingStore;
  }

  public IndexStore getIndexStore() {
    return indexStore;
  }

  public DataStatisticsStore getDataStatisticsStore() {
    return dataStatisticsStore;
  }

  /**
   * Resolves the index query strategy named by {@code QUERY_INDEX_STRATEGY_KEY}, defaulting to
   * {@link #DEFAULT_QUERY_INDEX_STRATEGY} when unset.
   */
  public static IndexQueryStrategySPI getIndexQueryStrategy(
      final Map<String, Serializable> params) throws GeoWavePluginException {
    final Object param = params.get(QUERY_INDEX_STRATEGY_KEY);
    final String strategy =
        ((param == null) || param.toString().trim().isEmpty())
            ? DEFAULT_QUERY_INDEX_STRATEGY
            : param.toString();
    final Iterator<IndexQueryStrategySPI> it = getIndexQueryStrategyList();
    while (it.hasNext()) {
      final IndexQueryStrategySPI spi = it.next();
      if (spi.toString().equals(strategy)) {
        return spi;
      }
    }
    // This would only get hit if the default query index strategy is removed from the spi
    // registry.
    return null;
  }

  /**
   * Parses the optional authorization service URL.
   *
   * @return the URL, or {@code null} when unset
   * @throws GeoWavePluginException if the value is not a valid URL
   */
  public static URL getAuthorizationURL(final Map<String, Serializable> params)
      throws GeoWavePluginException {
    final Object param = params.get(AUTH_URL_KEY);
    if ((param == null) || param.toString().trim().isEmpty()) {
      return null;
    } else {
      try {
        return new URL(param.toString());
      } catch (final MalformedURLException e) {
        throw new GeoWavePluginException(
            "Accumulo Plugin: malformed Authorization Service URL " + param.toString());
      }
    }
  }

  protected AuthorizationFactorySPI getAuthorizationFactory() {
    return authorizationFactory;
  }

  protected URL getAuthorizationURL() {
    return authorizationURL;
  }

  public LockingManagementFactory getLockingManagementFactory() {
    return lockingManagementFactory;
  }

  public URI getFeatureNamespace() {
    return featureNameSpaceURI;
  }

  public Integer getTransactionBufferSize() {
    return transactionBufferSize;
  }

  // Parameter.OPTIONS metadata listing the available locking managers
  private static Map<String, List<String>> getLockMgtOptions() {
    final List<String> options = new ArrayList<>();
    final Iterator<LockingManagementFactory> it = getLockManagementFactoryList();
    while (it.hasNext()) {
      options.add(it.next().toString());
    }
    final Map<String, List<String>> map = new HashMap<>();
    map.put(Parameter.OPTIONS, options);
    return map;
  }

  static final List<String> BooleanOptions = Arrays.asList("true", "false");

  // Parameter.OPTIONS metadata listing the available index query strategies
  private static Map<String, List<String>> getIndexQueryStrategyOptions() {
    final List<String> options = new ArrayList<>();
    final Iterator<IndexQueryStrategySPI> it = getIndexQueryStrategyList();
    while (it.hasNext()) {
      options.add(it.next().toString());
    }
    final Map<String, List<String>> map = new HashMap<>();
    map.put(Parameter.OPTIONS, options);
    return map;
  }

  // Parameter.OPTIONS metadata listing the available authorization providers
  private static Map<String, List<String>> getAuthSPIOptions() {
    final List<String> options = new ArrayList<>();
    final Iterator<AuthorizationFactorySPI> it = getAuthorizationFactoryList();
    while (it.hasNext()) {
      options.add(it.next().toString());
    }
    final Map<String, List<String>> map = new HashMap<>();
    map.put(Parameter.OPTIONS, options);
    return map;
  }

  private static Iterator<LockingManagementFactory> getLockManagementFactoryList() {
    return new SPIServiceRegistry(GeoWavePluginConfig.class).load(LockingManagementFactory.class);
  }

  private static Iterator<AuthorizationFactorySPI> getAuthorizationFactoryList() {
    return new SPIServiceRegistry(GeoWavePluginConfig.class).load(AuthorizationFactorySPI.class);
  }

  private static Iterator<IndexQueryStrategySPI> getIndexQueryStrategyList() {
    return new SPIServiceRegistry(GeoWavePluginConfig.class).load(IndexQueryStrategySPI.class);
  }

  /** Translates a GeoWave {@link ConfigOption} into a GeoTools {@link Param} descriptor. */
  private static class GeoWaveConfigOptionToGeoToolsConfigOption
      implements
      Function<ConfigOption, Param> {

    @Override
    public Param apply(final ConfigOption input) {
      if (input.isPassword()) {
        // mark password options so GeoTools UIs mask the value
        return new Param(
            input.getName(),
            String.class,
            input.getDescription(),
            !input.isOptional(),
            "mypassword",
            Collections.singletonMap(Parameter.IS_PASSWORD, Boolean.TRUE));
      }
      if (input.getType().isPrimitive() && (input.getType() == boolean.class)) {
        // booleans become a required true/false choice with a default of "true"
        return new Param(
            input.getName(),
            input.getType(),
            input.getDescription(),
            true,
            "true",
            Collections.singletonMap(Parameter.OPTIONS, BooleanOptions));
      }
      return new Param(
          input.getName(),
          input.usesStringConverter() ? String.class : input.getType(),
          input.getDescription(),
          !input.isOptional());
    }
  }
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; /** A basic, general exception thrown within the GeoWave plugin to GeoTools. */ public class GeoWavePluginException extends Exception { private static final long serialVersionUID = -8043877412333078281L; public GeoWavePluginException(final String msg) { super(msg); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveQueryCaps.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import org.geotools.data.QueryCapabilities; import org.opengis.filter.sort.SortBy; /** A definition of the Query capabilities provided to GeoTools by the GeoWave data store. */ public class GeoWaveQueryCaps extends QueryCapabilities { public GeoWaveQueryCaps() {} // TODO implement sorting... @Override public boolean supportsSorting(final SortBy[] sortAttributes) { // called for every WFS-T operation. Without sorting requests, the // argument is empty or null // returning false fails the operation, disabling any capability of // writing. return (sortAttributes == null) || (sortAttributes.length == 0); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/InternalProcessFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import org.geotools.process.factory.AnnotatedBeanProcessFactory;
import org.geotools.text.Text;
import org.locationtech.geowave.adapter.vector.render.InternalDistributedRenderProcess;

/**
 * GeoTools process factory registering GeoWave's internal processes (currently only
 * {@link InternalDistributedRenderProcess}) under the "internal" namespace.
 */
public class InternalProcessFactory extends AnnotatedBeanProcessFactory {
  public InternalProcessFactory() {
    // title, namespace prefix, and the annotated process bean classes to expose
    super(
        Text.text("Internal GeoWave Process Factory"),
        "internal",
        InternalDistributedRenderProcess.class);
  }

  /** Always available; this factory has no external runtime requirements to check. */
  @Override
  public boolean isAvailable() {
    return true;
  }
}

================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/QueryIssuer.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.opengis.feature.simple.SimpleFeature; import org.opengis.filter.Filter; public interface QueryIssuer { CloseableIterator query( Index index, BasicQueryByClass constraints, boolean spatialOnly); Filter getFilter(); Integer getLimit(); } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/SubsampleProcess.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import org.geotools.data.Query;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.process.ProcessException;
import org.geotools.process.factory.DescribeParameter;
import org.geotools.process.factory.DescribeProcess;
import org.geotools.process.factory.DescribeResult;
import org.geotools.util.factory.Hints;
import org.opengis.coverage.grid.GridGeometry;

/**
 * This class can be used as a GeoTools Render Transform ('geowave:Subsample') within an SLD on any
 * layer that uses the GeoWave Data Store. An example SLD is provided
 * (example-slds/SubsamplePoints.sld). The pixel-size allows you to skip more than a single pixel.
 * For example, a pixel size of 3 would skip an estimated 3x3 pixel cell in GeoWave's row IDs. Note
 * that rows are only skipped when a feature successfully passes filters.
 */
@DescribeProcess(
    title = "SubsampleAtScreenResolution",
    description = "This process will enable GeoWave to subsample WMS requests based on pixel resolution to not oversample data. This will efficiently render overlapping point geometry that would otherwise be hidden but it assumes an opaque style and does not take transparency into account. It will use the centroid for other geometry types than point which can produce visual artifacts - distributed rendering is an alternative approach to efficiently render lines and polygons")
public class SubsampleProcess {
  // Query hint keys read by the GeoWave feature reader to perform subsampling
  public static final Hints.Key SUBSAMPLE_ENABLED = new Hints.Key(Boolean.class);
  public static final Hints.Key PIXEL_SIZE = new Hints.Key(Double.class);
  public static final Hints.Key OUTPUT_BBOX = new Hints.Key(ReferencedEnvelope.class);
  public static final Hints.Key OUTPUT_WIDTH = new Hints.Key(Integer.class);
  public static final Hints.Key OUTPUT_HEIGHT = new Hints.Key(Integer.class);

  /**
   * Render-transform entry point. Returns the input collection unchanged; the real work happens
   * in {@link #invertQuery} where the screen geometry is pushed into the query hints.
   */
  @DescribeResult(
      name = "result",
      description = "This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform")
  public SimpleFeatureCollection execute(
      @DescribeParameter(
          name = "data",
          description = "Feature collection containing the data") final SimpleFeatureCollection features,
      @DescribeParameter(
          name = "outputBBOX",
          description = "Georeferenced bounding box of the output") final ReferencedEnvelope argOutputEnv,
      @DescribeParameter(
          name = "outputWidth",
          description = "Width of the output raster") final Integer argOutputWidth,
      @DescribeParameter(
          name = "outputHeight",
          description = "Height of the output raster") final Integer argOutputHeight,
      @DescribeParameter(
          name = "pixelSize",
          description = "The pixel size to base subsampling on") final Double pixelSize)
      throws ProcessException {
    // vector-to-vector render transform that is just a pass through - the
    // key is to add map to screen transform within invertQuery
    return features;
  }

  /**
   * Called by the GeoTools renderer to rewrite the upstream query: stores the output raster
   * dimensions, bounding box, and optional pixel size as hints for the GeoWave data store.
   */
  public Query invertQuery(
      @DescribeParameter(
          name = "outputBBOX",
          description = "Georeferenced bounding box of the output") final ReferencedEnvelope argOutputEnv,
      @DescribeParameter(
          name = "outputWidth",
          description = "Width of the output raster") final Integer argOutputWidth,
      @DescribeParameter(
          name = "outputHeight",
          description = "Height of the output raster") final Integer argOutputHeight,
      @DescribeParameter(
          name = "pixelSize",
          description = "The pixel size to base subsampling on") final Double pixelSize,
      final Query targetQuery,
      final GridGeometry targetGridGeometry) throws ProcessException {
    // add to the query hints
    targetQuery.getHints().put(SUBSAMPLE_ENABLED, true);
    targetQuery.getHints().put(OUTPUT_WIDTH, argOutputWidth);
    targetQuery.getHints().put(OUTPUT_HEIGHT, argOutputHeight);
    targetQuery.getHints().put(OUTPUT_BBOX, argOutputEnv);
    if (pixelSize != null) {
      targetQuery.getHints().put(PIXEL_SIZE, pixelSize);
    }
    return targetQuery;
  }
}

================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/AbstractLockingManagement.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin.lock;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import org.geotools.data.FeatureLock;
import org.geotools.data.Transaction;
import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Simplifies Lock management from the more complex Geotools approach which is used in several
 * different scenarios (e.g. directory management, wfs-t, etc.)
 *
 * <p>Implementers implement three abstract methods. The Geotools still helps with management,
 * providing a locking source.
 */
public abstract class AbstractLockingManagement implements LockingManagement {
  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLockingManagement.class);

  /** Environment variable naming an alternative locking-management implementation class. */
  public static final String LOCKING_MANAGEMENT_CLASS = "GEOWAVE_LM";
  public static final Object LOCKING_MANAGEMENT_CLASS_LCK = new Object();

  /**
   * Resolves the locking management implementation: the class named by the GEOWAVE_LM environment
   * variable when set and valid, otherwise a {@link MemoryLockManager}.
   *
   * @param pluginConfig the plugin configuration passed to the implementation's constructor
   * @return the locking management instance (never null)
   */
  public static AbstractLockingManagement getLockingManagement(
      final GeoWavePluginConfig pluginConfig) {
    synchronized (LOCKING_MANAGEMENT_CLASS_LCK) {
      final String val = System.getenv(LOCKING_MANAGEMENT_CLASS);
      if (val == null) {
        return new MemoryLockManager(pluginConfig);
      } else {
        try {
          final Class<?> lockManagerClass = Class.forName(val);
          if (!AbstractLockingManagement.class.isAssignableFrom(lockManagerClass)) {
            throw new IllegalArgumentException("Invalid LockManagement class " + val);
          } else {
            final Constructor<?> cons =
                lockManagerClass.getConstructor(GeoWavePluginConfig.class);
            return (AbstractLockingManagement) cons.newInstance(pluginConfig);
          }
        } catch (final Exception ex) {
          // HP Fortify "Log Forging" false positive
          // What Fortify considers "user input" comes only
          // from users with OS-level access anyway
          LOGGER.error("Cannot instantiate lock management class " + val, ex);
          return new MemoryLockManager(pluginConfig);
        }
      }
    }
  }

  private static final Set<String> EMPTY_SET = new HashSet<>();

  @Override
  public void lock(final Transaction transaction, final String featureID) {
    // AUTO_COMMIT transactions carry no authorizations; default expiry is one minute
    lock(
        transaction,
        featureID,
        transaction == Transaction.AUTO_COMMIT ? EMPTY_SET : transaction.getAuthorizations(),
        1 /* minutes */);
  }

  // Obtains (or creates and registers) the AuthorizedLock state for the transaction, then locks.
  private void lock(
      final Transaction transaction,
      final String featureID,
      final Set<String> authorizations,
      final long expiryInMinutes) {
    AuthorizedLock lock =
        transaction == Transaction.AUTO_COMMIT
            ? null
            : (AuthorizedLock) transaction.getState(this);
    if (lock == null) {
      lock = new AuthorizedLock(this, authorizations, expiryInMinutes);
      if (transaction != Transaction.AUTO_COMMIT) {
        transaction.putState(this, lock);
      }
    }
    lock(lock, featureID);
  }

  // Mirror of the private lock(...) helper for releasing a single feature lock.
  private void unlock(
      final Transaction transaction,
      final String featureID,
      final Set<String> authorizations,
      final long expiryInMinutes) {
    AuthorizedLock lock =
        transaction == Transaction.AUTO_COMMIT
            ? null
            : (AuthorizedLock) transaction.getState(this);
    if (lock == null) {
      lock = new AuthorizedLock(this, authorizations, expiryInMinutes);
      if (transaction != Transaction.AUTO_COMMIT) {
        transaction.putState(this, lock);
      }
    }
    unlock(lock, featureID);
  }

  @Override
  public void lockFeatureID(
      final String typeName,
      final String featureID,
      final Transaction transaction,
      final FeatureLock featureLock) {
    final Set<String> set = new LinkedHashSet<>();
    set.add(featureLock.getAuthorization());
    this.lock(transaction, featureID, set, featureLock.getDuration());
  }

  @Override
  public void unLockFeatureID(
      final String typeName,
      final String featureID,
      final Transaction transaction,
      final FeatureLock featureLock) throws IOException {
    final Set<String> set = new LinkedHashSet<>();
    set.add(featureLock.getAuthorization());
    this.unlock(transaction, featureID, set, featureLock.getDuration());
  }

  @Override
  public boolean release(final String authID, final Transaction transaction) throws IOException {
    AuthorizedLock lock =
        transaction == Transaction.AUTO_COMMIT
            ? null
            : (AuthorizedLock) transaction.getState(this);
    if (lock == null) {
      lock = new AuthorizedLock(this, authID, 1 /* minutes */);
    }
    releaseAll(lock);
    return true;
  }

  @Override
  public boolean refresh(final String authID, final Transaction transaction) throws IOException {
    AuthorizedLock lock =
        transaction == Transaction.AUTO_COMMIT
            ? null
            : (AuthorizedLock) transaction.getState(this);
    if (lock == null) {
      lock = new AuthorizedLock(this, authID, 1 /* minutes */);
    }
    resetAll(lock);
    return true;
  }

  /**
   * If already locked and request lock has proper authorization
   * {@link AuthorizedLock#isAuthorized}, then return. If already locked and request does not have
   * proper authorization, block until the lock is released or expired. If not already locked,
   * create the lock.
   *
   * <p>Make sure there is some mechanism for expired locks to be discovered and released so that
   * clients are not blocked indefinitely.
   *
   * @param lock
   * @param featureID
   */
  public abstract void lock(AuthorizedLock lock, String featureID);

  /**
   * If authorized {@link AuthorizedLock#isAuthorized}, unlock the featureID
   *
   * @param lock
   * @param featureID
   */
  public abstract void unlock(AuthorizedLock lock, String featureID);

  /**
   * Release all locks associated with a transaction or associated authorizations. Occurs on commit
   * and rollback. Basically, invalidate all authorized locks {@link AuthorizedLock#isAuthorized}
   *
   * @param lock
   */
  public abstract void releaseAll(AuthorizedLock lock);

  /**
   * Reset all locks associated with a transaction. Occurs on commit and rollback. Basically, call
   * {@link AuthorizedLock#resetExpireTime} for all authorized locks
   * {@link AuthorizedLock#isAuthorized}
   *
   * @param lock
   */
  public abstract void resetAll(AuthorizedLock lock);
}

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.lock; import java.io.IOException; import java.util.HashSet; import java.util.Set; import java.util.UUID; import org.geotools.data.Transaction; import org.geotools.data.Transaction.State; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * Instances of this class represent a the lock constraints associated with one or more feature * instances. * *

When serializing this object, note the reserialization requires setting the lockingManagement * attribute. */ @SuppressFBWarnings({"SE_TRANSIENT_FIELD_NOT_RESTORED"}) public class AuthorizedLock implements State, java.io.Serializable { /** */ private static final long serialVersionUID = -1421146354351269795L; private final Set authorizations = new HashSet<>(); private final String ID = UUID.randomUUID().toString(); private long expireTime = System.currentTimeMillis(); private transient AbstractLockingManagement lockingManagement; private long expiryInMinutes; public AuthorizedLock() {} public AuthorizedLock( final AbstractLockingManagement lockingManagement, final long expiryInMinutes) { super(); expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000); this.expiryInMinutes = expiryInMinutes; this.lockingManagement = lockingManagement; } public AuthorizedLock( final AbstractLockingManagement lockingManagement, final String authorization, final long expiryInMinutes) { super(); authorizations.add(authorization); expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000); this.expiryInMinutes = expiryInMinutes; this.lockingManagement = lockingManagement; } public AuthorizedLock( final AbstractLockingManagement lockingManagement, final Set authorizations, final long expiryInMinutes) { super(); this.authorizations.addAll(authorizations); expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000); this.expiryInMinutes = expiryInMinutes; this.lockingManagement = lockingManagement; } public AbstractLockingManagement getLockingManagement() { return lockingManagement; } public void setLockingManagement(final AbstractLockingManagement lockingManagement) { this.lockingManagement = lockingManagement; } public void resetExpireTime() { expireTime = System.currentTimeMillis() + (expiryInMinutes * 60000); } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((ID == null) ? 
0 : ID.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final AuthorizedLock other = (AuthorizedLock) obj; if (ID == null) { if (other.ID != null) { return false; } } else if (!ID.equals(other.ID)) { return false; } return true; } public long getExpireTime() { return expireTime; } public boolean isStale() { return expireTime < System.currentTimeMillis(); } @Override public synchronized void setTransaction(final Transaction transaction) { if (transaction != null) { resetExpireTime(); authorizations.addAll(transaction.getAuthorizations()); } } @Override public synchronized void addAuthorization(final String AuthID) throws IOException { authorizations.add(AuthID); } public synchronized void invalidate() { expireTime = 0; notify(); } public boolean isAuthorized(final AuthorizedLock lock) { boolean ok = false; for (final String auth : lock.authorizations) { ok |= isAuthorized(auth); } return ok || ID.equals(lock.ID); } public boolean isAuthorized(final String authID) { return authorizations.contains(authID); } @Override public synchronized void commit() throws IOException { authorizations.clear(); // need to remove authorizations to release // only those // locks that this transaction created (same ID) lockingManagement.releaseAll(this); invalidate(); } @Override public synchronized void rollback() { authorizations.clear(); // need to remove authorizations to release // only those // locks that this transaction created (same ID) lockingManagement.releaseAll(this); invalidate(); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/LockingManagement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin.lock;

import org.geotools.data.LockingManager;
import org.geotools.data.Transaction;
import org.geotools.data.shapefile.index.LockManager;

/**
 * An extension to {@link LockManager} to support requesting a lock on a specific feature under a
 * provided transaction. Implementers must check transaction state as AUTO_COMMIT. Locking under an
 * AUTO_COMMIT is not authorized.
 */
public interface LockingManagement extends LockingManager {
  /**
   * Lock a feature for a provided transaction. This is typically used for modifications (updates).
   *
   * @param transaction the transaction requesting the lock; AUTO_COMMIT must not hold locks
   * @param featureID the identifier of the feature to lock
   */
  public void lock(Transaction transaction, String featureID);
}

================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/LockingManagementFactory.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.lock; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig; /** * Factories are used with the {@link java.util.ServiceLoader} approach to discover locking * management strategies. */ public interface LockingManagementFactory { public LockingManagement createLockingManager(GeoWavePluginConfig plugginData); } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/MemoryLockManager.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.lock; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** Single GeoServer lock support. In a clustered model, do not use. */ public class MemoryLockManager extends AbstractLockingManagement { private static final Logger LOGGER = LoggerFactory.getLogger(MemoryLockManager.class); private static final Map> LOCKS = new HashMap<>(); private final Map locks; public MemoryLockManager(final String instanceName) { Map lockSet; synchronized (LOCKS) { lockSet = LOCKS.get(instanceName); if (lockSet == null) { lockSet = new HashMap<>(); LOCKS.put(instanceName, lockSet); } } locks = lockSet; } public MemoryLockManager(final GeoWavePluginConfig pluginConfig) { this(pluginConfig.getName()); } @Override public void releaseAll(final AuthorizedLock lock) { final ArrayList toRelease = new ArrayList<>(); synchronized (locks) { final Iterator> it = locks.entrySet().iterator(); while (it.hasNext()) { final Entry entry = it.next(); if (entry.getValue().equals(lock) || entry.getValue().isAuthorized(lock)) { toRelease.add(entry.getValue()); it.remove(); } } } for (final AuthorizedLock lockToRelease : toRelease) { lockToRelease.invalidate(); } } /** * Release all locks associated with a transaction. 
Occurs on commit and rollback * * @param lock */ @Override public void resetAll(final AuthorizedLock lock) { final ArrayList toRelease = new ArrayList<>(); synchronized (locks) { final Iterator> it = locks.entrySet().iterator(); while (it.hasNext()) { final Entry entry = it.next(); if (entry.getValue().equals(lock) || entry.getValue().isAuthorized(lock)) { toRelease.add(entry.getValue()); } } } for (final AuthorizedLock lockToRelease : toRelease) { lockToRelease.resetExpireTime(); } } @SuppressFBWarnings( value = {"MWN_MISMATCHED_WAIT"}, justification = "incorrect flag; lock held (in synchronized block)") @Override public void lock(final AuthorizedLock lock, final String featureID) { AuthorizedLock featureLock = null; synchronized (locks) { featureLock = locks.get(featureID); if ((featureLock == null) || featureLock.isStale()) { featureLock = lock; locks.put(featureID, lock); return; } else if (featureLock.isAuthorized(lock)) { return; } } // want to loop until this 'lock' is the 'winning' lock. while (featureLock != lock) { // at this point, some other transaction may have the lock synchronized (featureLock) { // check if stale, which occurs when the transaction is // completed. while (!featureLock.isStale()) { try { // only wait a little, because the feature lock could be // stale // flagged as mismatched wait...but this is correct featureLock.wait( Math.min(5000, featureLock.getExpireTime() - System.currentTimeMillis())); } catch (final InterruptedException ex) { } catch (final Exception e) { LOGGER.error( "Memory lock manager filed to wait for lock release. Will cycle till lock is stale.", e); } } } synchronized (locks) { featureLock = locks.get(featureID); // did this code win the race to get the lock for the feature // ID? 
if ((featureLock == null) || featureLock.isStale()) { locks.put(featureID, lock); featureLock = lock; } } } } @Override public boolean exists(final String authID) { synchronized (locks) { final Iterator> it = locks.entrySet().iterator(); while (it.hasNext()) { final Entry entry = it.next(); if (entry.getValue().isAuthorized(authID) || !entry.getValue().isStale()) { return true; } } } return false; } @Override public void unlock(final AuthorizedLock lock, final String featureID) { AuthorizedLock featureLock = null; boolean notify = false; synchronized (locks) { featureLock = locks.get(featureID); if ((featureLock != null) && featureLock.isAuthorized(lock)) { locks.remove(featureID); notify = true; } } if (notify) { featureLock.invalidate(); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/lock/MemoryLockManagerFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.lock; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig; public class MemoryLockManagerFactory implements LockingManagementFactory { @Override public LockingManagement createLockingManager(final GeoWavePluginConfig plugginData) { return new MemoryLockManager(plugginData); } @Override public String toString() { return "memory"; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/AbstractTransactionManagement.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.transaction; import org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents; public abstract class AbstractTransactionManagement implements GeoWaveTransaction { protected final GeoWaveDataStoreComponents components; public AbstractTransactionManagement(final GeoWaveDataStoreComponents components) { super(); this.components = components; } @Override public StatisticsCache getDataStatistics() { return new StatisticsCache( components.getStatsStore(), components.getAdapter(), composeAuthorizations()); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveAutoCommitTransactionState.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin.transaction;

import java.io.IOException;
import org.geotools.data.Transaction;
import org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents;
import org.locationtech.geowave.adapter.vector.plugin.GeoWaveFeatureSource;

/**
 * Transaction state used under Transaction.AUTO_COMMIT: every change is written immediately, so
 * commit/rollback/authorization hooks are intentionally no-ops.
 */
public class GeoWaveAutoCommitTransactionState implements GeoWaveTransactionState {

  private final GeoWaveDataStoreComponents components;

  public GeoWaveAutoCommitTransactionState(final GeoWaveFeatureSource source) {
    components = source.getComponents();
  }

  @Override
  public void setTransaction(final Transaction transaction) {}

  /** @see org.geotools.data.Transaction.State#addAuthorization(java.lang.String) */
  @Override
  public void addAuthorization(final String AuthID) throws IOException {
    // no-op: auto-commit does not track lock authorizations
  }

  /**
   * Will apply differences to store.
   *
   * @see org.geotools.data.Transaction.State#commit()
   */
  @Override
  public void commit() throws IOException {
    // no-op: changes were already written at the time of each operation
  }

  /** @see org.geotools.data.Transaction.State#rollback() */
  @Override
  public void rollback() throws IOException {}

  @Override
  public GeoWaveTransaction getGeoWaveTransaction(final String typeName) {
    // auto-commit has no buffered state; hand out a write-through transaction
    return new GeoWaveEmptyTransaction(components);
  }

  @Override
  public String toString() {
    return "GeoWaveAutoCommitTransactionState";
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveEmptyTransaction.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.transaction; import java.io.IOException; import org.geotools.data.Transaction; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.util.factory.Hints; import org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents; import org.locationtech.geowave.core.store.CloseableIterator; import org.opengis.feature.simple.SimpleFeature; import org.opengis.filter.Filter; /** * Commit changes immediately */ public class GeoWaveEmptyTransaction extends AbstractTransactionManagement implements GeoWaveTransaction { /** Create an empty Diff */ public GeoWaveEmptyTransaction(final GeoWaveDataStoreComponents components) { super(components); } /** Return true if transaction is empty */ @Override public boolean isEmpty() { return true; } @Override public void flush() throws IOException {} /** * Record a modification to the indicated fid * * @param fid * @param original the original feature(prior state) * @param updated the update feature replacement feature; null to indicate remove */ @Override public void modify(final String fid, final SimpleFeature original, final SimpleFeature updated) throws IOException { // point move? 
if (!updated.getBounds().equals(original.getBounds())) { components.remove(original, this); components.writeCommit(updated, new GeoWaveEmptyTransaction(components)); } else { components.writeCommit(updated, new GeoWaveEmptyTransaction(components)); } final ReferencedEnvelope bounds = new ReferencedEnvelope(); bounds.include(updated.getBounds()); bounds.include(original.getBounds()); components.getGTstore().getListenerManager().fireFeaturesChanged( updated.getFeatureType().getTypeName(), Transaction.AUTO_COMMIT, bounds, true); } @Override public void add(final String fid, final SimpleFeature feature) throws IOException { feature.getUserData().put(Hints.USE_PROVIDED_FID, true); if (feature.getUserData().containsKey(Hints.PROVIDED_FID)) { final String providedFid = (String) feature.getUserData().get(Hints.PROVIDED_FID); feature.getUserData().put(Hints.PROVIDED_FID, providedFid); } else { feature.getUserData().put(Hints.PROVIDED_FID, feature.getID()); } components.writeCommit(feature, this); components.getGTstore().getListenerManager().fireFeaturesAdded( components.getAdapter().getFeatureType().getTypeName(), Transaction.AUTO_COMMIT, ReferencedEnvelope.reference(feature.getBounds()), true); } @Override public void remove(final String fid, final SimpleFeature feature) throws IOException { components.remove(feature, this); components.getGTstore().getListenerManager().fireFeaturesRemoved( feature.getFeatureType().getTypeName(), Transaction.AUTO_COMMIT, ReferencedEnvelope.reference(feature.getBounds()), true); } public String getID() { return ""; } @Override public CloseableIterator interweaveTransaction( final Integer limit, final Filter filter, final CloseableIterator it) { return it; } @Override public String[] composeAuthorizations() { return components.getGTstore().getAuthorizationSPI().getAuthorizations(); } @Override public String composeVisibility() { return ""; } } ================================================ FILE: 
extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransaction.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin.transaction;

import java.io.IOException;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.filter.Filter;

/**
 * Represent the Writer's pluggable strategy of a transaction
 */
public interface GeoWaveTransaction {

  /** Flush in memory records to store for query processing. */
  public void flush() throws IOException;

  /** @return true if transaction is empty */
  public boolean isEmpty();

  /**
   * Record a modification to the indicated fid
   *
   * @param fid the feature ID
   * @param old the original feature
   * @param updated the replacement feature; null to indicate remove
   */
  public void modify(String fid, SimpleFeature old, SimpleFeature updated) throws IOException;

  /** Record the addition of feature {@code f} under the given feature ID. */
  public void add(String fid, SimpleFeature f) throws IOException;

  /** Record the removal of the given feature. */
  public void remove(String fid, SimpleFeature feature) throws IOException;

  /** @return the authorizations in effect for reads under this transaction */
  public String[] composeAuthorizations();

  /** @return the visibility expression applied to writes under this transaction */
  public String composeVisibility();

  /** @return a statistics cache scoped to this transaction's authorizations */
  public StatisticsCache getDataStatistics();

  /**
   * Merge this transaction's in-flight changes into a store query result: buffered adds are
   * included, buffered removals excluded, and modified features replaced in the stream.
   */
  public CloseableIterator interweaveTransaction(
      final Integer limit,
      final Filter filter,
      final CloseableIterator it);
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransactionManagement.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.transaction; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.tuple.Pair; import org.geotools.data.Transaction; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.util.factory.Hints; import org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents; import org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagement; import org.locationtech.geowave.core.store.CloseableIterator; import org.opengis.feature.simple.SimpleFeature; import org.opengis.filter.Filter; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.Multimap; /** * Captures changes made to a FeatureStore prior to being committed. * *

This is used to simulate the functionality of a database including transaction independence. */ public class GeoWaveTransactionManagement extends AbstractTransactionManagement implements GeoWaveTransaction { protected static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveTransactionManagement.class); /** Map of modified features; by feature id */ private final Map modifiedFeatures = new ConcurrentHashMap<>(); private final Map addedFeatures = new ConcurrentHashMap<>(); private final Multimap removedFeatures = LinkedListMultimap.create(); private StatisticsCache statsCache = null; /** List of added feature ids; values stored in added above */ private final Set addedFidList = new HashSet<>(); private int maxAdditionBufferSize = 10000; private final LockingManagement lockingManager; private final Transaction transaction; private final String txID; private final String typeName; private static class ModifiedFeature { public ModifiedFeature( final SimpleFeature oldFeature, final SimpleFeature newFeature, final boolean alreadyWritten) { super(); this.newFeature = newFeature; this.oldFeature = oldFeature; this.alreadyWritten = alreadyWritten; } final boolean alreadyWritten; final SimpleFeature newFeature; final SimpleFeature oldFeature; } /** Simple object used for locking */ final Object mutex; /** * Create an empty Diff * * @throws IOException */ public GeoWaveTransactionManagement( final int maxAdditionBufferSize, final GeoWaveDataStoreComponents components, final String typeName, final Transaction transaction, final LockingManagement lockingManager, final String txID) throws IOException { super(components); this.maxAdditionBufferSize = maxAdditionBufferSize; mutex = this; this.typeName = typeName; this.transaction = transaction; this.lockingManager = lockingManager; this.txID = txID; } /** * Check if modifiedFeatures and addedFeatures are empty. 
* * @return true if Diff is empty */ @Override public boolean isEmpty() { synchronized (mutex) { return modifiedFeatures.isEmpty() && addedFidList.isEmpty() && removedFeatures.isEmpty() && addedFeatures.isEmpty(); } } /** Clear diff - called during rollback. */ public void clear() { synchronized (mutex) { addedFidList.clear(); modifiedFeatures.clear(); removedFeatures.clear(); addedFeatures.clear(); } } /** * Record a modification to the indicated feature ID. * * @param fid the feature ID * @param original original feature * @param updated replacement feature; null to indicate remove */ @Override public void modify(final String fid, final SimpleFeature original, final SimpleFeature updated) throws IOException { lockingManager.lock(transaction, fid); // assumptions: (1) will not get a modification to a deleted feature // thus, only contents of the removed features collection for this // feature relate to moving bounds. // @see {@link #interweaveTransaction(CloseableIterator)} // // Cannot assume that a modification occurs for a newly added fid // TODO: skipping this for now. creates a problem because // the row IDs may or maynot change. If they change completely, then // it is not an issue. However, a mix of changed or unchanged means // that the original rows become invisible for the duration of the // transaction // The problem now is that the bounded query may not return the moved // record, if it has moved outside // the query space. oh well! final ModifiedFeature modRecord = modifiedFeatures.get(fid); if (!updated.getBounds().equals(original.getBounds())) { // retain original--original position is removed later. 
// The original feature needs to be excluded in a query // and removed at commit removedFeatures.put(fid, original); } if (((modRecord != null) && modRecord.alreadyWritten) || addedFidList.contains(fid)) { components.writeCommit(updated, this); synchronized (mutex) { if (modRecord != null) { modifiedFeatures.put(fid, new ModifiedFeature(modRecord.oldFeature, updated, true)); } else { LOGGER.error("modRecord was set to null in another thread; synchronization issue"); } } } else { synchronized (mutex) { modifiedFeatures.put( fid, new ModifiedFeature( modRecord == null ? original : modRecord.oldFeature, updated, false)); } } final ReferencedEnvelope bounds = new ReferencedEnvelope((CoordinateReferenceSystem) null); bounds.include(original.getBounds()); bounds.include(updated.getBounds()); components.getGTstore().getListenerManager().fireFeaturesChanged( components.getAdapter().getFeatureType().getTypeName(), transaction, bounds, false); } @Override public void add(final String fid, final SimpleFeature feature) throws IOException { feature.getUserData().put(Hints.USE_PROVIDED_FID, true); if (feature.getUserData().containsKey(Hints.PROVIDED_FID)) { final String providedFid = (String) feature.getUserData().get(Hints.PROVIDED_FID); feature.getUserData().put(Hints.PROVIDED_FID, providedFid); } else { feature.getUserData().put(Hints.PROVIDED_FID, feature.getID()); } if (addedFeatures.size() >= maxAdditionBufferSize) { flushAddsToStore(true); } addedFeatures.put(fid, feature); components.getGTstore().getListenerManager().fireFeaturesAdded( components.getAdapter().getFeatureType().getTypeName(), transaction, ReferencedEnvelope.reference(feature.getBounds()), false); } @Override public void remove(final String fid, final SimpleFeature feature) throws IOException { synchronized (mutex) { if (addedFidList.remove(fid)) { components.remove(feature, this); } else { addedFeatures.remove(fid); // will remove at the end of the transaction, except ones // created in the transaction. 
removedFeatures.put(fid, feature); modifiedFeatures.remove(fid); } } components.getGTstore().getListenerManager().fireFeaturesRemoved( components.getAdapter().getFeatureType().getTypeName(), transaction, ReferencedEnvelope.reference(feature.getBounds()), false); } public void rollback() throws IOException { statsCache = null; for (final String fid : addedFidList) { components.remove(fid, this); } clear(); } @Override public String[] composeAuthorizations() { return components.getGTstore().getAuthorizationSPI().getAuthorizations(); } @Override public String composeVisibility() { return txID; } public String getID() { return txID; } @Override public void flush() throws IOException { flushAddsToStore(true); } private void flushAddsToStore(final boolean autoCommitAdds) throws IOException { final Set captureList = autoCommitAdds ? new HashSet<>() : addedFidList; components.write( addedFeatures.values().iterator(), captureList, autoCommitAdds ? new GeoWaveEmptyTransaction(components) : this); addedFeatures.clear(); } public void commit() throws IOException { flushAddsToStore(true); final Iterator> updateIt = getUpdates(); // if (addedFidList.size() > 0) { // final String transId = "\\(?" + txID + "\\)?"; // final VisibilityTransformer visibilityTransformer = new // VisibilityTransformer( // "&?" 
+ transId, // ""); // for (final Collection rowIDs : addedFidList.values()) { // components.replaceDataVisibility( // this, // rowIDs, // visibilityTransformer); // } // // components.replaceStatsVisibility( // this, // visibilityTransformer); // } final Iterator removeIt = removedFeatures.values().iterator(); while (removeIt.hasNext()) { final SimpleFeature delFeatured = removeIt.next(); components.remove(delFeatured, this); final ModifiedFeature modFeature = modifiedFeatures.get(delFeatured.getID()); // only want notify updates to existing (not new) features if ((modFeature == null) || modFeature.alreadyWritten) { components.getGTstore().getListenerManager().fireFeaturesRemoved( typeName, transaction, ReferencedEnvelope.reference(delFeatured.getBounds()), true); } } while (updateIt.hasNext()) { final Pair pair = updateIt.next(); components.writeCommit(pair.getRight(), new GeoWaveEmptyTransaction(components)); final ReferencedEnvelope bounds = new ReferencedEnvelope((CoordinateReferenceSystem) null); bounds.include(pair.getLeft().getBounds()); bounds.include(pair.getRight().getBounds()); components.getGTstore().getListenerManager().fireFeaturesChanged( typeName, transaction, ReferencedEnvelope.reference(pair.getRight().getBounds()), true); } statsCache = null; } private Iterator> getUpdates() { final Iterator> entries = modifiedFeatures.entrySet().iterator(); return new Iterator>() { Pair pair = null; @Override public boolean hasNext() { while (entries.hasNext() && (pair == null)) { final Entry entry = entries.next(); if (!entry.getValue().alreadyWritten) { pair = Pair.of(entry.getValue().oldFeature, entry.getValue().newFeature); } else { pair = null; } } return pair != null; } @Override public Pair next() throws NoSuchElementException { if (pair == null) { throw new NoSuchElementException(); } final Pair retVal = pair; pair = null; return retVal; } @Override public void remove() {} }; } @Override public StatisticsCache getDataStatistics() { if (statsCache == 
null) { statsCache = super.getDataStatistics(); } return statsCache; } @Override public CloseableIterator interweaveTransaction( final Integer limit, final Filter filter, final CloseableIterator it) { return new CloseableIterator() { Iterator addedIt = addedFeatures.values().iterator(); SimpleFeature feature = null; long count = 0; @Override public boolean hasNext() { if ((limit != null) && (limit.intValue() > 0) && (count > limit)) { return false; } while (addedIt.hasNext() && (feature == null)) { feature = addedIt.next(); if (!filter.evaluate(feature)) { feature = null; } } while (it.hasNext() && (feature == null)) { feature = it.next(); final ModifiedFeature modRecord = modifiedFeatures.get(feature.getID()); // exclude removed features // and include updated features not written yet. final Collection oldFeatures = removedFeatures.get(feature.getID()); if (modRecord != null) { feature = modRecord.newFeature; } else if ((oldFeatures != null) && !oldFeatures.isEmpty()) { // need to check if the removed feature // was just moved meaning its original matches the // boundaries of this 'feature'. matchesOne(oldFeatures, // feature)) feature = null; } } return feature != null; } @Override public SimpleFeature next() throws NoSuchElementException { if (feature == null) { throw new NoSuchElementException(); } final SimpleFeature retVal = feature; feature = null; count++; return retVal; } @Override public void remove() { removedFeatures.put(feature.getID(), feature); } @Override public void close() { it.close(); } }; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransactionManagementState.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.transaction; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import org.geotools.data.DataSourceException; import org.geotools.data.Transaction; import org.locationtech.geowave.adapter.vector.plugin.GeoWaveDataStoreComponents; import org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagement; /** Implements the transaction state protocol with Geotools. */ public class GeoWaveTransactionManagementState implements GeoWaveTransactionState { private final GeoWaveDataStoreComponents components; private final LockingManagement lockingManager; private Transaction transaction; private final String txID; private final int transactionBufferSize; /** * Map of differences by typeName. * *

Differences are stored as a Map of Feature by fid, and are reset during a commit() or * rollback(). */ private final Map typeNameDiff = new HashMap<>(); public GeoWaveTransactionManagementState( final int transactionBufferSize, final GeoWaveDataStoreComponents components, final Transaction transaction, final LockingManagement lockingManager) throws IOException { this.transactionBufferSize = transactionBufferSize; this.components = components; this.transaction = transaction; this.lockingManager = lockingManager; txID = components.getTransaction(); } @Override public synchronized void setTransaction(final Transaction transaction) { if (transaction != null) { // configure this.transaction = transaction; } else { this.transaction = null; if (typeNameDiff != null) { for (final Iterator i = typeNameDiff.values().iterator(); i.hasNext();) { final GeoWaveTransactionManagement diff = i.next(); diff.clear(); } typeNameDiff.clear(); } } } @Override public synchronized GeoWaveTransactionManagement getGeoWaveTransaction(final String typeName) throws IOException { if (!exists(typeName)) { throw new RuntimeException(typeName + " not defined"); } if (typeNameDiff.containsKey(typeName)) { return typeNameDiff.get(typeName); } else { final GeoWaveTransactionManagement transX = new GeoWaveTransactionManagement( transactionBufferSize, components, typeName, transaction, lockingManager, txID); typeNameDiff.put(typeName, transX); return transX; } } boolean exists(final String typeName) throws IOException { String[] types; types = components.getGTstore().getTypeNames(); Arrays.sort(types); return Arrays.binarySearch(types, typeName) != -1; } /** @see org.geotools.data.Transaction.State#addAuthorization(java.lang.String) */ @Override public synchronized void addAuthorization(final String AuthID) throws IOException { // not required } /** * Will apply differences to store. 
* * @see org.geotools.data.Transaction.State#commit() */ @Override public synchronized void commit() throws IOException { try { for (final Iterator> i = typeNameDiff.entrySet().iterator(); i.hasNext();) { final Map.Entry entry = i.next(); final String typeName = entry.getKey(); final GeoWaveTransactionManagement diff = entry.getValue(); applyDiff(typeName, diff); diff.clear(); } } finally { components.releaseTransaction(txID); } } /** * Called by commit() to apply one set of diff * *

The provided will be modified as the differences are applied, If the operations are * all successful diff will be empty at the end of this process.

* *

diff can be used to represent the following operations:

* *
  • fid|null: represents a fid being removed
  • * *
  • fid|feature: where fid exists, represents feature modification
  • fid|feature: where * fid does not exist, represents feature being modified
* * * @param typeName typeName being updated * @param diff differences to apply to FeatureWriter * * @throws IOException If the entire diff cannot be writen out * @throws DataSourceException If the entire diff cannot be writen out */ void applyDiff(final String typeName, final GeoWaveTransactionManagement diff) throws IOException { IOException cause = null; if (diff.isEmpty()) { return; } try { diff.commit(); } catch (final IOException e) { cause = e; throw e; } catch (final RuntimeException e) { cause = new IOException(e); throw e; } finally { try { components.getGTstore().getListenerManager().fireChanged(typeName, transaction, true); diff.clear(); } catch (final RuntimeException e) { if (cause != null) { e.initCause(cause); } throw e; } } } /** @see org.geotools.data.Transaction.State#rollback() */ @Override public synchronized void rollback() throws IOException { Entry entry; try { for (final Iterator> i = typeNameDiff.entrySet().iterator(); i.hasNext();) { entry = i.next(); final String typeName = entry.getKey(); final GeoWaveTransactionManagement diff = entry.getValue(); diff.rollback(); components.getGTstore().getListenerManager().fireChanged(typeName, transaction, false); } } finally { components.releaseTransaction(txID); } } @Override public String toString() { return "GeoWaveTransactionManagementState [components=" + components + ", lockingManager=" + lockingManager + ", transaction=" + transaction + ", txID=" + txID + ", typeNameDiff=" + typeNameDiff + "]"; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/GeoWaveTransactionState.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin.transaction;

import java.io.IOException;
import org.geotools.data.Transaction.State;

/**
 * GeoTools transaction {@link State} that hands out per-feature-type {@link GeoWaveTransaction}
 * instances for the lifetime of a GeoTools transaction.
 */
public interface GeoWaveTransactionState extends State {
  /**
   * @param typeName the feature type name the transaction applies to
   * @return the {@link GeoWaveTransaction} associated with the given type name
   * @throws IOException if the transaction cannot be obtained
   */
  public GeoWaveTransaction getGeoWaveTransaction(String typeName) throws IOException;
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/MemoryTransactionsAllocator.java
================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.transaction; import java.io.IOException; import java.util.LinkedList; import java.util.UUID; public class MemoryTransactionsAllocator implements TransactionsAllocator { private final LinkedList lockPaths = new LinkedList<>(); public MemoryTransactionsAllocator() { super(); } public void close() throws InterruptedException {} @Override public void releaseTransaction(final String txID) throws IOException { synchronized (lockPaths) { if (!lockPaths.contains(txID)) { lockPaths.add(txID); } } } @Override public String getTransaction() throws IOException { synchronized (lockPaths) { if (lockPaths.size() > 0) { return lockPaths.removeFirst(); } } return UUID.randomUUID().toString(); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/StatisticsCache.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.transaction; import java.util.Map; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.StatisticId; import org.locationtech.geowave.core.store.statistics.StatisticType; import com.beust.jcommander.internal.Maps; import com.google.common.primitives.Bytes; public class StatisticsCache { protected final DataStatisticsStore statisticsStore; protected final DataTypeAdapter adapter; protected final String[] authorizations; protected Map> cache = Maps.newHashMap(); public StatisticsCache( final DataStatisticsStore statisticsStore, final DataTypeAdapter adapter, String... 
authorizations) { this.statisticsStore = statisticsStore; this.adapter = adapter; this.authorizations = authorizations; } @SuppressWarnings("unchecked") public , R> V getFieldStatistic( final StatisticType statisticType, final String fieldName) { if (statisticType == null || fieldName == null) { return null; } ByteArray key = new ByteArray( Bytes.concat( statisticType.getBytes(), StatisticId.UNIQUE_ID_SEPARATOR, StringUtils.stringToBinary(fieldName))); if (cache.containsKey(key)) { return (V) cache.get(key); } V retVal = null; try (CloseableIterator>> statsIter = statisticsStore.getFieldStatistics(adapter, statisticType, fieldName, null)) { if (statsIter.hasNext()) { Statistic stat = (Statistic) statsIter.next(); V value = statisticsStore.getStatisticValue(stat, authorizations); if (value != null) { retVal = value; } } } cache.put(key, retVal); return retVal; } @SuppressWarnings("unchecked") public , R> V getAdapterStatistic( final StatisticType statisticType) { ByteArray key = statisticType; if (cache.containsKey(key)) { return (V) cache.get(key); } V retVal = null; try (CloseableIterator>> statsIter = statisticsStore.getDataTypeStatistics(adapter, statisticType, null)) { if (statsIter.hasNext()) { Statistic stat = (Statistic) statsIter.next(); V value = statisticsStore.getStatisticValue(stat, authorizations); if (value != null) { retVal = value; } } } cache.put(key, retVal); return retVal; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/plugin/transaction/TransactionsAllocator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin.transaction;

import java.io.IOException;

/**
 * Allocate a transaction ID. Controls the space of transaction IDs, allowing them to be reusable.
 * Essentially represents an unbounded pool of IDs. However, upper bound is determined by the number
 * of simultaneous transactions.
 *
 * <p> The set of IDs is associated with visibility/access.
 */
public interface TransactionsAllocator {
  /** @return a transaction ID, reused from the pool when one is available */
  public String getTransaction() throws IOException;

  /** Return the given transaction ID to the pool so it may be reused. */
  public void releaseTransaction(String txID) throws IOException;
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/query/GeoJsonQueryOutputFormat.java
================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query; import java.io.IOException; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.concurrent.atomic.AtomicLong; import org.geotools.data.DataUtilities; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.geojson.feature.FeatureJSON; import org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.beust.jcommander.Parameter; import com.google.common.collect.Iterators; public class GeoJsonQueryOutputFormat extends QueryOutputFormatSpi { public static final String FORMAT_NAME = "geojson"; @Parameter(names = {"-o", "--outputFile"}, required = true, description = "Output file") private String outputFile; @Parameter( names = {"-t", "--typeName"}, required = true, description = "Output feature type name") private String typeName = "results"; public GeoJsonQueryOutputFormat() { super(FORMAT_NAME); } @Override public void output(ResultSet results) { int geometryColumn = -1; for (int i = 0; i < results.columnCount(); i++) { if (Geometry.class.isAssignableFrom(results.columnType(i))) { geometryColumn = i; break; } } if (geometryColumn < 0) { throw new RuntimeException( "Unable 
to output results to a geojson without a geometry column."); } SimpleFeatureTypeBuilder ftb = new SimpleFeatureTypeBuilder(); ftb.setName(typeName); // TODO: This CRS needs to ultimately come from the query... // ftb.setCRS(results.getCRS()); for (int i = 0; i < results.columnCount(); i++) { AttributeTypeBuilder atb = new AttributeTypeBuilder(); atb.setBinding(results.columnType(i)); atb.nillable(true); ftb.add(atb.buildDescriptor(results.columnName(i))); } SimpleFeatureType sft = ftb.buildFeatureType(); final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(sft); final AtomicLong nextId = new AtomicLong(0L); Iterator features = Iterators.transform(results, r -> { sfb.reset(); for (int i = 0; i < results.columnCount(); i++) { sfb.add(r.columnValue(i)); } SimpleFeature feature = sfb.buildFeature(Long.toString(nextId.incrementAndGet())); return feature; }); try { SimpleFeatureCollection featureCollection = DataUtilities.collection(new SimpleFeatureIterator() { @Override public boolean hasNext() { return features.hasNext(); } @Override public SimpleFeature next() throws NoSuchElementException { return features.next(); } @Override public void close() {} }); FeatureJSON io = new FeatureJSON(); io.writeFeatureCollection(featureCollection, outputFile); } catch (IOException e) { throw new RuntimeException( "Encountered exception when writing geojson file: " + e.getMessage(), e); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/query/ShapefileQueryOutputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.concurrent.atomic.AtomicLong; import org.geotools.data.DataStore; import org.geotools.data.DataUtilities; import org.geotools.data.DefaultTransaction; import org.geotools.data.FileDataStoreFactorySpi; import org.geotools.data.FileDataStoreFinder; import org.geotools.data.Transaction; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.data.simple.SimpleFeatureStore; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi; import org.locationtech.geowave.core.store.query.gwql.ResultSet; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.beust.jcommander.Parameter; import com.beust.jcommander.internal.Maps; import com.google.common.collect.Iterators; /** * Since most of this class is basic geotools data store logic, it would be easy to abstract out the * geotools portion and create several output formats for other geotools data store formats such as * postgis. 
*/ public class ShapefileQueryOutputFormat extends QueryOutputFormatSpi { public static final String FORMAT_NAME = "shp"; @Parameter(names = {"-o", "--outputFile"}, required = true, description = "Output file") private String outputFile; @Parameter( names = {"-t", "--typeName"}, required = true, description = "Output feature type name") private String typeName = "results"; public ShapefileQueryOutputFormat() { super(FORMAT_NAME); } @Override public void output(final ResultSet results) { int geometryColumn = -1; for (int i = 0; i < results.columnCount(); i++) { if (Geometry.class.isAssignableFrom(results.columnType(i))) { geometryColumn = i; break; } } if (geometryColumn < 0) { throw new RuntimeException( "Unable to output results to a shapefile without a geometry column."); } final SimpleFeatureTypeBuilder ftb = new SimpleFeatureTypeBuilder(); // TODO: This CRS needs to ultimately come from the query... // ftb.setCRS(results.getCRS()); ftb.setName(typeName); for (int i = 0; i < results.columnCount(); i++) { final AttributeTypeBuilder atb = new AttributeTypeBuilder(); atb.setBinding(results.columnType(i)); atb.nillable(true); if (i == geometryColumn) { ftb.add(atb.buildDescriptor("the_geom")); } else { ftb.add(atb.buildDescriptor(results.columnName(i))); } } final SimpleFeatureType sft = ftb.buildFeatureType(); final SimpleFeatureBuilder sfb = new SimpleFeatureBuilder(sft); final AtomicLong nextId = new AtomicLong(0L); final Iterator features = Iterators.transform(results, r -> { sfb.reset(); for (int i = 0; i < results.columnCount(); i++) { sfb.add(r.columnValue(i)); } final SimpleFeature feature = sfb.buildFeature(Long.toString(nextId.incrementAndGet())); return feature; }); final FileDataStoreFactorySpi factory = FileDataStoreFinder.getDataStoreFactory("shp"); final File file = new File(outputFile); final Map params = Maps.newHashMap(); final Transaction transaction = new DefaultTransaction("Write Results"); try { params.put("url", file.toURI().toURL()); final 
DataStore dataStore = factory.createNewDataStore(params); dataStore.createSchema(sft); final SimpleFeatureStore store = (SimpleFeatureStore) dataStore.getFeatureSource(dataStore.getTypeNames()[0]); store.setTransaction(transaction); final SimpleFeatureCollection featureCollection = DataUtilities.collection(new SimpleFeatureIterator() { @Override public boolean hasNext() { return features.hasNext(); } @Override public SimpleFeature next() throws NoSuchElementException { return features.next(); } @Override public void close() {} }); store.addFeatures(featureCollection); transaction.commit(); } catch (final Exception e) { try { transaction.rollback(); } catch (final IOException ioe) { throw new RuntimeException("Encountered an error when rolling back transaction", ioe); } throw new RuntimeException( "Encountered an error when writing the features to the file: " + e.getMessage(), e); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/query/aggregation/VectorCountAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query.aggregation; import java.nio.ByteBuffer; import org.locationtech.geowave.core.index.VarintUtils; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.query.aggregate.FieldNameParam; import org.opengis.feature.simple.SimpleFeature; /** * Counts non-null values of a simple feature attribute. If no attribute is specified, counts each * simple feature. */ public class VectorCountAggregation implements Aggregation { private FieldNameParam fieldNameParam; private long count = 0; public VectorCountAggregation() { this(null); } public VectorCountAggregation(final FieldNameParam fieldNameParam) { super(); this.fieldNameParam = fieldNameParam; } @Override public FieldNameParam getParameters() { return fieldNameParam; } @Override public void setParameters(final FieldNameParam fieldNameParam) { this.fieldNameParam = fieldNameParam; } @Override public Long merge(final Long result1, final Long result2) { return result1 + result2; } @Override public Long getResult() { return count; } @Override public byte[] resultToBinary(final Long result) { final ByteBuffer buffer = ByteBuffer.allocate(VarintUtils.unsignedLongByteLength(result)); VarintUtils.writeUnsignedLong(result, buffer); return buffer.array(); } @Override public Long resultFromBinary(final byte[] binary) { return VarintUtils.readUnsignedLong(ByteBuffer.wrap(binary)); } @Override public void clearResult() { count = 0; } @Override public void aggregate(final DataTypeAdapter adapter, final SimpleFeature entry) { 
Object o; if ((fieldNameParam != null) && !fieldNameParam.isEmpty()) { o = entry.getAttribute(fieldNameParam.getFieldName()); if (o != null) { count++; } } else { count++; } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/AsyncQueueFeatureCollection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.render; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.feature.collection.BaseSimpleFeatureCollection; import org.geotools.feature.collection.DelegateSimpleFeatureIterator; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.oath.cyclops.async.adapters.Queue; public class AsyncQueueFeatureCollection extends BaseSimpleFeatureCollection { private final Queue asyncQueue; public AsyncQueueFeatureCollection( final SimpleFeatureType type, final Queue asyncQueue) { super(type); this.asyncQueue = asyncQueue; } @Override public SimpleFeatureIterator features() { return new DelegateSimpleFeatureIterator(asyncQueue.stream().iterator()); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderAggregation.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.render; import java.awt.geom.Point2D; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.geoserver.wms.DefaultWebMapService; import org.geoserver.wms.GetMapRequest; import org.geoserver.wms.ScaleComputationMethod; import org.geoserver.wms.WMSMapContent; import org.geotools.map.FeatureLayer; import org.geotools.map.MapViewport; import org.locationtech.geowave.core.store.api.Aggregation; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.oath.cyclops.async.adapters.Queue; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; public class DistributedRenderAggregation implements Aggregation { private static final Logger LOGGER = LoggerFactory.getLogger(DistributedRenderAggregation.class); private DistributedRenderMapOutputFormat currentRenderer; private DistributedRenderResult currentResult; // use a cyclops-react queue to feed simple features asynchronously while a // render thread consumes the features private Queue queue; private CompletableFuture asyncRenderer; private DistributedRenderOptions options; public DistributedRenderAggregation() {} public DistributedRenderAggregation(final DistributedRenderOptions options) { this.options = options; } @Override public DistributedRenderOptions getParameters() { 
return options; } @Override public void setParameters(final DistributedRenderOptions options) { this.options = options; } @SuppressFBWarnings( value = "NP_NONNULL_PARAM_VIOLATION", justification = "This usage requires null params and is tested to work.") private void initRenderer(final SimpleFeatureType type) { currentRenderer = new DistributedRenderMapOutputFormat(options); final WMSMapContent mapContent = new WMSMapContent(); final GetMapRequest request = new GetMapRequest(); mapContent.setBgColor(options.getBgColor()); request.setBgColor(options.getBgColor()); mapContent.setPalette(options.getPalette()); request.setPalette(options.getPalette()); mapContent.setAngle(options.getAngle()); request.setAngle(options.getAngle()); mapContent.setBuffer(options.getBuffer()); request.setBuffer(options.getBuffer()); mapContent.setMapWidth(options.getMapWidth()); request.setWidth(options.getMapWidth()); mapContent.setMapHeight(options.getMapHeight()); request.setHeight(options.getMapHeight()); mapContent.setTransparent(options.isTransparent()); request.setTransparent(options.isTransparent()); mapContent.setViewport(new MapViewport(options.getEnvelope())); request.setBbox(options.getEnvelope()); request.setInterpolations(options.getInterpolations()); final Map formatOptions = new HashMap<>(); formatOptions.put("antialias", options.getAntialias()); formatOptions.put("timeout", options.getMaxRenderTime()); formatOptions.put("kmplacemark", Boolean.valueOf(options.isKmlPlacemark())); // this sets a static variable, but its the only method available // (multiple geoserver clients with different settings hitting the same // distributed backend, may conflict on these settings) // we get around this by overriding these settings on the renderHints // object within DistributedRenderer so it is no longer using these // static settings, but these static properties must be set to avoid // NPEs System.setProperty("OPTIMIZE_LINE_WIDTH", Boolean.toString(options.isOptimizeLineWidth())); 
System.setProperty("MAX_FILTER_RULES", Integer.toString(options.getMaxFilters())); System.setProperty( "USE_GLOBAL_RENDERING_POOL", Boolean.toString(DistributedRenderOptions.isUseGlobalRenderPool())); new DefaultWebMapService(null).setApplicationContext(null); request.setFormatOptions(formatOptions); request.setWidth(options.getMapWidth()); request.setHeight(options.getMapHeight()); request.setTiled(options.isMetatile()); request.setScaleMethod( options.isRenderScaleMethodAccurate() ? ScaleComputationMethod.Accurate : ScaleComputationMethod.OGC); if (options.isMetatile()) { // it doesn't matter what this is, as long as its not null, we are // just ensuring proper transparency usage based on meta-tiling // rules request.setTilesOrigin(new Point2D.Double()); } mapContent.setRequest(request); queue = new Queue<>(); mapContent.addLayer( new FeatureLayer(new AsyncQueueFeatureCollection(type, queue), options.getStyle())); // produce map in a separate thread... asyncRenderer = CompletableFuture.supplyAsync(() -> { currentRenderer.produceMap(mapContent).dispose(); return currentRenderer.getDistributedRenderResult(); }); } @Override public DistributedRenderResult getResult() { if ((queue != null) && (asyncRenderer != null)) { queue.close(); DistributedRenderResult result = null; // may not need to do this, waiting on map production may be // sufficient try { if (options.getMaxRenderTime() > 0) { result = asyncRenderer.get(options.getMaxRenderTime(), TimeUnit.SECONDS); } else { result = asyncRenderer.get(); } } catch (InterruptedException | ExecutionException | TimeoutException e) { LOGGER.warn("Unable to get distributed render result", e); } currentResult = result; clearRenderer(); } return currentResult; } @Override public void clearResult() { stopRenderer(); clearRenderer(); currentResult = null; } public void stopRenderer() { if (currentRenderer != null) { currentRenderer.stopRendering(); } if (asyncRenderer != null) { asyncRenderer.cancel(true); } } public void 
clearRenderer() { queue = null; currentRenderer = null; asyncRenderer = null; } private synchronized void ensureOpen(final SimpleFeatureType type) { if (currentRenderer == null) { initRenderer(type); } } @Override public void aggregate(final DataTypeAdapter adapter, final SimpleFeature entry) { ensureOpen(entry.getFeatureType()); queue.add(entry); } @Override public byte[] resultToBinary(final DistributedRenderResult result) { return result.toBinary(); } @Override public DistributedRenderResult resultFromBinary(final byte[] binary) { final DistributedRenderResult result = new DistributedRenderResult(); result.fromBinary(binary); return result; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderCallback.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.render; import org.geoserver.wms.GetMapCallbackAdapter; import org.geoserver.wms.WMS; import org.geoserver.wms.WMSMapContent; import org.geotools.factory.CommonFactoryFinder; import org.geotools.map.FeatureLayer; import org.geotools.map.Layer; import org.geotools.process.Processors; import org.geotools.process.function.DistributedRenderProcessUtils; import org.geotools.process.function.ProcessFunction; import org.geotools.styling.FeatureTypeStyle; import org.geotools.styling.RasterSymbolizer; import org.geotools.styling.Rule; import org.geotools.styling.Style; import org.geotools.styling.StyleFactory; import org.geotools.styling.visitor.DuplicatingStyleVisitor; import org.locationtech.geowave.adapter.vector.plugin.DistributedRenderProcess; import org.locationtech.geowave.adapter.vector.plugin.InternalProcessFactory; import org.opengis.filter.expression.Expression; /** * The purpose of this callback is completely to get the layer Style accessible from the query, in * particular making the style available to either the FeatureReader or to a RenderingTransformation */ public class DistributedRenderCallback extends GetMapCallbackAdapter { private final WMS wms; public DistributedRenderCallback(final WMS wms) { this.wms = wms; } @Override public Layer beforeLayer(final WMSMapContent mapContent, final Layer layer) { // sanity check the style if ((layer instanceof FeatureLayer) && (layer.getStyle() != null) && (layer.getStyle().featureTypeStyles() != null) && !layer.getStyle().featureTypeStyles().isEmpty()) { final Style layerStyle = layer.getStyle(); final FeatureTypeStyle style 
= layerStyle.featureTypeStyles().get(0); // check if their is a DistributedRender rendering // transformation if ((style instanceof ProcessFunction) && (style.getTransformation() != null) && (((ProcessFunction) style.getTransformation()).getName() != null) && ((ProcessFunction) style.getTransformation()).getName().equals( DistributedRenderProcess.PROCESS_NAME)) { // if their is a DistributedRender transformation, we need // to provide more information that can only be found final DuplicatingStyleVisitor cloner = new DuplicatingStyleVisitor(); layerStyle.accept(cloner); layer.getQuery().getHints().put( DistributedRenderProcess.OPTIONS, new DistributedRenderOptions(wms, mapContent, layerStyle)); // now that the options with the distributed render style // have been set the original style will be used with // distributed rendering // now, replace the style with a direct raster symbolizer, // so the GridCoverage result of the distributed rendering // process is directly rendered to the map in place of the // original style final Style directRasterStyle = (Style) cloner.getCopy(); directRasterStyle.featureTypeStyles().clear(); Processors.addProcessFactory(new InternalProcessFactory()); directRasterStyle.featureTypeStyles().add( getDirectRasterStyle( layer.getFeatureSource().getSchema().getGeometryDescriptor().getLocalName(), DistributedRenderProcessUtils.getRenderingProcess())); ((FeatureLayer) layer).setStyle(directRasterStyle); } } return layer; } private static FeatureTypeStyle getDirectRasterStyle( final String geometryPropertyName, final Expression transformation) { final StyleFactory styleFactory = CommonFactoryFinder.getStyleFactory(); final FeatureTypeStyle style = styleFactory.createFeatureTypeStyle(); final Rule rule = styleFactory.createRule(); rule.setName("Distributed Render - Direct Raster"); final RasterSymbolizer symbolizer = styleFactory.createRasterSymbolizer(); symbolizer.setGeometryPropertyName(geometryPropertyName); 
rule.symbolizers().add(symbolizer); style.rules().add(rule); style.setTransformation(transformation); return style; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderMapOutputFormat.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.render;

import java.awt.image.BufferedImage;
import java.awt.image.IndexColorModel;
import java.awt.image.RenderedImage;
import org.geoserver.wms.map.RenderedImageMapOutputFormat;
import org.geotools.renderer.lite.DistributedRenderer;
import org.geotools.renderer.lite.StreamingRenderer;

/**
 * WMS map output format that swaps the standard streaming renderer for a
 * {@link DistributedRenderer} and keeps a handle on the rendered image so the partial
 * {@link DistributedRenderResult} can be retrieved after map production.
 */
public class DistributedRenderMapOutputFormat extends RenderedImageMapOutputFormat {
  private final DistributedRenderOptions options;
  // renderer created by the most recent buildRenderer() call
  private DistributedRenderer currentRenderer;
  // image produced by the most recent prepareImage() call
  private BufferedImage currentImage;

  public DistributedRenderMapOutputFormat(final DistributedRenderOptions options) {
    super(new DistributedRenderWMSFacade(options));
    this.options = options;
  }

  // hook: substitute the distributed renderer for the default streaming renderer
  @Override
  protected StreamingRenderer buildRenderer() {
    currentRenderer = new DistributedRenderer(options);
    return currentRenderer;
  }

  /** Cancels any in-progress rendering on the current renderer. */
  public void stopRendering() {
    if (currentRenderer != null) {
      currentRenderer.stopRendering();
    }
  }

  // hook: capture the image the superclass prepares so it can be paired with the render result
  @Override
  protected RenderedImage prepareImage(
      final int width,
      final int height,
      final IndexColorModel palette,
      final boolean transparent) {
    currentImage = (BufferedImage) super.prepareImage(width, height, palette, transparent);
    return currentImage;
  }

  /** @return the distributed render result built from the captured renderer and image */
  public DistributedRenderResult getDistributedRenderResult() {
    return currentRenderer.getResult(currentImage);
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderOptions.java
================================================

/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.adapter.vector.render;

import java.awt.Color;
import java.awt.image.IndexColorModel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import javax.media.jai.Interpolation;
import javax.media.jai.InterpolationBicubic;
import javax.media.jai.InterpolationBicubic2;
import javax.media.jai.InterpolationBilinear;
import javax.media.jai.InterpolationNearest;
import javax.media.jai.remote.SerializableState;
import javax.media.jai.remote.SerializerFactory;
import javax.xml.transform.TransformerException;
import org.geoserver.wms.DefaultWebMapService;
import org.geoserver.wms.GetMapRequest;
import org.geoserver.wms.WMS;
import org.geoserver.wms.WMSMapContent;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.geotools.renderer.lite.StreamingRenderer;
import org.geotools.styling.Style;
import org.geotools.xml.styling.SLDParser;
import org.geotools.xml.styling.SLDTransformer;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.Lists;

/**
 * A serializable snapshot of the WMS/GetMap rendering settings needed to reproduce a rendering on
 * a remote (distributed) node. The options are captured from the live GeoServer {@link WMS} and
 * {@link WMSMapContent} on construction and round-tripped through {@link #toBinary()}/
 * {@link #fromBinary(byte[])}.
 */
public class DistributedRenderOptions implements Persistable {
  private static final Logger LOGGER = LoggerFactory.getLogger(DistributedRenderOptions.class);
  // it doesn't make sense to grab this from the context of the geoserver
  // settings, although it is unclear whether in distributed rendering this
  // should be enabled or disabled by default
  private static final boolean USE_GLOBAL_RENDER_POOL = true;
  // the boolean/presence bit set is always persisted as exactly this many bytes
  private static final int BITSET_BYTES = 2;

  private String antialias;
  private boolean continuousMapWrapping;
  private boolean advancedProjectionHandlingEnabled;
  private boolean optimizeLineWidth;
  private boolean transparent;
  private boolean isMetatile;
  private boolean kmlPlacemark;
  private boolean renderScaleMethodAccurate;
  private int mapWidth;
  private int mapHeight;
  private int buffer;
  private double angle;
  private IndexColorModel palette;
  private Color bgColor;
  private int maxRenderTime;
  private int maxErrors;
  private int maxFilters;
  private ReferencedEnvelope envelope;
  private int wmsIterpolationOrdinal;
  private List<Integer> interpolationOrdinals;
  private Style style;

  /** No-arg constructor for deserialization via {@link #fromBinary(byte[])}. */
  public DistributedRenderOptions() {}

  /**
   * Captures the rendering-relevant settings from the live WMS configuration and the current map
   * content/request.
   *
   * @param wms the GeoServer WMS configuration facade
   * @param mapContent the map content of the current GetMap request
   * @param style the style to render with
   */
  public DistributedRenderOptions(
      final WMS wms,
      final WMSMapContent mapContent,
      final Style style) {
    optimizeLineWidth = DefaultWebMapService.isLineWidthOptimizationEnabled();
    maxFilters = DefaultWebMapService.getMaxFilterRules();
    transparent = mapContent.isTransparent();
    buffer = mapContent.getBuffer();
    angle = mapContent.getAngle();
    mapWidth = mapContent.getMapWidth();
    mapHeight = mapContent.getMapHeight();
    bgColor = mapContent.getBgColor();
    palette = mapContent.getPalette();
    renderScaleMethodAccurate =
        StreamingRenderer.SCALE_ACCURATE.equals(mapContent.getRendererScaleMethod());
    wmsIterpolationOrdinal = wms.getInterpolation().ordinal();
    maxErrors = wms.getMaxRenderingErrors();
    this.style = style;
    envelope = mapContent.getRenderingArea();
    final GetMapRequest request = mapContent.getRequest();
    final Object timeoutOption = request.getFormatOptions().get("timeout");
    int localMaxRenderTime = 0;
    if (timeoutOption != null) {
      try {
        // local render time is in millis, while WMS max render time is in seconds
        localMaxRenderTime = Integer.parseInt(timeoutOption.toString()) / 1000;
      } catch (final NumberFormatException e) {
        LOGGER.warn("Could not parse format_option \"timeout\": " + timeoutOption, e);
      }
    }
    maxRenderTime = getMaxRenderTime(localMaxRenderTime, wms);
    isMetatile = request.isTiled() && (request.getTilesOrigin() != null);
    final Object antialiasObj = request.getFormatOptions().get("antialias");
    if (antialiasObj != null) {
      antialias = antialiasObj.toString();
    }
    if (request.getFormatOptions().get("kmplacemark") != null) {
      kmlPlacemark = ((Boolean) request.getFormatOptions().get("kmplacemark")).booleanValue();
    }
    // turn on advanced projection handling
    advancedProjectionHandlingEnabled = wms.isAdvancedProjectionHandlingEnabled();
    final Object advancedProjectionObj =
        request.getFormatOptions().get(WMS.ADVANCED_PROJECTION_KEY);
    if ((advancedProjectionObj != null)
        && "false".equalsIgnoreCase(advancedProjectionObj.toString())) {
      advancedProjectionHandlingEnabled = false;
      continuousMapWrapping = false;
    }
    // NOTE(review): this previously looked up WMS.ADVANCED_PROJECTION_KEY a second time
    // (copy-paste), making the map-wrapping format option unreachable; use the map-wrapping key
    final Object mapWrappingObj = request.getFormatOptions().get(WMS.MAP_WRAPPING_KEY);
    if ((mapWrappingObj != null) && "false".equalsIgnoreCase(mapWrappingObj.toString())) {
      continuousMapWrapping = false;
    }
    final List<Interpolation> interpolations = request.getInterpolations();
    if ((interpolations == null) || interpolations.isEmpty()) {
      interpolationOrdinals = Collections.emptyList();
    } else {
      // map each JAI interpolation instance to its Interpolation.INTERP_* code so the list can
      // be persisted; previously every branch tested InterpolationNearest, which silently
      // downgraded bilinear/bicubic requests to nearest-neighbor
      interpolationOrdinals =
          Lists.transform(interpolations, new Function<Interpolation, Integer>() {
            @Override
            public Integer apply(final Interpolation input) {
              if (input instanceof InterpolationNearest) {
                return Interpolation.INTERP_NEAREST;
              } else if (input instanceof InterpolationBilinear) {
                return Interpolation.INTERP_BILINEAR;
              } else if (input instanceof InterpolationBicubic2) {
                return Interpolation.INTERP_BICUBIC_2;
              } else if (input instanceof InterpolationBicubic) {
                return Interpolation.INTERP_BICUBIC;
              }
              // unknown interpolation implementation; fall back to nearest-neighbor
              return Interpolation.INTERP_NEAREST;
            }
          });
    }
  }

  /**
   * Combines the request-local render timeout with the WMS-wide maximum (0 meaning "no limit" for
   * either), keeping the stricter of the two.
   *
   * @param localMaxRenderTime the per-request timeout in seconds (0 for none)
   * @param wms the WMS configuration supplying the global maximum
   * @return the effective maximum render time in seconds (also stored on this instance)
   */
  public int getMaxRenderTime(final int localMaxRenderTime, final WMS wms) {
    final int wmsMaxRenderTime = wms.getMaxRenderingTime();
    if (wmsMaxRenderTime == 0) {
      maxRenderTime = localMaxRenderTime;
    } else if (localMaxRenderTime != 0) {
      maxRenderTime = Math.min(wmsMaxRenderTime, localMaxRenderTime);
    } else {
      maxRenderTime = wmsMaxRenderTime;
    }
    return maxRenderTime;
  }

  public boolean isOptimizeLineWidth() {
    return optimizeLineWidth;
  }

  public int getMaxErrors() {
    return maxErrors;
  }

  public void setMaxErrors(final int maxErrors) {
    this.maxErrors = maxErrors;
  }

  public void setOptimizeLineWidth(final boolean optimizeLineWidth) {
    this.optimizeLineWidth = optimizeLineWidth;
  }

  public List<Integer> getInterpolationOrdinals() {
    return interpolationOrdinals;
  }

  /** @return the persisted interpolation ordinals rehydrated as JAI interpolation instances */
  public List<Interpolation> getInterpolations() {
    if ((interpolationOrdinals != null) && !interpolationOrdinals.isEmpty()) {
      return Lists.transform(interpolationOrdinals, input -> Interpolation.getInstance(input));
    }
    return Collections.emptyList();
  }

  public void setInterpolationOrdinals(final List<Integer> interpolationOrdinals) {
    this.interpolationOrdinals = interpolationOrdinals;
  }

  public static boolean isUseGlobalRenderPool() {
    return USE_GLOBAL_RENDER_POOL;
  }

  public Style getStyle() {
    return style;
  }

  public void setStyle(final Style style) {
    this.style = style;
  }

  public int getWmsInterpolationOrdinal() {
    return wmsIterpolationOrdinal;
  }

  public void setWmsInterpolationOrdinal(final int wmsIterpolationOrdinal) {
    this.wmsIterpolationOrdinal = wmsIterpolationOrdinal;
  }

  public int getMaxRenderTime() {
    return maxRenderTime;
  }

  public void setMaxRenderTime(final int maxRenderTime) {
    this.maxRenderTime = maxRenderTime;
  }

  public boolean isRenderScaleMethodAccurate() {
    return renderScaleMethodAccurate;
  }

  public void setRenderScaleMethodAccurate(final boolean renderScaleMethodAccurate) {
    this.renderScaleMethodAccurate = renderScaleMethodAccurate;
  }

  public int getBuffer() {
    return buffer;
  }

  public void setBuffer(final int buffer) {
    this.buffer = buffer;
  }

  public void setPalette(final IndexColorModel palette) {
    this.palette = palette;
  }

  public String getAntialias() {
    return antialias;
  }

  public void setAntialias(final String antialias) {
    this.antialias = antialias;
  }

  public boolean isContinuousMapWrapping() {
    return continuousMapWrapping;
  }

  public void setContinuousMapWrapping(final boolean continuousMapWrapping) {
    this.continuousMapWrapping = continuousMapWrapping;
  }

  public boolean isAdvancedProjectionHandlingEnabled() {
    return advancedProjectionHandlingEnabled;
  }

  public void setAdvancedProjectionHandlingEnabled(
      final boolean advancedProjectionHandlingEnabled) {
    this.advancedProjectionHandlingEnabled = advancedProjectionHandlingEnabled;
  }

  public boolean isKmlPlacemark() {
    return kmlPlacemark;
  }

  public void setKmlPlacemark(final boolean kmlPlacemark) {
    this.kmlPlacemark = kmlPlacemark;
  }

  public boolean isTransparent() {
    return transparent;
  }

  public void setTransparent(final boolean transparent) {
    this.transparent = transparent;
  }

  public boolean isMetatile() {
    return isMetatile;
  }

  public void setMetatile(final boolean isMetatile) {
    this.isMetatile = isMetatile;
  }

  public Color getBgColor() {
    return bgColor;
  }

  public void setBgColor(final Color bgColor) {
    this.bgColor = bgColor;
  }

  public int getMapWidth() {
    return mapWidth;
  }

  public void setMapWidth(final int mapWidth) {
    this.mapWidth = mapWidth;
  }

  public int getMapHeight() {
    return mapHeight;
  }

  public void setMapHeight(final int mapHeight) {
    this.mapHeight = mapHeight;
  }

  public double getAngle() {
    return angle;
  }

  public void setAngle(final double angle) {
    this.angle = angle;
  }

  public int getMaxFilters() {
    return maxFilters;
  }

  public void setMaxFilters(final int maxFilters) {
    this.maxFilters = maxFilters;
  }

  public ReferencedEnvelope getEnvelope() {
    return envelope;
  }

  public void setEnvelope(final ReferencedEnvelope envelope) {
    this.envelope = envelope;
  }

  public IndexColorModel getPalette() {
    return palette;
  }

  @Override
  public byte[] toBinary() {
    // combine booleans (and presence flags for optional fields) into a bit set
    final BitSet bitSet = new BitSet(16);
    bitSet.set(0, continuousMapWrapping);
    bitSet.set(1, advancedProjectionHandlingEnabled);
    bitSet.set(2, optimizeLineWidth);
    bitSet.set(3, transparent);
    bitSet.set(4, isMetatile);
    bitSet.set(5, kmlPlacemark);
    bitSet.set(6, renderScaleMethodAccurate);
    final boolean storeInterpolationOrdinals =
        ((interpolationOrdinals != null) && !interpolationOrdinals.isEmpty());
    bitSet.set(7, storeInterpolationOrdinals);
    bitSet.set(8, palette != null);
    bitSet.set(9, maxRenderTime > 0);
    bitSet.set(10, maxErrors > 0);
    bitSet.set(11, angle != 0);
    bitSet.set(12, buffer > 0);
    bitSet.set(13, bgColor != null);
    bitSet.set(14, style != null);
    // only persist the CRS when it differs from the default
    final boolean storeCRS =
        !((envelope.getCoordinateReferenceSystem() == null)
            || GeometryUtils.getDefaultCRS().equals(envelope.getCoordinateReferenceSystem()));
    bitSet.set(15, storeCRS);
    final double minX = envelope.getMinX();
    final double minY = envelope.getMinY();
    final double maxX = envelope.getMaxX();
    final double maxY = envelope.getMaxY();
    // required bytes include 32 for the envelope doubles, the varint-encoded map width and
    // height, and BITSET_BYTES for the bit set
    int bufferSize =
        32
            + BITSET_BYTES
            + VarintUtils.unsignedIntByteLength(mapWidth)
            + VarintUtils.unsignedIntByteLength(mapHeight);
    final byte[] wktBinary;
    if (storeCRS) {
      final String wkt = envelope.getCoordinateReferenceSystem().toWKT();
      wktBinary = StringUtils.stringToBinary(wkt);
      bufferSize += (wktBinary.length + VarintUtils.unsignedIntByteLength(wktBinary.length));
    } else {
      wktBinary = null;
    }
    if (storeInterpolationOrdinals) {
      for (final Integer ordinal : interpolationOrdinals) {
        bufferSize += VarintUtils.unsignedIntByteLength(ordinal);
      }
      bufferSize += VarintUtils.unsignedIntByteLength(interpolationOrdinals.size());
    }
    final byte[] paletteBinary;
    if (palette != null) {
      final SerializableState serializableColorModel = SerializerFactory.getState(palette);
      final ByteArrayOutputStream baos = new ByteArrayOutputStream();
      // try-with-resources ensures the object stream is flushed before toByteArray(); the
      // original never closed the stream, which risked a truncated serialization
      try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(serializableColorModel);
      } catch (final IOException e) {
        LOGGER.warn("Unable to serialize sample model", e);
      }
      paletteBinary = baos.toByteArray();
      bufferSize +=
          (paletteBinary.length + VarintUtils.unsignedIntByteLength(paletteBinary.length));
    } else {
      paletteBinary = null;
    }
    if (maxRenderTime > 0) {
      bufferSize += VarintUtils.unsignedIntByteLength(maxRenderTime);
    }
    if (maxErrors > 0) {
      bufferSize += VarintUtils.unsignedIntByteLength(maxErrors);
    }
    if (angle != 0) {
      bufferSize += 8;
    }
    if (buffer > 0) {
      bufferSize += VarintUtils.unsignedIntByteLength(buffer);
    }
    if (bgColor != null) {
      bufferSize += 4;
    }
    final byte[] styleBinary;
    if (style != null) {
      final SLDTransformer transformer = new SLDTransformer();
      final ByteArrayOutputStream baos = new ByteArrayOutputStream();
      try {
        transformer.transform(new Style[] {style}, baos);
      } catch (final TransformerException e) {
        LOGGER.warn("Unable to create SLD from style", e);
      }
      styleBinary = baos.toByteArray();
      bufferSize += (styleBinary.length + VarintUtils.unsignedIntByteLength(styleBinary.length));
    } else {
      styleBinary = null;
    }
    final ByteBuffer byteBuffer = ByteBuffer.allocate(bufferSize);
    // BitSet.toByteArray() trims trailing zero bytes, so pad to exactly BITSET_BYTES to keep
    // alignment with fromBinary() (which always consumes BITSET_BYTES) when the high-numbered
    // bits are all clear
    byteBuffer.put(Arrays.copyOf(bitSet.toByteArray(), BITSET_BYTES));
    byteBuffer.putDouble(minX);
    byteBuffer.putDouble(minY);
    byteBuffer.putDouble(maxX);
    byteBuffer.putDouble(maxY);
    VarintUtils.writeUnsignedInt(mapWidth, byteBuffer);
    VarintUtils.writeUnsignedInt(mapHeight, byteBuffer);
    if (wktBinary != null) {
      VarintUtils.writeUnsignedInt(wktBinary.length, byteBuffer);
      byteBuffer.put(wktBinary);
    }
    if (storeInterpolationOrdinals) {
      VarintUtils.writeUnsignedInt(interpolationOrdinals.size(), byteBuffer);
      for (final Integer interpOrd : interpolationOrdinals) {
        VarintUtils.writeUnsignedInt(interpOrd, byteBuffer);
      }
    }
    if (paletteBinary != null) {
      VarintUtils.writeUnsignedInt(paletteBinary.length, byteBuffer);
      byteBuffer.put(paletteBinary);
    }
    if (maxRenderTime > 0) {
      VarintUtils.writeUnsignedInt(maxRenderTime, byteBuffer);
    }
    if (maxErrors > 0) {
      VarintUtils.writeUnsignedInt(maxErrors, byteBuffer);
    }
    if (angle != 0) {
      byteBuffer.putDouble(angle);
    }
    if (buffer > 0) {
      VarintUtils.writeUnsignedInt(buffer, byteBuffer);
    }
    if (bgColor != null) {
      byteBuffer.putInt(bgColor.getRGB());
    }
    if (styleBinary != null) {
      VarintUtils.writeUnsignedInt(styleBinary.length, byteBuffer);
      byteBuffer.put(styleBinary);
    }
    return byteBuffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    // the bit set is always exactly BITSET_BYTES bytes (see toBinary)
    final byte[] bitSetBytes = new byte[BITSET_BYTES];
    buf.get(bitSetBytes);
    final BitSet bitSet = BitSet.valueOf(bitSetBytes);
    continuousMapWrapping = bitSet.get(0);
    advancedProjectionHandlingEnabled = bitSet.get(1);
    optimizeLineWidth = bitSet.get(2);
    transparent = bitSet.get(3);
    isMetatile = bitSet.get(4);
    kmlPlacemark = bitSet.get(5);
    renderScaleMethodAccurate = bitSet.get(6);
    final boolean interpolationOrdinalsStored = bitSet.get(7);
    final boolean paletteStored = bitSet.get(8);
    final boolean maxRenderTimeStored = bitSet.get(9);
    final boolean maxErrorsStored = bitSet.get(10);
    final boolean angleStored = bitSet.get(11);
    final boolean bufferStored = bitSet.get(12);
    final boolean bgColorStored = bitSet.get(13);
    final boolean styleStored = bitSet.get(14);
    final boolean crsStored = bitSet.get(15);
    CoordinateReferenceSystem crs;
    final double minX = buf.getDouble();
    final double minY = buf.getDouble();
    final double maxX = buf.getDouble();
    final double maxY = buf.getDouble();
    mapWidth = VarintUtils.readUnsignedInt(buf);
    mapHeight = VarintUtils.readUnsignedInt(buf);
    if (crsStored) {
      final byte[] wktBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
      final String wkt = StringUtils.stringFromBinary(wktBinary);
      try {
        crs = CRS.parseWKT(wkt);
      } catch (final FactoryException e) {
        LOGGER.warn("Unable to parse coordinate reference system", e);
        crs = GeometryUtils.getDefaultCRS();
      }
    } else {
      crs = GeometryUtils.getDefaultCRS();
    }
    envelope = new ReferencedEnvelope(minX, maxX, minY, maxY, crs);
    if (interpolationOrdinalsStored) {
      final int interpolationsLength = VarintUtils.readUnsignedInt(buf);
      interpolationOrdinals = new ArrayList<>(interpolationsLength);
      for (int i = 0; i < interpolationsLength; i++) {
        interpolationOrdinals.add(VarintUtils.readUnsignedInt(buf));
      }
    } else {
      interpolationOrdinals = Collections.emptyList();
    }
    if (paletteStored) {
      final byte[] colorModelBinary =
          ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
      // try-with-resources closes the object stream; the wrapped stream is in-memory only
      try (ObjectInputStream ois =
          new ObjectInputStream(new ByteArrayInputStream(colorModelBinary))) {
        final Object o = ois.readObject();
        if ((o instanceof SerializableState)
            && (((SerializableState) o).getObject() instanceof IndexColorModel)) {
          palette = (IndexColorModel) ((SerializableState) o).getObject();
        }
      } catch (final Exception e) {
        LOGGER.warn("Unable to deserialize color model", e);
        palette = null;
      }
    } else {
      palette = null;
    }
    if (maxRenderTimeStored) {
      maxRenderTime = VarintUtils.readUnsignedInt(buf);
    } else {
      maxRenderTime = 0;
    }
    if (maxErrorsStored) {
      maxErrors = VarintUtils.readUnsignedInt(buf);
    } else {
      maxErrors = 0;
    }
    if (angleStored) {
      angle = buf.getDouble();
    } else {
      angle = 0;
    }
    if (bufferStored) {
      buffer = VarintUtils.readUnsignedInt(buf);
    } else {
      buffer = 0;
    }
    if (bgColorStored) {
      bgColor = new Color(buf.getInt());
    } else {
      bgColor = null;
    }
    if (styleStored) {
      final byte[] styleBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
      final SLDParser parser =
          new SLDParser(
              CommonFactoryFinder.getStyleFactory(null),
              new ByteArrayInputStream(styleBinary));
      final Style[] styles = parser.readXML();
      if ((styles != null) && (styles.length > 0)) {
        style = styles[0];
      } else {
        LOGGER.warn("Unable to deserialize style");
        style = null;
      }
    } else {
      style = null;
    }
  }
}
================================================ FILE:
extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderResult.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.render; import java.awt.AlphaComposite; import java.awt.Graphics2D; import java.awt.Transparency; import java.awt.image.BufferedImage; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.tuple.Pair; import org.geoserver.wms.map.ImageUtils; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Mergeable; import org.locationtech.geowave.core.index.VarintUtils; public class DistributedRenderResult implements Mergeable { public static class CompositeGroupResult implements Mergeable { private PersistableComposite composite; // keep each style separate so they can be composited together in the // original draw order private List> orderedStyles; public CompositeGroupResult() {} public CompositeGroupResult( final PersistableComposite composite, final List> orderedStyles) { this.composite = composite; this.orderedStyles = orderedStyles; } private void render(final Graphics2D parentGraphics, final int width, final int height) { Graphics2D graphics; BufferedImage compositeGroupImage = null; if ((composite != null) && (composite.getComposite() != null)) { // this will render to a back buffer so that compositeGroupImage = parentGraphics.getDeviceConfiguration().createCompatibleImage( width, height, Transparency.TRANSLUCENT); graphics = compositeGroupImage.createGraphics(); graphics.setRenderingHints(parentGraphics.getRenderingHints()); } else { graphics = parentGraphics; } for (final Pair currentStyle : orderedStyles) { if ((currentStyle == null) || (currentStyle.getKey() == null) 
|| (currentStyle.getKey().image == null)) { continue; } if ((currentStyle.getValue() == null) || (currentStyle.getValue().getComposite() == null)) { graphics.setComposite(AlphaComposite.SrcOver); } else { graphics.setComposite(currentStyle.getValue().getComposite()); } graphics.drawImage(currentStyle.getKey().image, 0, 0, null); } if (compositeGroupImage != null) { if ((composite == null) || (composite.getComposite() == null)) { parentGraphics.setComposite(AlphaComposite.SrcOver); } else { parentGraphics.setComposite(composite.getComposite()); } parentGraphics.drawImage(compositeGroupImage, 0, 0, null); graphics.dispose(); } } @Override public byte[] toBinary() { final byte[] compositeBinary; if (composite != null) { compositeBinary = composite.toBinary(); } else { compositeBinary = new byte[] {}; } final List styleBinaries = new ArrayList<>(orderedStyles.size()); int bufferSize = compositeBinary.length + VarintUtils.unsignedIntByteLength(compositeBinary.length); for (final Pair style : orderedStyles) { byte[] styleBinary; if (style != null) { byte[] styleCompositeBinary; if (style.getRight() != null) { styleCompositeBinary = style.getRight().toBinary(); } else { styleCompositeBinary = new byte[] {}; } byte[] styleImageBinary; if (style.getLeft() != null) { styleImageBinary = style.getLeft().toBinary(); } else { styleImageBinary = new byte[] {}; } final ByteBuffer styleBuf = ByteBuffer.allocate( styleCompositeBinary.length + styleImageBinary.length + VarintUtils.unsignedIntByteLength(styleCompositeBinary.length)); VarintUtils.writeUnsignedInt(styleCompositeBinary.length, styleBuf); if (styleCompositeBinary.length > 0) { styleBuf.put(styleCompositeBinary); } if (styleImageBinary.length > 0) { styleBuf.put(styleImageBinary); } styleBinary = styleBuf.array(); } else { styleBinary = new byte[] {}; } styleBinaries.add(styleBinary); bufferSize += (styleBinary.length + VarintUtils.unsignedIntByteLength(styleBinary.length)); } bufferSize += 
VarintUtils.unsignedIntByteLength(styleBinaries.size()); final ByteBuffer buf = ByteBuffer.allocate(bufferSize); VarintUtils.writeUnsignedInt(compositeBinary.length, buf); if (compositeBinary.length > 0) { buf.put(compositeBinary); } VarintUtils.writeUnsignedInt(styleBinaries.size(), buf); for (final byte[] styleBinary : styleBinaries) { VarintUtils.writeUnsignedInt(styleBinary.length, buf); buf.put(styleBinary); } return buf.array(); } @Override public void fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final int compositeBinaryLength = VarintUtils.readUnsignedInt(buf); if (compositeBinaryLength > 0) { final byte[] compositeBinary = ByteArrayUtils.safeRead(buf, compositeBinaryLength); composite = new PersistableComposite(); composite.fromBinary(compositeBinary); } else { composite = null; } final int styleLength = VarintUtils.readUnsignedInt(buf); ByteArrayUtils.verifyBufferSize(buf, styleLength); orderedStyles = new ArrayList<>(styleLength); for (int i = 0; i < styleLength; i++) { final int styleBinaryLength = VarintUtils.readUnsignedInt(buf); if (styleBinaryLength > 0) { final byte[] styleBinary = ByteArrayUtils.safeRead(buf, styleBinaryLength); final ByteBuffer styleBuf = ByteBuffer.wrap(styleBinary); final int styleCompositeBinaryLength = VarintUtils.readUnsignedInt(styleBuf); PersistableComposite styleComposite; if (styleCompositeBinaryLength > 0) { final byte[] styleCompositeBinary = ByteArrayUtils.safeRead(styleBuf, styleCompositeBinaryLength); styleComposite = new PersistableComposite(); styleComposite.fromBinary(styleCompositeBinary); } else { styleComposite = null; } final int styleImageBinaryLength = styleBuf.remaining(); PersistableRenderedImage styleImage; if (styleImageBinaryLength > 0) { final byte[] styleImageBinary = new byte[styleImageBinaryLength]; styleBuf.get(styleImageBinary); styleImage = new PersistableRenderedImage(); styleImage.fromBinary(styleImageBinary); } else { styleImage = null; } 
orderedStyles.add(Pair.of(styleImage, styleComposite)); } else { orderedStyles.add(null); } } } @Override public void merge(final Mergeable merge) { if (merge instanceof CompositeGroupResult) { final CompositeGroupResult other = (CompositeGroupResult) merge; final List> newOrderedStyles = new ArrayList<>(); final int minStyles = Math.min(orderedStyles.size(), other.orderedStyles.size()); for (int i = 0; i < minStyles; i++) { final Pair thisStyle = orderedStyles.get(i); final Pair otherStyle = other.orderedStyles.get(i); // all composites should be the same, if they're not then // these composite groups got mis-ordered by style // keep in mind that they can be null if nothing was // rendered to this style or other style because of rules // applied to that specific subset of data not resulting in // anything rendered for the style if (thisStyle != null) { if (otherStyle != null) { // render the images together and just arbitrarily // grab "this" composite as they both should be the // same newOrderedStyles.add( Pair.of( mergeImage(thisStyle.getLeft(), otherStyle.getLeft()), thisStyle.getRight())); } else { newOrderedStyles.add(thisStyle); } } else { newOrderedStyles.add(otherStyle); } } if (orderedStyles.size() > minStyles) { // hopefully this is never the case, but just in case newOrderedStyles.addAll(orderedStyles.subList(minStyles, orderedStyles.size())); } if (other.orderedStyles.size() > minStyles) { // hopefully this is never the case, but just in case newOrderedStyles.addAll( other.orderedStyles.subList(minStyles, other.orderedStyles.size())); } orderedStyles = newOrderedStyles; } } } // geotools has a concept of composites, which we need to keep separate so // that they can be composited in the original draw order, by default there // is only a single composite private List orderedComposites; // the parent image essentially gets labels rendered to it private PersistableRenderedImage parentImage; public DistributedRenderResult() {} public 
DistributedRenderResult( final PersistableRenderedImage parentImage, final List orderedComposites) { this.parentImage = parentImage; this.orderedComposites = orderedComposites; } public BufferedImage renderComposite(final DistributedRenderOptions renderOptions) { final BufferedImage image = ImageUtils.createImage( renderOptions.getMapWidth(), renderOptions.getMapHeight(), renderOptions.getPalette(), renderOptions.isTransparent() || renderOptions.isMetatile()); final Graphics2D graphics = ImageUtils.prepareTransparency( renderOptions.isTransparent(), renderOptions.getBgColor(), image, null); for (final CompositeGroupResult compositeGroup : orderedComposites) { compositeGroup.render(graphics, renderOptions.getMapWidth(), renderOptions.getMapHeight()); } final BufferedImage img = parentImage.getImage(); graphics.drawImage(img, 0, 0, null); graphics.dispose(); return image; } @Override public byte[] toBinary() { // 4 bytes for the length as an int, and 4 bytes for the size of // parentImage final byte[] parentImageBinary = parentImage.toBinary(); int byteSize = VarintUtils.unsignedIntByteLength(parentImageBinary.length) + parentImageBinary.length + VarintUtils.unsignedIntByteLength(orderedComposites.size()); final List compositeBinaries = new ArrayList<>(orderedComposites.size()); for (final CompositeGroupResult compositeGroup : orderedComposites) { final byte[] compositeGroupBinary = compositeGroup.toBinary(); byteSize += (compositeGroupBinary.length + VarintUtils.unsignedIntByteLength(compositeGroupBinary.length)); compositeBinaries.add(compositeGroupBinary); } final ByteBuffer buf = ByteBuffer.allocate(byteSize); VarintUtils.writeUnsignedInt(parentImageBinary.length, buf); buf.put(parentImageBinary); VarintUtils.writeUnsignedInt(orderedComposites.size(), buf); for (final byte[] compositeGroupBinary : compositeBinaries) { VarintUtils.writeUnsignedInt(compositeGroupBinary.length, buf); buf.put(compositeGroupBinary); } return buf.array(); } @Override public void 
fromBinary(final byte[] bytes) { final ByteBuffer buf = ByteBuffer.wrap(bytes); final byte[] parentImageBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); parentImage = new PersistableRenderedImage(); parentImage.fromBinary(parentImageBinary); final int numCompositeGroups = VarintUtils.readUnsignedInt(buf); orderedComposites = new ArrayList<>(numCompositeGroups); for (int i = 0; i < numCompositeGroups; i++) { final byte[] compositeGroupBinary = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf)); final CompositeGroupResult compositeGroup = new CompositeGroupResult(); compositeGroup.fromBinary(compositeGroupBinary); orderedComposites.add(compositeGroup); } } @Override public void merge(final Mergeable merge) { if (merge instanceof DistributedRenderResult) { final DistributedRenderResult other = ((DistributedRenderResult) merge); final int minComposites = Math.min(orderedComposites.size(), other.orderedComposites.size()); // first render parents together if ((parentImage != null) && (parentImage.image != null)) { if ((other.parentImage != null) && (other.parentImage.image != null)) { // all composites should be the same, if they're not // then these distributed results got mis-ordered by // composite group, so composite remains this.composite parentImage = mergeImage(parentImage, other.parentImage); } } else { parentImage = other.parentImage; } final List newOrderedComposites = new ArrayList<>(); for (int c = 0; c < minComposites; c++) { final CompositeGroupResult thisCompositeGroup = orderedComposites.get(c); final CompositeGroupResult otherCompositeGroup = other.orderedComposites.get(c); thisCompositeGroup.merge(otherCompositeGroup); newOrderedComposites.add(thisCompositeGroup); } if (orderedComposites.size() > minComposites) { // hopefully this is never the case, but just in case newOrderedComposites.addAll( orderedComposites.subList(minComposites, orderedComposites.size())); } if (other.orderedComposites.size() > minComposites) { 
// hopefully this is never the case, but just in case newOrderedComposites.addAll( other.orderedComposites.subList(minComposites, other.orderedComposites.size())); } orderedComposites = newOrderedComposites; } } private static PersistableRenderedImage mergeImage( final PersistableRenderedImage image1, final PersistableRenderedImage image2) { final Graphics2D graphics = image1.image.createGraphics(); graphics.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER)); graphics.drawImage(image2.image, 0, 0, null); graphics.dispose(); return new PersistableRenderedImage(image1.image); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/DistributedRenderWMSFacade.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.render; import org.geoserver.wms.WMS; import org.geoserver.wms.WMSInfo; import org.geoserver.wms.WMSInfo.WMSInterpolation; import org.geoserver.wms.WMSInfoImpl; public class DistributedRenderWMSFacade extends WMS { private final DistributedRenderOptions options; public DistributedRenderWMSFacade(final DistributedRenderOptions options) { super(null); this.options = options; } @Override public int getMaxBuffer() { return options.getBuffer(); } @Override public int getMaxRenderingTime() { return options.getMaxRenderTime(); } @Override public int getMaxRenderingErrors() { return options.getMaxErrors(); } @Override public WMSInterpolation getInterpolation() { return WMSInterpolation.values()[options.getWmsInterpolationOrdinal()]; } @Override public boolean isContinuousMapWrappingEnabled() { return options.isContinuousMapWrapping(); } @Override public boolean isAdvancedProjectionHandlingEnabled() { return options.isAdvancedProjectionHandlingEnabled(); } @Override public WMSInfo getServiceInfo() { return new WMSInfoImpl(); } @Override public int getMaxRequestMemory() { // bypass checking memory within distributed rendering return -1; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/InternalDistributedRenderProcess.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.render; import java.awt.image.BufferedImage; import org.geotools.coverage.CoverageFactoryFinder; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.coverage.grid.GridCoverageFactory; import org.geotools.data.Query; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.process.ProcessException; import org.geotools.process.factory.DescribeParameter; import org.geotools.process.factory.DescribeProcess; import org.geotools.process.factory.DescribeResult; import org.geotools.util.factory.GeoTools; import org.opengis.coverage.grid.GridGeometry; import org.opengis.feature.simple.SimpleFeature; /** * This class can be used as a GeoTools Render Transform ('geowave:DistributedRender') within an SLD * on any layer that uses the GeoWave Data Store. An example SLD is provided * (example-slds/DecimatePoints.sld). The pixel-size allows you to skip more than a single pixel. * For example, a pixel size of 3 would skip an estimated 3x3 pixel cell in GeoWave's row IDs. Note * that rows are only skipped when a feature successfully passes filters. 
*/
@DescribeProcess(
    title = "InternalDistributedRender",
    description = "This process will enable GeoWave to render WMS requests within the server and then this will be responsible for compositing the result client-side.")
public class InternalDistributedRenderProcess {

  /**
   * Vector-to-raster render transform that takes a single feature wrapping a
   * {@link DistributedRenderResult} (attribute 0) and its {@link DistributedRenderOptions}
   * (attribute 1) and converts it to a {@link GridCoverage2D}.
   *
   * @param features feature collection containing the rendered image
   * @return the composited coverage, or null when the collection is null or empty
   */
  @DescribeResult(
      name = "result",
      description = "This is just a pass-through, the key is to provide enough information within invertQuery to perform a map to screen transform")
  public GridCoverage2D execute(
      @DescribeParameter(
          name = "data",
          description = "Feature collection containing the rendered image") final SimpleFeatureCollection features)
      throws ProcessException {
    // vector-to-raster render transform that take a single feature that
    // wraps a distributed render result and converts it to a GridCoverage2D
    if (features != null) {
      // fix: SimpleFeatureIterator is Closeable and was previously leaked
      try (SimpleFeatureIterator it = features.features()) {
        if (it.hasNext()) {
          final SimpleFeature resultFeature = it.next();
          final DistributedRenderResult actualResult =
              (DistributedRenderResult) resultFeature.getAttribute(0);
          final DistributedRenderOptions renderOptions =
              (DistributedRenderOptions) resultFeature.getAttribute(1);
          // convert to the GridCoverage2D required for output
          final GridCoverageFactory gcf =
              CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
          final BufferedImage result = actualResult.renderComposite(renderOptions);
          return gcf.create("Process Results", result, renderOptions.getEnvelope());
        }
      }
    }
    return null;
  }

  public Query invertQuery(final Query targetQuery, final GridGeometry targetGridGeometry)
      throws ProcessException {
    // it seems that without invertQuery returning the targetQuery, the geom
    // property field does not get set in the filter (line 205 of
    // org.geotools.renderer.lite.RenderingTransformationHelper in geotools
    // v15.1)
    return targetQuery;
  }
}
================================================ FILE:
extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/PersistableComposite.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.render;

import java.awt.AlphaComposite;
import java.awt.Composite;
import java.nio.ByteBuffer;
import org.geotools.renderer.composite.BlendComposite;
import org.geotools.renderer.composite.BlendComposite.BlendingMode;
import org.locationtech.geowave.core.index.persist.Persistable;

/**
 * Serializes an AWT {@link Composite} so it can be shipped between distributed renderers. The two
 * implementations GeoTools uses are supported: {@link BlendComposite} and {@link AlphaComposite}.
 */
public class PersistableComposite implements Persistable {
  // wire flag: byte 0 means blend composite, byte 1 means alpha composite
  private boolean isBlend = true;
  // BlendingMode ordinal when blending, otherwise the AlphaComposite rule
  private int blendModeOrAlphaRule = 0;
  private float alpha = 1f;

  public PersistableComposite() {}

  public PersistableComposite(final Composite composite) {
    if (composite instanceof BlendComposite) {
      final BlendComposite blend = (BlendComposite) composite;
      isBlend = true;
      blendModeOrAlphaRule = blend.getBlend().ordinal();
      alpha = blend.getAlpha();
    } else if (composite instanceof AlphaComposite) {
      final AlphaComposite alphaComposite = (AlphaComposite) composite;
      isBlend = false;
      blendModeOrAlphaRule = alphaComposite.getRule();
      alpha = alphaComposite.getAlpha();
    }
  }

  /** Reconstructs the composite from the persisted mode/rule and alpha. */
  public Composite getComposite() {
    if (isBlend) {
      // NOTE(review): relies on BlendingMode ordinal stability across GeoTools versions — this is
      // inherent to the existing wire format
      return BlendComposite.getInstance(BlendingMode.values()[blendModeOrAlphaRule], alpha);
    }
    return AlphaComposite.getInstance(blendModeOrAlphaRule, alpha);
  }

  @Override
  public byte[] toBinary() {
    // 1 byte flag + 4 byte int + 4 byte float = 9 bytes
    final ByteBuffer buffer = ByteBuffer.allocate(9);
    if (isBlend) {
      buffer.put((byte) 0);
    } else {
      buffer.put((byte) 1);
    }
    buffer.putInt(blendModeOrAlphaRule);
    buffer.putFloat(alpha);
    return buffer.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    isBlend = (buffer.get() == 0);
    blendModeOrAlphaRule = buffer.getInt();
    alpha = buffer.getFloat();
  }
}
================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/render/PersistableRenderedImage.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.render;

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ar.com.hjg.pngj.FilterType;
import it.geosolutions.imageio.plugins.png.PNGWriter;

/**
 * Wraps a rendered image as a GeoWave {@link Persistable}. The image is serialized as a PNG (via
 * the geosolutions {@link PNGWriter}) and deserialized with {@link ImageIO}.
 */
public class PersistableRenderedImage implements Persistable {
  private static final Logger LOGGER = LoggerFactory.getLogger(PersistableRenderedImage.class);
  private static final float DEFAULT_PNG_QUALITY = 0.8f;
  public BufferedImage image;

  public PersistableRenderedImage() {}

  public PersistableRenderedImage(final BufferedImage image) {
    this.image = image;
  }

  public BufferedImage getImage() {
    return image;
  }

  @Override
  public byte[] toBinary() {
    // a missing image round-trips as an empty byte array (see fromBinary)
    if (image == null) {
      return new byte[0];
    }
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
      // we could just use the expected output format, but that may not be correct, instead we use
      // PNG: even when the requested image is e.g. jpeg, the individual styles may need to retain
      // transparency to be composited correctly
      final PNGWriter writer = new PNGWriter();
      image =
          (BufferedImage) writer.writePNG(image, out, DEFAULT_PNG_QUALITY, FilterType.FILTER_NONE);
    } catch (final Exception e) {
      // best effort: log and return whatever bytes were produced
      LOGGER.warn("Unable to serialize image", e);
    }
    return out.toByteArray();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    // an empty payload means no image was serialized
    if (bytes.length == 0) {
      return;
    }
    final ByteArrayInputStream in = new ByteArrayInputStream(bytes);
    try {
      image = ImageIO.read(in);
    } catch (final IOException e) {
      // best effort: the image field is simply left unset on failure
      LOGGER.warn("Unable to deserialize image", e);
    }
  }
}
================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/DateUtilities.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.util; import java.text.SimpleDateFormat; import java.util.Date; import org.locationtech.geowave.core.geotime.store.query.TemporalRange; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; public class DateUtilities { public static Date parseISO(String input) throws java.text.ParseException { // NOTE: SimpleDateFormat uses GMT[-+]hh:mm for the TZ which breaks // things a bit. Before we go on we have to repair this. 
final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz"); // this is zero time so we need to add that TZ indicator for if (input.endsWith("Z")) { input = input.substring(0, input.length() - 1) + "GMT-00:00"; } else { final int inset = 6; final String s0 = input.substring(0, input.length() - inset); final String s1 = input.substring(input.length() - inset, input.length()); input = s0 + "GMT" + s1; } return df.parse(input); } public static TemporalRange getTemporalRange( final DataStorePluginOptions dataStorePlugin, final String typeName, final String timeField) { final DataStatisticsStore statisticsStore = dataStorePlugin.createDataStatisticsStore(); final InternalAdapterStore internalAdapterStore = dataStorePlugin.createInternalAdapterStore(); final PersistentAdapterStore adapterStore = dataStorePlugin.createAdapterStore(); final short adapterId = internalAdapterStore.getAdapterId(typeName); final DataTypeAdapter adapter = adapterStore.getAdapter(adapterId); // if this is a ranged schema, we have to get complete bounds if (timeField.contains("|")) { final int pipeIndex = timeField.indexOf("|"); final String startField = timeField.substring(0, pipeIndex); final String endField = timeField.substring(pipeIndex + 1); Date start = null; Date end = null; try (CloseableIterator>> statIter = statisticsStore.getFieldStatistics( adapter, TimeRangeStatistic.STATS_TYPE, startField, null)) { if (statIter.hasNext()) { TimeRangeStatistic statistic = (TimeRangeStatistic) statIter.next(); if (statistic != null) { TimeRangeValue value = statisticsStore.getStatisticValue(statistic); if (value != null) { start = value.getMinTime(); } } } } try (CloseableIterator>> statIter = statisticsStore.getFieldStatistics( adapter, TimeRangeStatistic.STATS_TYPE, endField, null)) { if (statIter.hasNext()) { TimeRangeStatistic statistic = (TimeRangeStatistic) statIter.next(); if (statistic != null) { TimeRangeValue value = statisticsStore.getStatisticValue(statistic); if (value != 
null) { end = value.getMaxTime(); } } } } if ((start != null) && (end != null)) { return new TemporalRange(start, end); } } else { // Look up the time range stat for this adapter try (CloseableIterator>> statIter = statisticsStore.getFieldStatistics( adapter, TimeRangeStatistic.STATS_TYPE, timeField, null)) { if (statIter.hasNext()) { TimeRangeStatistic statistic = (TimeRangeStatistic) statIter.next(); if (statistic != null) { TimeRangeValue value = statisticsStore.getStatisticValue(statistic); if (value != null) { return value.asTemporalRange(); } } } } } return null; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/FeatureDataUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.util; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.UUID; import org.apache.commons.lang3.tuple.Pair; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.referencing.CRS; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.adapter.InternalAdapterStore; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.cs.CoordinateSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class FeatureDataUtils { private static final Logger LOGGER = LoggerFactory.getLogger(FeatureDataUtils.class); public static String getAxis(final CoordinateReferenceSystem crs) { // Some 
geometries do not have a CRS provided. Thus we default to // urn:ogc:def:crs:EPSG::4326 final CoordinateSystem cs = crs == null ? null : crs.getCoordinateSystem(); if ((cs != null) && (cs.getDimension() > 0)) { return cs.getAxis(0).getDirection().name().toString(); } return "EAST"; } public static SimpleFeatureType decodeType( final String nameSpace, final String typeName, final String typeDescriptor, final String axis) throws SchemaException { SimpleFeatureType featureType = (nameSpace != null) && (nameSpace.length() > 0) ? DataUtilities.createType(nameSpace, typeName, typeDescriptor) : DataUtilities.createType(typeName, typeDescriptor); final String lCaseAxis = axis.toLowerCase(Locale.ENGLISH); final CoordinateReferenceSystem crs = featureType.getCoordinateReferenceSystem(); final String typeAxis = getAxis(crs); // Default for EPSG:4326 is lat/long, If the provided type was // long/lat, then re-establish the order if ((crs != null) && crs.getIdentifiers().toString().contains("EPSG:4326") && !lCaseAxis.equalsIgnoreCase(typeAxis)) { final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); builder.init(featureType); try { // truely no way to force lat first // but it is the default in later versions of GeoTools. 
// this all depends on the authority at the time of creation featureType = SimpleFeatureTypeBuilder.retype( featureType, CRS.decode("EPSG:4326", lCaseAxis.equals("east"))); } catch (final FactoryException e) { throw new SchemaException("Cannot decode EPSG:4326", e); } } return featureType; } public static SimpleFeature buildFeature( final SimpleFeatureType featureType, final Pair[] entries) { final List descriptors = featureType.getAttributeDescriptors(); final Object[] defaults = new Object[descriptors.size()]; int p = 0; for (final AttributeDescriptor descriptor : descriptors) { defaults[p++] = descriptor.getDefaultValue(); } final SimpleFeature newFeature = SimpleFeatureBuilder.build(featureType, defaults, UUID.randomUUID().toString()); for (final Pair entry : entries) { newFeature.setAttribute(entry.getKey(), entry.getValue()); } return newFeature; } public static SimpleFeatureType getFeatureType( final DataStorePluginOptions dataStore, String typeName) { // if no id provided, locate a single featureadapter if (typeName == null) { final List typeNameList = FeatureDataUtils.getFeatureTypeNames(dataStore); if (typeNameList.size() >= 1) { typeName = typeNameList.get(0); } else if (typeNameList.isEmpty()) { LOGGER.error("No feature adapters found for use with time param"); return null; } else { LOGGER.error("Multiple feature adapters found. 
Please specify one."); return null; } } final PersistentAdapterStore adapterStore = dataStore.createAdapterStore(); final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore(); final DataTypeAdapter adapter = adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter(); if ((adapter != null) && (adapter instanceof GeotoolsFeatureDataAdapter)) { final GeotoolsFeatureDataAdapter gtAdapter = (GeotoolsFeatureDataAdapter) adapter; return gtAdapter.getFeatureType(); } return null; } public static FeatureDataAdapter cloneFeatureDataAdapter( final DataStorePluginOptions storeOptions, final String originalTypeName, final String newTypeName) { // Get original feature type info final SimpleFeatureType oldType = FeatureDataUtils.getFeatureType(storeOptions, originalTypeName); // Build type using new name final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); sftBuilder.init(oldType); sftBuilder.setName(newTypeName); final SimpleFeatureType newType = sftBuilder.buildFeatureType(); // Create new adapter that will use new typename final FeatureDataAdapter newAdapter = new FeatureDataAdapter(newType); return newAdapter; } public static String getGeomField(final DataStorePluginOptions dataStore, final String typeName) { final PersistentAdapterStore adapterStore = dataStore.createAdapterStore(); final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore(); final DataTypeAdapter adapter = adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter(); if ((adapter != null) && (adapter instanceof GeotoolsFeatureDataAdapter)) { final GeotoolsFeatureDataAdapter gtAdapter = (GeotoolsFeatureDataAdapter) adapter; final SimpleFeatureType featureType = gtAdapter.getFeatureType(); if (featureType.getGeometryDescriptor() != null) { return featureType.getGeometryDescriptor().getLocalName(); } } return null; } public static String getTimeField(final DataStorePluginOptions 
dataStore, final String typeName) { final PersistentAdapterStore adapterStore = dataStore.createAdapterStore(); final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore(); final DataTypeAdapter adapter = adapterStore.getAdapter(internalAdapterStore.getAdapterId(typeName)).getAdapter(); if ((adapter != null) && (adapter instanceof GeotoolsFeatureDataAdapter)) { final GeotoolsFeatureDataAdapter gtAdapter = (GeotoolsFeatureDataAdapter) adapter; final SimpleFeatureType featureType = gtAdapter.getFeatureType(); final TimeDescriptors timeDescriptors = gtAdapter.getTimeDescriptors(); // If not indexed, try to find a time field if ((timeDescriptors == null) || !timeDescriptors.hasTime()) { for (final AttributeDescriptor attrDesc : featureType.getAttributeDescriptors()) { final Class bindingClass = attrDesc.getType().getBinding(); if (TimeUtils.isTemporal(bindingClass)) { return attrDesc.getLocalName(); } } } else { if (timeDescriptors.getTime() != null) { return timeDescriptors.getTime().getLocalName(); } else if (timeDescriptors.getStartRange() != null) { // give back start|stop string return timeDescriptors.getStartRange().getLocalName() + "|" + timeDescriptors.getEndRange().getLocalName(); } } } return null; } public static int getFeatureAdapterCount(final DataStorePluginOptions dataStore) { final InternalDataAdapter[] adapters = dataStore.createAdapterStore().getAdapters(); int featureAdapters = 0; for (final DataTypeAdapter adapter : adapters) { if (adapter instanceof GeotoolsFeatureDataAdapter) { featureAdapters++; } } return featureAdapters; } public static List getFeatureTypeNames(final DataStorePluginOptions dataStore) { final ArrayList featureTypeNames = new ArrayList<>(); final InternalDataAdapter[] adapters = dataStore.createAdapterStore().getAdapters(); for (final InternalDataAdapter internalAdapter : adapters) { final DataTypeAdapter adapter = internalAdapter.getAdapter(); if (adapter instanceof GeotoolsFeatureDataAdapter) { 
featureTypeNames.add(adapter.getTypeName()); } } return featureTypeNames; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/FeatureGeometryUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.util;

import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.jts.geom.Envelope;

public class FeatureGeometryUtils {

  /**
   * Reads the bounding-box statistic for a type's geometry field.
   *
   * @param dataStorePlugin the data store plugin options
   * @param typeName the type whose bounds are requested
   * @param geomField the geometry field the statistic is tracked on
   * @return the stored envelope, or null when no statistic value is available
   */
  public static Envelope getGeoBounds(
      final DataStorePluginOptions dataStorePlugin,
      final String typeName,
      final String geomField) {
    final DataStatisticsStore statisticsStore = dataStorePlugin.createDataStatisticsStore();
    final InternalAdapterStore internalAdapterStore = dataStorePlugin.createInternalAdapterStore();
    final PersistentAdapterStore adapterStore = dataStorePlugin.createAdapterStore();
    final short adapterId = internalAdapterStore.getAdapterId(typeName);
    final DataTypeAdapter<?> adapter = adapterStore.getAdapter(adapterId);
    try (CloseableIterator<? extends Statistic<? extends StatisticValue<?>>> statIter =
        statisticsStore.getFieldStatistics(
            adapter,
            BoundingBoxStatistic.STATS_TYPE,
            geomField,
            null)) {
      if (!statIter.hasNext()) {
        return null;
      }
      final BoundingBoxStatistic statistic = (BoundingBoxStatistic) statIter.next();
      if (statistic == null) {
        return null;
      }
      final BoundingBoxValue value = statisticsStore.getStatisticValue(statistic);
      return (value == null) ? null : value.getValue();
    }
  }
}
================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/FeatureTranslatingIterator.java ================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.util; import java.util.Collection; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.locationtech.geowave.core.store.CloseableIterator; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; public class FeatureTranslatingIterator implements CloseableIterator { private final SimpleFeatureTranslator translator; private final CloseableIterator iteratorDelegate; public FeatureTranslatingIterator( final SimpleFeatureType originalType, final Collection desiredFields, final CloseableIterator originalFeatures) { translator = new SimpleFeatureTranslator(originalType, desiredFields); iteratorDelegate = originalFeatures; } @Override public boolean hasNext() { return iteratorDelegate.hasNext(); } @Override public SimpleFeature next() { return translator.translate(iteratorDelegate.next()); } @Override public void remove() { iteratorDelegate.remove(); } @Override public void close() { iteratorDelegate.close(); } private static class SimpleFeatureTranslator { private final Collection fields; private SimpleFeatureType newType; private SimpleFeatureBuilder sfBuilder; public SimpleFeatureTranslator( final SimpleFeatureType originalType, final Collection fields) { this.fields = fields; initialize(originalType); } private void initialize(final SimpleFeatureType originalType) { final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder(); sftBuilder.setName(originalType.getName()); for (final 
AttributeDescriptor ad : originalType.getAttributeDescriptors()) { if (fields.contains(ad.getLocalName())) { sftBuilder.add(ad.getLocalName(), ad.getClass()); } } newType = sftBuilder.buildFeatureType(); sfBuilder = new SimpleFeatureBuilder(newType); } public SimpleFeature translate(final SimpleFeature original) { for (final String field : fields) { final Object value = original.getAttribute(field); if (value != null) { sfBuilder.set(field, value); } } return sfBuilder.buildFeature(original.getID()); } } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/PolygonAreaCalculator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.util; import java.util.HashMap; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.locationtech.jts.densify.Densifier; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Point; import org.opengis.referencing.FactoryException; import org.opengis.referencing.NoSuchAuthorityCodeException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.MathTransform; public class PolygonAreaCalculator { private static final double DEFAULT_DENSIFY_VERTEX_COUNT = 1000.0; private static final double SQM_2_SQKM = 1.0 / 1000000.0; private double densifyVertexCount = DEFAULT_DENSIFY_VERTEX_COUNT; private final HashMap crsMap = new HashMap<>(); public PolygonAreaCalculator() {} private CoordinateReferenceSystem lookupUtmCrs(final double centerLat, final double centerLon) throws NoSuchAuthorityCodeException, FactoryException { final int epsgCode = (32700 - (Math.round((45f + (float) centerLat) / 90f) * 100)) + Math.round((183f + (float) centerLon) / 6f); final String crsId = "EPSG:" + Integer.toString(epsgCode); CoordinateReferenceSystem crs = crsMap.get(crsId); if (crs == null) { crs = CRS.decode(crsId, true); crsMap.put(crsId, crs); } return crs; } public double getAreaSimple(final Geometry polygon) throws Exception { final Point centroid = polygon.getCentroid(); final CoordinateReferenceSystem equalAreaCRS = lookupUtmCrs(centroid.getY(), centroid.getX()); final MathTransform transform = 
CRS.findMathTransform(DefaultGeographicCRS.WGS84, equalAreaCRS, true); final Geometry transformedPolygon = JTS.transform(polygon, transform); return transformedPolygon.getArea() * SQM_2_SQKM; } public double getAreaDensify(final Geometry polygon) throws Exception { final Point centroid = polygon.getCentroid(); final CoordinateReferenceSystem equalAreaCRS = lookupUtmCrs(centroid.getY(), centroid.getX()); final double vertexSpacing = polygon.getLength() / densifyVertexCount; final Geometry densePolygon = Densifier.densify(polygon, vertexSpacing); final MathTransform transform = CRS.findMathTransform(DefaultGeographicCRS.WGS84, equalAreaCRS, true); final Geometry transformedPolygon = JTS.transform(densePolygon, transform); return transformedPolygon.getArea() * SQM_2_SQKM; } public void setDensifyVertexCount(final double densifyVertexCount) { this.densifyVertexCount = densifyVertexCount; } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/QueryIndexHelper.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.util; import java.util.HashMap; import java.util.Map; import org.geotools.geometry.jts.ReferencedEnvelope; import org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.store.query.ExplicitSpatialTemporalQuery; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraints; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet; import org.locationtech.geowave.core.geotime.store.query.TemporalRange; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic; import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils.GeoConstraintsWrapper; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.geotime.util.TimeDescriptors; import org.locationtech.geowave.core.geotime.util.TimeUtils; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData; import 
org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.TransformException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class QueryIndexHelper { private static final Logger LOGGER = LoggerFactory.getLogger(QueryIndexHelper.class); private static TemporalRange getTimeRange( final StatisticsCache statisticsCache, final AttributeDescriptor attr) { TemporalRange timeRange = null; if (attr != null) { TimeRangeValue value = statisticsCache.getFieldStatistic(TimeRangeStatistic.STATS_TYPE, attr.getLocalName()); if (value != null) { timeRange = value.asTemporalRange(); } } return timeRange; } private static BoundingBoxValue getBounds( final StatisticsCache statisticsCache, final AttributeDescriptor attr) { return statisticsCache.getFieldStatistic(BoundingBoxStatistic.STATS_TYPE, attr.getLocalName()); } /** * Clip the provided constraints using the statistics, if available. 
*/ public static TemporalConstraintsSet clipIndexedTemporalConstraints( final StatisticsCache statisticsCache, final TimeDescriptors timeDescriptors, final TemporalConstraintsSet constraintsSet) { if ((timeDescriptors.getEndRange() != null) && (timeDescriptors.getStartRange() != null)) { final String ename = timeDescriptors.getEndRange().getLocalName(); final String sname = timeDescriptors.getStartRange().getLocalName(); if (constraintsSet.hasConstraintsForRange(sname, ename)) { final TemporalRange statsStartRange = getTimeRange(statisticsCache, timeDescriptors.getStartRange()); final TemporalRange statsEndRange = getTimeRange(statisticsCache, timeDescriptors.getEndRange()); final TemporalRange fullRange = new TemporalRange(statsStartRange.getStartTime(), statsEndRange.getEndTime()); final TemporalConstraints constraints = constraintsSet.getConstraintsForRange(sname, ename); constraints.replaceWithIntersections( new TemporalConstraints(fullRange, constraints.getName())); constraintsSet.removeAllConstraintsExcept(constraints.getName()); // this should be fixed to handle interwoven range. 
// specifically look for non-overlapping regions of time return constraintsSet; } } else if ((timeDescriptors.getTime() != null) && constraintsSet.hasConstraintsFor(timeDescriptors.getTime().getLocalName())) { final String name = timeDescriptors.getTime().getLocalName(); TemporalRange range = getTimeRange(statisticsCache, timeDescriptors.getTime()); final TemporalConstraints constraints = constraintsSet.getConstraintsFor(name); if (range != null) { constraints.replaceWithIntersections(new TemporalConstraints(range, name)); } constraintsSet.removeAllConstraintsExcept(name); return constraintsSet; } return constraintsSet; } /** * Clip the provided bounded box with the statistics for the index */ public static Geometry clipIndexedBBOXConstraints( final StatisticsCache statisticsCache, final SimpleFeatureType adapterFeatureType, final CoordinateReferenceSystem indexCRS, final Geometry bbox) { final BoundingBoxValue bounds = getBounds(statisticsCache, adapterFeatureType.getGeometryDescriptor()); if ((bounds != null) && bounds.isSet() && (bbox != null)) { CoordinateReferenceSystem bboxCRS = ((BoundingBoxStatistic) bounds.getStatistic()).getDestinationCrs(); if (bboxCRS == null) { bboxCRS = adapterFeatureType.getCoordinateReferenceSystem(); } try { final Geometry geo = new GeometryFactory().toGeometry( new ReferencedEnvelope(bounds.getValue(), bboxCRS).transform(indexCRS, true)); return geo.intersection(bbox); } catch (MismatchedDimensionException | TransformException | FactoryException e) { LOGGER.warn("Unable to transform bounding box statistic to index CRS"); } } return bbox; } public static ConstraintSet getTimeConstraintsFromIndex( final StatisticsCache statisticsCache, final TimeDescriptors timeDescriptors) { if ((timeDescriptors.getEndRange() != null) || (timeDescriptors.getStartRange() != null)) { final TemporalRange endRange = getTimeRange(statisticsCache, timeDescriptors.getEndRange()); final TemporalRange startRange = getTimeRange(statisticsCache, 
timeDescriptors.getStartRange()); if ((endRange != null) && (startRange != null)) { return ExplicitSpatialTemporalQuery.createConstraints(startRange.union(endRange), true); } else if (endRange != null) { return ExplicitSpatialTemporalQuery.createConstraints(endRange, true); } else if (startRange != null) { return ExplicitSpatialTemporalQuery.createConstraints(startRange, true); } } else if (timeDescriptors.getTime() != null) { final TemporalRange range = getTimeRange(statisticsCache, timeDescriptors.getTime()); if (range != null) { return ExplicitSpatialTemporalQuery.createConstraints(range, true); } } return new ConstraintSet(); } /** * Compose a time constraints. When the provided constraints do not fulfill the indexed * dimensions, compose constraints from statistics. */ public static ConstraintsByClass composeTimeConstraints( final StatisticsCache statisticsCache, final SimpleFeatureType featureType, final TimeDescriptors timeDescriptors, final TemporalConstraintsSet timeBoundsSet) { final TemporalConstraints timeBounds = TimeUtils.getTemporalConstraintsForDescriptors(timeDescriptors, timeBoundsSet); return (timeBounds != null) && !timeBounds.isEmpty() ? ExplicitSpatialTemporalQuery.createConstraints(timeBounds, false) : new ConstraintsByClass(getTimeConstraintsFromIndex(statisticsCache, timeDescriptors)); } /** * If composed constraints matched statistics constraints, are empty or null, then return empty * constraint set. 
*/ public static ConstraintsByClass composeTimeBoundedConstraints( final SimpleFeatureType featureType, final TimeDescriptors timeDescriptors, final TemporalConstraintsSet timeBoundsSet) { if ((timeBoundsSet == null) || timeBoundsSet.isEmpty() || !timeDescriptors.hasTime()) { return new ConstraintsByClass(); } final TemporalConstraints boundsTemporalConstraints = TimeUtils.getTemporalConstraintsForDescriptors(timeDescriptors, timeBoundsSet); if (boundsTemporalConstraints.isEmpty()) { return new ConstraintsByClass(); } final ConstraintsByClass boundsTimeConstraints = ExplicitSpatialTemporalQuery.createConstraints(boundsTemporalConstraints, false); return boundsTimeConstraints; } /** * If composed constraints matched statistics constraints, are empty or null, then return empty * constraint set */ public static GeoConstraintsWrapper composeGeometricConstraints( final SimpleFeatureType featureType, final Geometry jtsBounds) { if (jtsBounds == null) { return new GeoConstraintsWrapper(new ConstraintsByClass(), true, null); } final GeoConstraintsWrapper geoConstraints = GeometryUtils.basicGeoConstraintsWrapperFromGeometry(jtsBounds); return geoConstraints; } /** * Compose a query from the set of constraints. When the provided constraints do not fulfill the * indexed dimensions, compose constraints from statistics. 
*/ public static ConstraintsByClass composeConstraints( final StatisticsCache statisticsCache, final SimpleFeatureType featureType, final TimeDescriptors timeDescriptors, final Geometry jtsBounds, final TemporalConstraintsSet timeBoundsSet) { final ConstraintsByClass timeConstraints = composeTimeConstraints(statisticsCache, featureType, timeDescriptors, timeBoundsSet); final GeoConstraintsWrapper geoConstraints = composeGeometricConstraints(featureType, jtsBounds); return timeConstraints.merge(geoConstraints.getConstraints()); } } ================================================ FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/SimpleFeatureUserDataConfigurationSet.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.locationtech.geowave.core.geotime.util.SimpleFeatureUserDataConfiguration;
import org.locationtech.geowave.core.index.ByteArrayUtils;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.persist.Persistable;
import org.locationtech.geowave.core.index.persist.PersistenceUtils;
import org.opengis.feature.simple.SimpleFeatureType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

/**
 * Represents a set of configurations maintained within the user data of a simple feature type and
 * is tracked by the type name.
 */
public class SimpleFeatureUserDataConfigurationSet implements java.io.Serializable, Persistable {
  private static final long serialVersionUID = -1266366263353595379L;
  private static final Logger LOGGER =
      LoggerFactory.getLogger(SimpleFeatureUserDataConfigurationSet.class);
  /** System property naming a JSON file with configurations to apply in {@link #configureType}. */
  public static final String SIMPLE_FEATURE_CONFIG_FILE_PROP = "SIMPLE_FEATURE_CONFIG_FILE";

  /**
   * Name string accessed Map of SimpleFeatureUserDataConfiguration in this object. The name is the
   * SimpleFeatureType name that will have a configuration set.
   */
  private Map<String, List<SimpleFeatureUserDataConfiguration>> configurations = new HashMap<>();

  /** Default Constructor */
  public SimpleFeatureUserDataConfigurationSet() {}

  /**
   * Creates a new SimpleFeatureUserDataConfigurationSet configured using the passed in
   * SimpleFeature type. Will be accessed using the type name.
   *
   * @param type - SFT to be configured
   */
  public SimpleFeatureUserDataConfigurationSet(final SimpleFeatureType type) {
    final List<SimpleFeatureUserDataConfiguration> sfudc =
        getConfigurationsForType(type.getTypeName());
    for (final SimpleFeatureUserDataConfiguration configuration : sfudc) {
      configuration.configureFromType(type);
    }
  }

  /**
   * Creates a new SimpleFeatureUserDataConfigurationSet configured using the passed in
   * SimpleFeature type and adding the passed in configurations. Will be accessed using the type
   * name.
   *
   * @param type - SFT to be configured
   * @param configurations - configurations to add before configuring from the type
   */
  public SimpleFeatureUserDataConfigurationSet(
      final SimpleFeatureType type,
      final List<SimpleFeatureUserDataConfiguration> configurations) {
    super();
    getConfigurationsForType(type.getTypeName()).addAll(configurations);
    configureFromType(type);
  }

  /** @return a Map of all the SimpleFeatureUserDataConfiguration's by name */
  public Map<String, List<SimpleFeatureUserDataConfiguration>> getConfigurations() {
    return configurations;
  }

  /**
   * Gets a List of all the SimpleFeatureUserDataConfigurations for the SFT specified by the
   * 'typeName' string, creating an empty list on first access.
   *
   * @param typeName - SFT configuration desired
   * @return - List of configurations for the type (never null)
   */
  public synchronized List<SimpleFeatureUserDataConfiguration> getConfigurationsForType(
      final String typeName) {
    // get-or-create idiom; synchronized so concurrent first access creates only one list
    return configurations.computeIfAbsent(typeName, key -> new ArrayList<>());
  }

  /**
   * Add the passed in configuration to the list of configurations for the specified type name
   *
   * @param typeName - name of type which will get an added configuration
   * @param config - configuration to be added
   */
  public void addConfigurations(
      final String typeName,
      final SimpleFeatureUserDataConfiguration config) {
    getConfigurationsForType(typeName).add(config);
  }

  /**
   * Updates the entire list of SimpleFeatureUserDataConfiguration(s) with information from the
   * passed in SF type
   *
   * @param type - SF type to be updated
   */
  public void configureFromType(final SimpleFeatureType type) {
    final List<SimpleFeatureUserDataConfiguration> sfudc =
        getConfigurationsForType(type.getTypeName());
    // Go through list of SFUD configurations and update each one with
    // information from the passed in SF type
    for (final SimpleFeatureUserDataConfiguration configuration : sfudc) {
      configuration.configureFromType(type);
    }
  }

  /**
   * Updates the SFT with the entire list of SimpleFeatureUserDataConfiguration(s)
   *
   * @param type - SF type to be updated
   */
  public void updateType(final SimpleFeatureType type) {
    final List<SimpleFeatureUserDataConfiguration> sfudc =
        getConfigurationsForType(type.getTypeName());
    // Go through list of SFUD configurations and update each one in the
    // passed in SF type
    for (final SimpleFeatureUserDataConfiguration configuration : sfudc) {
      configuration.updateType(type);
    }
  }

  /**
   * Method that reads user data configuration information from
   * {@value #SIMPLE_FEATURE_CONFIG_FILE_PROP} and updates the passed in SFT.
   *
   * @param type - SFT to be updated
   * @return the SFT passed in as a parameter
   */
  @SuppressWarnings("deprecation")
  public static SimpleFeatureType configureType(final SimpleFeatureType type) {
    // HP Fortify "Path Manipulation" false positive
    // What Fortify considers "user input" comes only
    // from users with OS-level access anyway
    final String configFileName = System.getProperty(SIMPLE_FEATURE_CONFIG_FILE_PROP);
    if (configFileName != null) {
      final File configFile = new File(configFileName);
      if (configFile.exists() && configFile.canRead()) {
        try (FileInputStream input = new FileInputStream(configFile);
            Reader reader = new InputStreamReader(input, "UTF-8")) {
          final ObjectMapper mapper =
              new ObjectMapper().disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);
          final SimpleFeatureUserDataConfigurationSet instance =
              mapper.readValue(reader, SimpleFeatureUserDataConfigurationSet.class);
          instance.updateType(type);
        } catch (final IOException e) {
          // HP Fortify "Log Forging" false positive
          // What Fortify considers "user input" comes only
          // from users with OS-level access anyway
          LOGGER.error("Cannot parse JSON configuration file " + configFileName, e);
        }
      }
    }
    return type;
  }

  @Override
  public byte[] toBinary() {
    int size = 0;
    final List<byte[]> entries = new ArrayList<>(configurations.size());
    for (final Entry<String, List<SimpleFeatureUserDataConfiguration>> e : configurations.entrySet()) {
      final byte[] keyBytes = StringUtils.stringToBinary(e.getKey());
      final List<byte[]> configs = new ArrayList<>(e.getValue().size());
      // Size the count varint by the FINAL number of configs. The previous code used
      // configs.size() while the list was still empty, under-allocating the buffer (and
      // overflowing on put) for entries with 128 or more configurations.
      int entrySize =
          VarintUtils.unsignedIntByteLength(keyBytes.length)
              + keyBytes.length
              + VarintUtils.unsignedIntByteLength(e.getValue().size());
      for (final SimpleFeatureUserDataConfiguration config : e.getValue()) {
        final byte[] confBytes = PersistenceUtils.toBinary(config);
        entrySize += VarintUtils.unsignedIntByteLength(confBytes.length);
        entrySize += confBytes.length;
        configs.add(confBytes);
      }
      size += entrySize;
      final ByteBuffer buf = ByteBuffer.allocate(entrySize);
      VarintUtils.writeUnsignedInt(keyBytes.length, buf);
      buf.put(keyBytes);
      VarintUtils.writeUnsignedInt(configs.size(), buf);
      for (final byte[] confBytes : configs) {
        VarintUtils.writeUnsignedInt(confBytes.length, buf);
        buf.put(confBytes);
      }
      entries.add(buf.array());
    }
    size += VarintUtils.unsignedIntByteLength(configurations.size());
    final ByteBuffer buf = ByteBuffer.allocate(size);
    VarintUtils.writeUnsignedInt(configurations.size(), buf);
    for (final byte[] e : entries) {
      buf.put(e);
    }
    return buf.array();
  }

  @Override
  public void fromBinary(final byte[] bytes) {
    final ByteBuffer buf = ByteBuffer.wrap(bytes);
    final int entrySize = VarintUtils.readUnsignedInt(buf);
    final Map<String, List<SimpleFeatureUserDataConfiguration>> internalConfigurations =
        new HashMap<>(entrySize);
    for (int i = 0; i < entrySize; i++) {
      final int keySize = VarintUtils.readUnsignedInt(buf);
      final byte[] keyBytes = ByteArrayUtils.safeRead(buf, keySize);
      final String key = StringUtils.stringFromBinary(keyBytes);
      final int numConfigs = VarintUtils.readUnsignedInt(buf);
      final List<SimpleFeatureUserDataConfiguration> confList = new ArrayList<>(numConfigs);
      for (int c = 0; c < numConfigs; c++) {
        final byte[] entryBytes = ByteArrayUtils.safeRead(buf, VarintUtils.readUnsignedInt(buf));
        confList.add(
            (SimpleFeatureUserDataConfiguration) PersistenceUtils.fromBinary(entryBytes));
      }
      internalConfigurations.put(key, confList);
    }
    configurations = internalConfigurations;
  }
}

================================================
FILE: extensions/adapters/vector/src/main/java/org/locationtech/geowave/adapter/vector/util/SimpleFeatureWrapper.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.util;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.locationtech.geowave.core.index.ByteArray;
import org.opengis.feature.GeometryAttribute;
import org.opengis.feature.IllegalAttributeException;
import org.opengis.feature.Property;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.feature.type.Name;
import org.opengis.filter.identity.FeatureId;
import org.opengis.geometry.BoundingBox;

/**
 * Decorator around a {@link SimpleFeature} that carries two extra pieces of GeoWave bookkeeping —
 * the insertion id and a sub-strategy index — while forwarding every {@link SimpleFeature} method
 * unchanged to the wrapped feature. Mutating calls mutate the wrapped feature directly.
 */
public class SimpleFeatureWrapper implements SimpleFeature {
  // The feature all SimpleFeature calls are delegated to.
  private final SimpleFeature simpleFeature;
  // GeoWave insertion id associated with this feature.
  private final ByteArray insertionId;
  // Index of the index sub-strategy this feature was inserted under.
  private final int subStratIdx;

  /**
   * @param simpleFeature the feature to wrap (delegation target)
   * @param insertionId GeoWave insertion id to carry alongside the feature
   * @param subStratIdx sub-strategy index to carry alongside the feature
   */
  public SimpleFeatureWrapper(
      final SimpleFeature simpleFeature,
      final ByteArray insertionId,
      final int subStratIdx) {
    this.simpleFeature = simpleFeature;
    this.insertionId = insertionId;
    this.subStratIdx = subStratIdx;
  }

  // --- wrapper-specific accessors (not part of SimpleFeature) ---

  /** @return the wrapped feature */
  public SimpleFeature getSimpleFeature() {
    return simpleFeature;
  }

  /** @return the insertion id supplied at construction */
  public ByteArray getInsertionId() {
    return insertionId;
  }

  /** @return the sub-strategy index supplied at construction */
  public int getSubStratIdx() {
    return subStratIdx;
  }

  // --- SimpleFeature: all methods below are pure delegation to the wrapped feature ---

  @Override
  public FeatureId getIdentifier() {
    return simpleFeature.getIdentifier();
  }

  @Override
  public AttributeDescriptor getDescriptor() {
    return simpleFeature.getDescriptor();
  }

  @Override
  public BoundingBox getBounds() {
    return simpleFeature.getBounds();
  }

  @Override
  public String getID() {
    return simpleFeature.getID();
  }

  @Override
  public SimpleFeatureType getType() {
    return simpleFeature.getType();
  }

  @Override
  public SimpleFeatureType getFeatureType() {
    return simpleFeature.getFeatureType();
  }

  @Override
  public void setValue(final Object newValue) {
    simpleFeature.setValue(newValue);
  }

  @Override
  public List getAttributes() {
    return simpleFeature.getAttributes();
  }

  @Override
  public GeometryAttribute getDefaultGeometryProperty() {
    return simpleFeature.getDefaultGeometryProperty();
  }

  @Override
  public void setValue(final Collection values) {
    simpleFeature.setValue(values);
  }

  @Override
  public void setAttributes(final List values) {
    simpleFeature.setAttributes(values);
  }

  @Override
  public void setDefaultGeometryProperty(final GeometryAttribute geometryAttribute) {
    simpleFeature.setDefaultGeometryProperty(geometryAttribute);
  }

  @Override
  public Collection getValue() {
    return simpleFeature.getValue();
  }

  @Override
  public Collection getProperties(final Name name) {
    return simpleFeature.getProperties(name);
  }

  @Override
  public void setAttributes(final Object[] values) {
    simpleFeature.setAttributes(values);
  }

  @Override
  public Name getName() {
    return simpleFeature.getName();
  }

  @Override
  public Property getProperty(final Name name) {
    return simpleFeature.getProperty(name);
  }

  @Override
  public Object getAttribute(final String name) {
    return simpleFeature.getAttribute(name);
  }

  @Override
  public boolean isNillable() {
    return simpleFeature.isNillable();
  }

  @Override
  public Map getUserData() {
    return simpleFeature.getUserData();
  }

  @Override
  public void setAttribute(final String name, final Object value) {
    simpleFeature.setAttribute(name, value);
  }

  @Override
  public Collection getProperties(final String name) {
    return simpleFeature.getProperties(name);
  }

  @Override
  public Object getAttribute(final Name name) {
    return simpleFeature.getAttribute(name);
  }

  @Override
  public void setAttribute(final Name name, final Object value) {
    simpleFeature.setAttribute(name, value);
  }

  @Override
  public Collection getProperties() {
    return simpleFeature.getProperties();
  }

  @Override
  public Property getProperty(final String name) {
    return simpleFeature.getProperty(name);
  }

  @Override
  public Object getAttribute(final int index) throws IndexOutOfBoundsException {
    return simpleFeature.getAttribute(index);
  }

  @Override
  public void setAttribute(final int index, final Object value) throws IndexOutOfBoundsException {
    simpleFeature.setAttribute(index, value);
  }

  @Override
  public void validate() throws IllegalAttributeException {
    simpleFeature.validate();
  }

  @Override
  public int getAttributeCount() {
    return simpleFeature.getAttributeCount();
  }

  @Override
  public Object getDefaultGeometry() {
    return simpleFeature.getDefaultGeometry();
  }

  @Override
  public void setDefaultGeometry(final Object geometry) {
    simpleFeature.setDefaultGeometry(geometry);
  }
}

================================================
FILE: extensions/adapters/vector/src/main/protobuf/CqlHBaseQueryFilters.proto
================================================
option java_package = "org.locationtech.geowave.adapter.vector.query.hbase.generated";
option java_outer_classname = "FilterProtos";
option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

message CqlHBaseQueryFilter {
  required string gtFilter = 1;
  required bytes model = 2;
  required bytes dataAdapter = 3;
}

================================================
FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.geotools.data.DataStoreFactorySpi
================================================
org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStoreFactory

================================================
FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.geotools.process.ProcessFactory
================================================
org.locationtech.geowave.adapter.vector.plugin.GeoWaveGSProcessFactory

================================================
FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.adapter.vector.index.IndexQueryStrategySPI
================================================ org.locationtech.geowave.adapter.vector.index.ChooseHeuristicMatchIndexQueryStrategy org.locationtech.geowave.adapter.vector.index.ChooseBestMatchIndexQueryStrategy org.locationtech.geowave.adapter.vector.index.ChooseLocalityPreservingQueryStrategy ================================================ FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.adapter.vector.plugin.lock.LockingManagementFactory ================================================ org.locationtech.geowave.adapter.vector.plugin.lock.MemoryLockManagerFactory ================================================ FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.adapter.vector.cli.VectorCLIProvider ================================================ FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.index.persist.PersistableRegistrySpi ================================================ org.locationtech.geowave.adapter.vector.FeatureAdapterPersistableRegistry ================================================ FILE: extensions/adapters/vector/src/main/resources/META-INF/services/org.locationtech.geowave.core.store.cli.query.QueryOutputFormatSpi ================================================ org.locationtech.geowave.adapter.vector.query.ShapefileQueryOutputFormat org.locationtech.geowave.adapter.vector.query.GeoJsonQueryOutputFormat ================================================ FILE: extensions/adapters/vector/src/main/resources/applicationContext.xml ================================================ ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/BaseDataStoreTest.java ================================================ /** * Copyright 
(c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import org.geotools.data.DataStore; import org.junit.Rule; import org.junit.rules.TestName; import org.locationtech.geowave.adapter.vector.plugin.GeoWaveGTDataStoreFactory; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginException; import org.locationtech.geowave.core.store.StoreFactoryFamilySpi; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily; public class BaseDataStoreTest { @Rule public TestName name = new TestName(); protected DataStore createDataStore() throws IOException, GeoWavePluginException { final Map params = new HashMap<>(); params.put("gwNamespace", "test_" + getClass().getName() + "_" + name.getMethodName()); final StoreFactoryFamilySpi storeFactoryFamily = new MemoryStoreFactoryFamily(); // delete existing data new GeoWavePluginConfig(storeFactoryFamily, params).getDataStore().delete( QueryBuilder.newBuilder().build()); return new GeoWaveGTDataStoreFactory(storeFactoryFamily).createNewDataStore(params); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/FeatureDataAdapterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.text.ParseException; import java.util.Date; import java.util.List; import java.util.Map.Entry; import java.util.UUID; import org.apache.commons.lang3.tuple.Pair; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.filter.text.cql2.CQLException; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.store.dimension.SpatialField; import org.locationtech.geowave.core.geotime.store.dimension.TimeField; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; 
import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.PrecisionModel; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.referencing.crs.CoordinateReferenceSystem; public class FeatureDataAdapterTest { private SimpleFeatureType schema; private SimpleFeature newFeature; private Date time1; private Date time2; GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)); @SuppressWarnings("unchecked") @Before public void setup() throws SchemaException, CQLException, ParseException { time1 = DateUtilities.parseISO("2005-05-19T18:33:55Z"); time2 = DateUtilities.parseISO("2005-05-19T19:33:55Z"); schema = DataUtilities.createType( "sp.geostuff", "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String"); newFeature = FeatureDataUtils.buildFeature( schema, new Pair[] { Pair.of("geometry", factory.createPoint(new Coordinate(27.25, 41.25))), Pair.of("pop", Long.valueOf(100)), Pair.of("when", time1), Pair.of("whennot", time2)}); } @Test public void testDifferentProjection() throws SchemaException { final SimpleFeatureType schema = DataUtilities.createType("sp.geostuff", "geometry:Geometry:srid=3005,pop:java.lang.Long"); final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema); final Index spatialIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex( dataAdapter.asInternalAdapter((short) -1), spatialIndex); final CoordinateReferenceSystem crs = dataAdapter.getFeatureType().getCoordinateReferenceSystem(); // assertTrue(crs.getIdentifiers().toString().contains("EPSG:4326")); @SuppressWarnings("unchecked") final SimpleFeature newFeature = FeatureDataUtils.buildFeature( schema, new Pair[] { Pair.of("geometry", 
factory.createPoint(new Coordinate(27.25, 41.25))), Pair.of("pop", Long.valueOf(100))}); final AdapterPersistenceEncoding persistenceEncoding = dataAdapter.asInternalAdapter((short) -1).encode(newFeature, indexMapping, spatialIndex); Geometry geom = null; for (final Entry pv : persistenceEncoding.getCommonData().getValues().entrySet()) { if (pv.getValue() instanceof Geometry) { geom = (Geometry) pv.getValue(); } } assertNotNull(geom); assertEquals(new Coordinate(-138.0, 44.0), geom.getCentroid().getCoordinate()); } @Test public void testSingleTime() { schema.getDescriptor("when").getUserData().clear(); schema.getDescriptor("whennot").getUserData().put("time", Boolean.TRUE); final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema); final Index spatialIndex = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex( dataAdapter.asInternalAdapter((short) -1), spatialIndex); final byte[] binary = dataAdapter.toBinary(); final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter(); dataAdapterCopy.fromBinary(binary); assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName()); assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType()); assertEquals( Boolean.TRUE, dataAdapterCopy.getFeatureType().getDescriptor("whennot").getUserData().get("time")); assertEquals(2, indexMapping.getIndexFieldMappers().size()); assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID)); assertEquals( 1, indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount()); assertEquals( "whennot", indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]); assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME)); assertEquals( 1, indexMapping.getMapperForIndexField( 
SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount()); assertEquals( "geometry", indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]); } @Test public void testInferredTime() { schema.getDescriptor("when").getUserData().clear(); schema.getDescriptor("whennot").getUserData().clear(); final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema); final Index spatialIndex = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex( dataAdapter.asInternalAdapter((short) -1), spatialIndex); final byte[] binary = dataAdapter.toBinary(); final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter(); dataAdapterCopy.fromBinary(binary); assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName()); assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType()); assertEquals( Boolean.TRUE, dataAdapterCopy.getFeatureType().getDescriptor("when").getUserData().get("time")); assertEquals(2, indexMapping.getIndexFieldMappers().size()); assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID)); assertEquals( 1, indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount()); assertEquals( "when", indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]); assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME)); assertEquals( 1, indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount()); assertEquals( "geometry", indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]); } @Test public void testRange() { schema.getDescriptor("when").getUserData().clear(); schema.getDescriptor("whennot").getUserData().clear(); schema.getDescriptor("when").getUserData().put("start", Boolean.TRUE); 
schema.getDescriptor("whennot").getUserData().put("end", Boolean.TRUE); final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema); final Index spatialIndex = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex( dataAdapter.asInternalAdapter((short) -1), spatialIndex); final byte[] binary = dataAdapter.toBinary(); final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter(); dataAdapterCopy.fromBinary(binary); assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName()); assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType()); assertEquals( Boolean.TRUE, dataAdapterCopy.getFeatureType().getDescriptor("whennot").getUserData().get("end")); assertEquals( Boolean.TRUE, dataAdapterCopy.getFeatureType().getDescriptor("when").getUserData().get("start")); assertEquals(2, indexMapping.getIndexFieldMappers().size()); assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID)); assertEquals( 2, indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount()); assertEquals( "when", indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]); assertEquals( "whennot", indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[1]); assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME)); assertEquals( 1, indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount()); assertEquals( "geometry", indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]); } @Test public void testInferredRange() throws SchemaException { final SimpleFeatureType schema = DataUtilities.createType( "http://foo", "sp.geostuff", "geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,end:Date,pid:String"); final List descriptors = 
schema.getAttributeDescriptors(); final Object[] defaults = new Object[descriptors.size()]; int p = 0; for (final AttributeDescriptor descriptor : descriptors) { defaults[p++] = descriptor.getDefaultValue(); } final SimpleFeature newFeature = SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString()); newFeature.setAttribute("pop", Long.valueOf(100)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("start", time1); newFeature.setAttribute("end", time2); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25))); final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(schema); final Index spatialIndex = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex( dataAdapter.asInternalAdapter((short) -1), spatialIndex); final byte[] binary = dataAdapter.toBinary(); final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter(); dataAdapterCopy.fromBinary(binary); assertEquals("http://foo", dataAdapterCopy.getFeatureType().getName().getNamespaceURI()); assertEquals(dataAdapterCopy.getTypeName(), dataAdapter.getTypeName()); assertEquals(dataAdapterCopy.getFeatureType(), dataAdapter.getFeatureType()); assertEquals( Boolean.TRUE, dataAdapterCopy.getFeatureType().getDescriptor("end").getUserData().get("end")); assertEquals( Boolean.TRUE, dataAdapterCopy.getFeatureType().getDescriptor("start").getUserData().get("start")); assertEquals(2, indexMapping.getIndexFieldMappers().size()); assertNotNull(indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID)); assertEquals( 2, indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).adapterFieldCount()); assertEquals( "start", indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[0]); assertEquals( "end", 
indexMapping.getMapperForIndexField(TimeField.DEFAULT_FIELD_ID).getAdapterFields()[1]); assertNotNull(indexMapping.getMapperForIndexField(SpatialField.DEFAULT_GEOMETRY_FIELD_NAME)); assertEquals( 1, indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).adapterFieldCount()); assertEquals( "geometry", indexMapping.getMapperForIndexField( SpatialField.DEFAULT_GEOMETRY_FIELD_NAME).getAdapterFields()[0]); } @Test public void testCRSProjection() { final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder(); typeBuilder.setName("test"); typeBuilder.setCRS(GeometryUtils.getDefaultCRS()); // <- Coordinate // reference // add attributes in order typeBuilder.add("geom", Point.class); typeBuilder.add("name", String.class); typeBuilder.add("count", Long.class); // build the type final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(typeBuilder.buildFeatureType()); final FeatureDataAdapter dataAdapter = new FeatureDataAdapter(builder.getFeatureType()); final Index spatialIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final byte[] binary = dataAdapter.toBinary(); final FeatureDataAdapter dataAdapterCopy = new FeatureDataAdapter(); dataAdapterCopy.fromBinary(binary); assertEquals( dataAdapterCopy.getFeatureType().getCoordinateReferenceSystem().getCoordinateSystem(), GeometryUtils.getDefaultCRS().getCoordinateSystem()); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/FeatureWritableTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector; import static org.junit.Assert.assertEquals; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import org.apache.commons.lang3.tuple.Pair; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.junit.Test; import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.PrecisionModel; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class FeatureWritableTest { GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)); @Test public void test() throws IOException { final SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder(); typeBuilder.setName("test"); typeBuilder.setCRS(GeometryUtils.getDefaultCRS()); // <- Coordinate // reference // add attributes in order typeBuilder.add("geom", Point.class); typeBuilder.add("name", String.class); typeBuilder.add("count", Long.class); // build the type final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(typeBuilder.buildFeatureType()); final SimpleFeatureType featureType = builder.getFeatureType(); @SuppressWarnings("unchecked") final SimpleFeature newFeature = FeatureDataUtils.buildFeature( 
featureType, new Pair[] { Pair.of("geom", factory.createPoint(new Coordinate(27.25, 41.25))), Pair.of("count", Long.valueOf(100))}); final FeatureWritable writable = new FeatureWritable(featureType, newFeature); final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try (DataOutputStream dos = new DataOutputStream(bos)) { writable.write(dos); dos.flush(); } final ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); try (DataInputStream is = new DataInputStream(bis)) { writable.readFields(is); } assertEquals(newFeature.getDefaultGeometry(), writable.getFeature().getDefaultGeometry()); assertEquals( featureType.getCoordinateReferenceSystem().getCoordinateSystem(), writable.getFeature().getFeatureType().getCoordinateReferenceSystem().getCoordinateSystem()); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/index/ChooseBestMatchIndexQueryStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.index; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Random; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.index.ByteArray; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.index.numeric.NumericValue; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.BinConstraints.ByteArrayConstraints; 
import org.locationtech.geowave.core.store.api.DataTypeAdapter; import org.locationtech.geowave.core.store.api.DataTypeStatistic; import org.locationtech.geowave.core.store.api.FieldStatistic; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.api.IndexStatistic; import org.locationtech.geowave.core.store.api.Statistic; import org.locationtech.geowave.core.store.api.StatisticValue; import org.locationtech.geowave.core.store.entities.GeoWaveKeyImpl; import org.locationtech.geowave.core.store.entities.GeoWaveRowImpl; import org.locationtech.geowave.core.store.entities.GeoWaveValue; import org.locationtech.geowave.core.store.index.NullIndex; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import org.locationtech.geowave.core.store.statistics.StatisticId; import org.locationtech.geowave.core.store.statistics.StatisticType; import org.locationtech.geowave.core.store.statistics.StatisticUpdateCallback; import org.locationtech.geowave.core.store.statistics.StatisticValueWriter; import org.locationtech.geowave.core.store.statistics.StatisticsIngestCallback; import org.locationtech.geowave.core.store.statistics.binning.CompositeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.DataTypeBinningStrategy; import org.locationtech.geowave.core.store.statistics.binning.PartitionBinningStrategy; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic; import org.locationtech.geowave.core.store.statistics.index.RowRangeHistogramStatistic.RowRangeHistogramValue; import 
com.beust.jcommander.internal.Lists;
import com.beust.jcommander.internal.Maps;

/**
 * Exercises {@code ChooseBestMatchIndexQueryStrategy}: after ingesting synthetic row-range
 * histogram statistics for both a spatial-temporal and a spatial index, a query constrained in
 * latitude, longitude, AND time should select the spatial-temporal index.
 *
 * NOTE(review): generic type parameters throughout this file appear to have been stripped by
 * extraction (e.g. {@code Map, Map>>}, {@code Statistic>}) — restore them from the original
 * source / the DataStatisticsStore interface before compiling.
 */
public class ChooseBestMatchIndexQueryStrategyTest {
  // Null indices that cannot serve a spatial/temporal query; the strategy must not pick them.
  final Index IMAGE_CHIP_INDEX1 = new NullIndex("IMAGERY_CHIPS1");
  final Index IMAGE_CHIP_INDEX2 = new NullIndex("IMAGERY_CHIPS2");
  // Fixed seed keeps the synthetic data (and therefore histogram shape) deterministic.
  private static long SEED = 12345;
  private static long ROWS = 1000000;

  @Test
  public void testChooseSpatialTemporalWithStats() {
    final Index temporalindex = new SpatialTemporalIndexBuilder().createIndex();
    final Index spatialIndex = new SpatialIndexBuilder().createIndex();

    // Histogram statistic for the spatial-temporal index, binned by data type + partition.
    final RowRangeHistogramStatistic rangeTempStats =
        new RowRangeHistogramStatistic(temporalindex.getName());
    rangeTempStats.setBinningStrategy(
        new CompositeBinningStrategy(
            new DataTypeBinningStrategy(),
            new PartitionBinningStrategy()));
    rangeTempStats.setInternal();

    // Same histogram statistic, but for the plain spatial index.
    final RowRangeHistogramStatistic rangeStats =
        new RowRangeHistogramStatistic(spatialIndex.getName());
    rangeStats.setBinningStrategy(
        new CompositeBinningStrategy(
            new DataTypeBinningStrategy(),
            new PartitionBinningStrategy()));
    rangeStats.setInternal();

    // Statistic id -> (bin -> accumulated value); backs the fake statistics store below.
    final Map, Map>> statsMap = new HashMap<>();
    final ChooseBestMatchIndexQueryStrategy strategy = new ChooseBestMatchIndexQueryStrategy();

    // Constrain latitude and longitude...
    final ConstraintSet cs1 = new ConstraintSet();
    cs1.addConstraint(
        LatitudeDefinition.class,
        new ConstraintData(new ConstrainedIndexValue(0.3, 0.5), true));
    cs1.addConstraint(
        LongitudeDefinition.class,
        new ConstraintData(new ConstrainedIndexValue(0.4, 0.7), true));
    // ...and time, then merge into one constraint set.
    final ConstraintSet cs2a = new ConstraintSet();
    cs2a.addConstraint(
        TimeDefinition.class,
        new ConstraintData(new ConstrainedIndexValue(0.1, 0.2), true));
    final ConstraintsByClass constraints =
        new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1));
    final BasicQueryByClass query = new BasicQueryByClass(constraints);

    // Populate histogram stats for the spatial-temporal index from random 3D points.
    final NumericIndexStrategy temporalIndexStrategy =
        new SpatialTemporalIndexBuilder().createIndex().getIndexStrategy();
    final Random r = new Random(SEED);
    for (int i = 0; i < ROWS; i++) {
      final double x = r.nextDouble();
      final double y = r.nextDouble();
      final double t = r.nextDouble();
      final InsertionIds id =
          temporalIndexStrategy.getInsertionIds(
              new BasicNumericDataset(
                  new NumericData[] {new NumericValue(x), new NumericValue(y), new NumericValue(t)}));
      for (final SinglePartitionInsertionIds range : id.getPartitionKeys()) {
        // Lazily create the per-statistic bin map.
        Map> binValues = statsMap.get(rangeTempStats.getId());
        if (binValues == null) {
          binValues = Maps.newHashMap();
          statsMap.put(rangeTempStats.getId(), binValues);
        }
        final ByteArray bin =
            CompositeBinningStrategy.getBin(
                DataTypeBinningStrategy.getBin((String) null),
                PartitionBinningStrategy.getBin(range.getPartitionKey()));
        RowRangeHistogramValue value = (RowRangeHistogramValue) binValues.get(bin);
        if (value == null) {
          value = rangeTempStats.createEmpty();
          value.setBin(bin);
          binValues.put(bin, value);
        }
        // Feed a synthetic row into the histogram via the ingest callback interface.
        ((StatisticsIngestCallback) value).entryIngested(
            null,
            null,
            new GeoWaveRowImpl(
                new GeoWaveKeyImpl(
                    new byte[] {1},
                    (short) 1,
                    range.getPartitionKey(),
                    range.getSortKeys().get(0),
                    0),
                new GeoWaveValue[] {}));
      }
    }

    // Populate histogram stats for the plain spatial index the same way.
    final Index index = new SpatialIndexBuilder().createIndex();
    final NumericIndexStrategy indexStrategy = index.getIndexStrategy();
    for (int i = 0; i < ROWS; i++) {
      final double x = r.nextDouble();
      final double y = r.nextDouble();
      final double t = r.nextDouble();
      final InsertionIds id =
          indexStrategy.getInsertionIds(
              new BasicNumericDataset(
                  new NumericData[] {new NumericValue(x), new NumericValue(y), new NumericValue(t)}));
      for (final SinglePartitionInsertionIds range : id.getPartitionKeys()) {
        Map> binValues = statsMap.get(rangeStats.getId());
        if (binValues == null) {
          binValues = Maps.newHashMap();
          statsMap.put(rangeStats.getId(), binValues);
        }
        final ByteArray bin =
            CompositeBinningStrategy.getBin(
                DataTypeBinningStrategy.getBin((String) null),
                PartitionBinningStrategy.getBin(range.getPartitionKey()));
        RowRangeHistogramValue value = (RowRangeHistogramValue) binValues.get(bin);
        if (value == null) {
          value = rangeStats.createEmpty();
          value.setBin(bin);
          binValues.put(bin, value);
        }
        ((StatisticsIngestCallback) value).entryIngested(
            null,
            null,
            new GeoWaveRowImpl(
                new GeoWaveKeyImpl(
                    new byte[] {1},
                    (short) 1,
                    range.getPartitionKey(),
                    range.getSortKeys().get(0),
                    0),
                new GeoWaveValue[] {}));
      }
    }

    // The strategy should pick exactly one index: the spatial-temporal one.
    final Iterator it =
        getIndices(
            new TestDataStatisticsStore(Lists.newArrayList(rangeStats, rangeTempStats), statsMap),
            query,
            strategy);
    assertTrue(it.hasNext());
    assertEquals(temporalindex.getName(), it.next().getName());
    assertFalse(it.hasNext());
  }

  /**
   * Runs the strategy against a fixed candidate set: two unusable null indices bracketing a
   * spatial-temporal and a spatial index.
   */
  public Iterator getIndices(
      final DataStatisticsStore statisticsStore,
      final BasicQueryByClass query,
      final ChooseBestMatchIndexQueryStrategy strategy) {
    return strategy.getIndices(
        statisticsStore,
        null,
        query,
        new Index[] {
            IMAGE_CHIP_INDEX1,
            new SpatialTemporalIndexBuilder().createIndex(),
            new SpatialIndexBuilder().createIndex(),
            IMAGE_CHIP_INDEX2},
        null,
        Maps.newHashMap());
  }

  /** Simple numeric range used as a dimension constraint value in this test. */
  public static class ConstrainedIndexValue extends NumericRange {

    /** Serialization id. */
    private static final long serialVersionUID = 1L;

    public ConstrainedIndexValue(final double min, final double max) {
      super(min, max);
      // no additional state beyond NumericRange
    }
  }

  /**
   * Minimal in-memory {@code DataStatisticsStore}: serves the statistics and values handed to
   * the constructor; every operation the test does not need throws
   * {@link UnsupportedOperationException}.
   */
  public static class TestDataStatisticsStore implements DataStatisticsStore {
    // Statistics registered for lookup by index/type/id.
    private final List> statistics;
    // Statistic id -> (bin -> value), as built up in the test body.
    private final Map, Map>> statisticValues;

    public TestDataStatisticsStore(
        final List> statistics,
        final Map, Map>> statisticValues) {
      this.statistics = statistics;
      this.statisticValues = statisticValues;
    }

    @Override
    public boolean exists(final Statistic> statistic) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void addStatistic(final Statistic> statistic) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatistic(final Statistic> statistic) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatistics(final Iterator>> statistics) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatistics(final Index index) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatistics(final DataTypeAdapter type, final Index... indices) {
      throw new UnsupportedOperationException();
    }

    // Filters the registered statistics by index name, optional type, and optional tag.
    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public CloseableIterator>> getIndexStatistics(
        final Index index,
        final StatisticType> statisticType,
        final String name) {
      return new CloseableIterator.Wrapper(
          statistics.stream().filter(
              stat -> (stat instanceof IndexStatistic)
                  && ((IndexStatistic) stat).getIndexName().equals(index.getName())
                  && ((statisticType == null) || statisticType.equals(stat.getStatisticType()))
                  && ((name == null) || name.equals(stat.getTag()))).iterator());
    }

    @Override
    public CloseableIterator>> getDataTypeStatistics(
        final DataTypeAdapter type,
        final StatisticType> statisticType,
        final String name) {
      throw new UnsupportedOperationException();
    }

    @Override
    public CloseableIterator>> getFieldStatistics(
        final DataTypeAdapter type,
        final StatisticType> statisticType,
        final String fieldName,
        final String name) {
      throw new UnsupportedOperationException();
    }

    // Linear scan by id; returns null when not registered.
    @SuppressWarnings("unchecked")
    @Override
    public , R> Statistic getStatisticById(final StatisticId statisticId) {
      return (Statistic) statistics.stream().filter(
          s -> s.getId().equals(statisticId)).findFirst().orElse(null);
    }

    @Override
    public CloseableIterator>> getAllStatistics(
        final StatisticType> statisticType) {
      return new CloseableIterator.Wrapper<>(
          statistics.stream().filter(
              stat -> stat.getStatisticType().equals(statisticType)).iterator());
    }

    @Override
    public CloseableIterator> getStatisticValues(
        final Iterator>> statistics,
        final ByteArrayConstraints bins,
        final String... authorizations) {
      throw new UnsupportedOperationException();
    }

    // Exact-bin lookup into the pre-built value map.
    @SuppressWarnings("unchecked")
    @Override
    public , R> V getStatisticValue(
        final Statistic statistic,
        final ByteArray bin,
        final String... authorizations) {
      final Map> values = statisticValues.get(statistic.getId());
      if (values != null) {
        return (V) values.get(bin);
      }
      return null;
    }

    @Override
    public , R> CloseableIterator getStatisticValues(
        final Statistic statistic,
        final ByteArray binPrefix,
        final String... authorizations) {
      throw new UnsupportedOperationException();
    }

    // All values for a statistic, across every bin; empty iterator when none were ingested.
    @SuppressWarnings("unchecked")
    @Override
    public , R> CloseableIterator getStatisticValues(
        final Statistic statistic,
        final String... authorizations) {
      final Map> values = statisticValues.get(statistic.getId());
      if (values != null) {
        return new CloseableIterator.Wrapper<>((Iterator) values.values().iterator());
      }
      return new CloseableIterator.Empty<>();
    }

    @Override
    public , R> V getStatisticValue(
        final Statistic statistic,
        final String... authorizations) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void removeAll() {
      throw new UnsupportedOperationException();
    }

    @Override
    public , R> void setStatisticValue(final Statistic statistic, final V value) {
      throw new UnsupportedOperationException();
    }

    @Override
    public , R> void setStatisticValue(
        final Statistic statistic,
        final V value,
        final ByteArray bin) {
      throw new UnsupportedOperationException();
    }

    @Override
    public , R> void incorporateStatisticValue(final Statistic statistic, final V value) {
      throw new UnsupportedOperationException();
    }

    @Override
    public , R> void incorporateStatisticValue(
        final Statistic statistic,
        final V value,
        final ByteArray bin) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatisticValue(final Statistic> statistic) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatisticValue(final Statistic> statistic, final ByteArray bin) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeStatisticValues(final Statistic> statistic) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeTypeSpecificStatisticValues(
        final IndexStatistic> statistic,
        final String typeName) {
      throw new UnsupportedOperationException();
    }

    @Override
    public , R> StatisticValueWriter createStatisticValueWriter(final Statistic statistic) {
      throw new UnsupportedOperationException();
    }

    @Override
    public StatisticUpdateCallback createUpdateCallback(
        final Index index,
        final AdapterToIndexMapping indexMapping,
        final InternalDataAdapter adapter,
        final boolean updateAdapterStats) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean mergeStats() {
      return false;
    }

    @Override
    public , R> CloseableIterator getStatisticValues(
        final Statistic statistic,
        final ByteArrayRange[] ranges,
        final String... authorizations) {
      throw new UnsupportedOperationException();
    }
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/index/ChooseHeuristicMatchQueryStrategyTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.index; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.NullIndex; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import org.locationtech.geowave.core.store.statistics.DataStatisticsStore; import com.beust.jcommander.internal.Maps; public class ChooseHeuristicMatchQueryStrategyTest { private static final double 
HOUR = 3600000; private static final double DAY = HOUR * 24; private static final double WEEK = DAY * 7; private static final double HOUSE = 0.005; private static final double BLOCK = 0.07; private static final double CITY = 1.25; final Index IMAGE_CHIP_INDEX1 = new NullIndex("IMAGERY_CHIPS1"); final Index IMAGE_CHIP_INDEX2 = new NullIndex("IMAGERY_CHIPS2"); protected final List indices = Arrays.asList( IMAGE_CHIP_INDEX1, new SpatialTemporalIndexBuilder().setNumPartitions(5).setBias( SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity( Unit.YEAR).createIndex(), new SpatialTemporalIndexBuilder().setNumPartitions(10).setBias( SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity( Unit.DAY).createIndex(), new SpatialIndexBuilder().createIndex(), IMAGE_CHIP_INDEX2); @Test public void testChooseTemporalWithoutStatsHouseHour() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(HOUSE, HOUSE, HOUR)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsHouseDay() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(HOUSE, HOUSE, DAY)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsHouseWeek() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(HOUSE, HOUSE, WEEK)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void 
testChooseTemporalWithoutStatsBlockHour() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(BLOCK, BLOCK, HOUR)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsBlockDay() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(BLOCK, BLOCK, DAY)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsBlockWeek() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(BLOCK, BLOCK, WEEK)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsCityHour() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(CITY, CITY, HOUR)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsCityDay() { final ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(CITY, CITY, DAY)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsCityWeek() { final 
ChooseHeuristicMatchIndexQueryStrategy strategy = new ChooseHeuristicMatchIndexQueryStrategy(); final Iterator it = getIndices(null, new BasicQueryByClass(createConstraints(CITY, CITY, WEEK)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } public Iterator getIndices( final DataStatisticsStore statsStore, final BasicQueryByClass query, final ChooseHeuristicMatchIndexQueryStrategy strategy) { return strategy.getIndices( statsStore, null, query, indices.toArray(new Index[indices.size()]), null, Maps.newHashMap()); } public static class ConstrainedIndexValue extends NumericRange { /** */ private static final long serialVersionUID = 1L; public ConstrainedIndexValue(final double min, final double max) { super(min, max); // } } private ConstraintsByClass createConstraints( final double lat, final double lon, final double time) { final ConstraintSet cs1 = new ConstraintSet(); cs1.addConstraint( LatitudeDefinition.class, new ConstraintData(new ConstrainedIndexValue(0, lat), true)); cs1.addConstraint( LongitudeDefinition.class, new ConstraintData(new ConstrainedIndexValue(0, lon), true)); final ConstraintSet cs2a = new ConstraintSet(); cs2a.addConstraint( TimeDefinition.class, new ConstraintData(new ConstrainedIndexValue(0, time), true)); return new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1)); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/index/ChooseLocalityPreservingQueryStrategyTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.index; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder; import org.locationtech.geowave.core.geotime.index.dimension.LatitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.LongitudeDefinition; import org.locationtech.geowave.core.geotime.index.dimension.TemporalBinningStrategy.Unit; import org.locationtech.geowave.core.index.numeric.NumericRange; import org.locationtech.geowave.core.geotime.index.dimension.TimeDefinition; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.index.NullIndex; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintData; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintSet; import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass.ConstraintsByClass; import com.beust.jcommander.internal.Maps; public class ChooseLocalityPreservingQueryStrategyTest { private static final double HOUR = 3600000; private static final double DAY = HOUR * 24; private 
static final double WEEK = DAY * 7; private static final double HOUSE = 0.005; private static final double BLOCK = 0.07; private static final double CITY = 1.25; final Index IMAGE_CHIP_INDEX1 = new NullIndex("IMAGERY_CHIPS1"); final Index IMAGE_CHIP_INDEX2 = new NullIndex("IMAGERY_CHIPS2"); protected final List indices = Arrays.asList( IMAGE_CHIP_INDEX1, new SpatialTemporalIndexBuilder().setNumPartitions(5).setBias( SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity( Unit.YEAR).createIndex(), new SpatialTemporalIndexBuilder().setNumPartitions(10).setBias( SpatialTemporalDimensionalityTypeProvider.Bias.BALANCED).setPeriodicity( Unit.DAY).createIndex(), new SpatialIndexBuilder().createIndex(), IMAGE_CHIP_INDEX2); @Test public void testChooseTemporalWithoutStatsHouseHour() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(HOUSE, HOUSE, HOUR)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsHouseDay() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(HOUSE, HOUSE, DAY)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(3).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsHouseWeek() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(HOUSE, HOUSE, WEEK)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(3).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsBlockHour() { final ChooseLocalityPreservingQueryStrategy strategy 
= new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(BLOCK, BLOCK, HOUR)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsBlockDay() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(BLOCK, BLOCK, DAY)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseSpatialWithoutStatsBlockWeek() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(BLOCK, BLOCK, WEEK)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(3).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsCityHour() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(CITY, CITY, HOUR)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsCityDay() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new BasicQueryByClass(createConstraints(CITY, CITY, DAY)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } @Test public void testChooseTemporalWithoutStatsCityWeek() { final ChooseLocalityPreservingQueryStrategy strategy = new ChooseLocalityPreservingQueryStrategy(); final Iterator it = getIndices(new 
BasicQueryByClass(createConstraints(CITY, CITY, WEEK)), strategy); assertTrue(it.hasNext()); assertEquals(indices.get(1).getName(), it.next().getName()); assertFalse(it.hasNext()); } public Iterator getIndices( final BasicQueryByClass query, final ChooseLocalityPreservingQueryStrategy strategy) { return strategy.getIndices( null, null, query, indices.toArray(new Index[indices.size()]), null, Maps.newHashMap()); } public static class ConstrainedIndexValue extends NumericRange { /** */ private static final long serialVersionUID = 1L; public ConstrainedIndexValue(final double min, final double max) { super(min, max); // } } private ConstraintsByClass createConstraints( final double lat, final double lon, final double time) { final ConstraintSet cs1 = new ConstraintSet(); cs1.addConstraint( LatitudeDefinition.class, new ConstraintData(new ConstrainedIndexValue(0, lat), true)); cs1.addConstraint( LongitudeDefinition.class, new ConstraintData(new ConstrainedIndexValue(0, lon), true)); final ConstraintSet cs2a = new ConstraintSet(); cs2a.addConstraint( TimeDefinition.class, new ConstraintData(new ConstrainedIndexValue(0, time), true)); return new ConstraintsByClass(Arrays.asList(cs2a)).merge(Collections.singletonList(cs1)); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/ExtractGeometryFilterVisitorTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.text.ParseException; import org.geotools.data.Query; import org.geotools.filter.text.cql2.CQL; import org.geotools.filter.text.cql2.CQLException; import org.geotools.geometry.jts.JTS; import org.junit.Test; import org.locationtech.geowave.core.geotime.store.query.filter.SpatialQueryFilter.CompareOperation; import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitor; import org.locationtech.geowave.core.geotime.util.ExtractGeometryFilterVisitorResult; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.filter.Filter; import org.opengis.referencing.operation.TransformException; public class ExtractGeometryFilterVisitorTest { final String geomAttributeName = "geom"; final ExtractGeometryFilterVisitor visitorWithDescriptor = new ExtractGeometryFilterVisitor(GeometryUtils.getDefaultCRS(), geomAttributeName); @Test public void testDWithin() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "DWITHIN(%s, POINT(-122.7668 0.4979), 233.7, meters)", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, 
null); final Geometry geometry = result.getGeometry(); assertNotNull(geometry); for (final Coordinate coord : geometry.getCoordinates()) { assertEquals( 233.7, JTS.orthodromicDistance( coord, new Coordinate(-122.7668, 0.4979), GeometryUtils.getDefaultCRS()), 2); } } @Test public void testDWithinDateLine() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "DWITHIN(%s, POINT(179.9998 0.79), 13.7, kilometers)", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Geometry geometry = result.getGeometry(); assertNotNull(geometry); for (final Coordinate coord : geometry.getCoordinates()) { assertEquals( 13707.1, JTS.orthodromicDistance( coord, new Coordinate(179.9999, 0.79), GeometryUtils.getDefaultCRS()), 2000); } } @Test public void testBBOX() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter(String.format("BBOX(%s, 0, 0, 10, 25)", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.INTERSECTS); } @Test public void testIntersects() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "INTERSECTS(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new 
Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.INTERSECTS); } @Test public void testOverlaps() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "OVERLAPS(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.OVERLAPS); } @Test public void testEquals() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "EQUALS(geom, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.EQUALS); } @Test public void testCrosses() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "CROSSES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new 
GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.CROSSES); } @Test public void testTouches() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "TOUCHES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.TOUCHES); } @Test public void testWithin() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format("WITHIN(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.CONTAINS); } @Test public void testContains() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "CONTAINS(geom, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); 
assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.WITHIN); } @Test public void testDisjoint() throws CQLException, TransformException, ParseException { final Filter filter = CQL.toFilter( String.format( "DISJOINT(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); // for non-inclusive filters we can't extract query geometry and // predicate // assertTrue(Double.isNaN(result.getGeometry().getArea())); assertTrue(result.getCompareOp() == null); } @Test public void testIntesectAndBBox() throws CQLException, TransformException, ParseException { // BBOX geometry is completely contained within Intersects geometry // we are testing to see if we are able to combine simple geometric // relations with similar predicates // into a single query geometry/predicate final Filter filter = CQL.toFilter( String.format( "INTERSECTS(%s, POLYGON((0 0, 0 50, 20 50, 20 0, 0 0))) AND BBOX(%s, 0, 0, 10, 25)", geomAttributeName, geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == CompareOperation.INTERSECTS); } @Test public void testIntesectAndCrosses() throws CQLException, TransformException, ParseException { // CROSSES geometry is completely contained within INTERSECT geometry // we are testing to see if we are able to combine dissimilar geometric // relations correctly // to extract query geometry. 
Note, we can't combine two different // predicates into one but // we can combine geometries for the purpose of deriving linear // constraints final Filter filter = CQL.toFilter( String.format( "INTERSECTS(%s, POLYGON((0 0, 0 50, 20 50, 20 0, 0 0))) AND CROSSES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName, geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == null); } @Test public void testOverlapsOrCrosses() throws CQLException, TransformException, ParseException { // TOUCHES geometry is completely contained within OVERLAPS geometry // we are testing to see if we are able to combine dissimilar geometric // relations correctly // to extract query geometry. 
Note, we can't combine two different // predicates into one but // we can combine geometries for the purpose of deriving linear // constraints final Filter filter = CQL.toFilter( String.format( "OVERLAPS(%s, POLYGON((0 0, 0 50, 20 50, 20 0, 0 0))) OR TOUCHES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0)))", geomAttributeName, geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 20, 0, 50); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == null); } @Test public void testIntesectAndCrossesAndLike() throws CQLException, TransformException, ParseException { // we are testing to see if we are able to combine dissimilar geometric // relations correctly // to extract query geometry. Note, that returned predicate is null // since we can't represent // CQL expression fully into single query geometry and predicate final Filter filter = CQL.toFilter( String.format( "CROSSES(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0))) AND location == 'abc'", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == null); } @Test public void testWithMultipleAttributes() throws CQLException, TransformException, ParseException { // In this test query, we have constrains over multiple geometric // attributes. // The ExtractGeometryFilterVisitor class should only extracts // geometric constrains associated with the specified attribute name and // ignore others. 
final Filter filter = CQL.toFilter( String.format( "INTERSECTS(%s, POLYGON((0 0, 0 25, 10 25, 10 0, 0 0))) AND INTERSECTS(geomOtherAttr, POLYGON((0 0, 0 5, 5 5, 5 0, 0 0)))", geomAttributeName)); final Query query = new Query("type", filter); final ExtractGeometryFilterVisitorResult result = (ExtractGeometryFilterVisitorResult) query.getFilter().accept(visitorWithDescriptor, null); final Envelope bounds = new Envelope(0, 10, 0, 25); final Geometry bbox = new GeometryFactory().toGeometry(bounds); assertTrue(bbox.equalsTopo(result.getGeometry())); assertTrue(result.getCompareOp() == null); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/ExtractTimeFilterVisitorTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.text.ParseException; import java.util.Date; import org.geotools.data.Query; import org.geotools.filter.FilterFactoryImpl; import org.geotools.filter.text.cql2.CQL; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraints; import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet; import org.locationtech.geowave.core.geotime.store.query.TemporalRange; import org.locationtech.geowave.core.geotime.util.ExtractTimeFilterVisitor; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory; public class ExtractTimeFilterVisitorTest { final ExtractTimeFilterVisitor visitorWithDescriptor = new ExtractTimeFilterVisitor(); final ExtractTimeFilterVisitor visitorWithDescriptorForRange = new ExtractTimeFilterVisitor(); @Before public void setup() { visitorWithDescriptorForRange.addRangeVariables("start", "end"); } @Test public void testAfter() throws CQLException, ParseException { final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor(); final Date time = DateUtilities.parseISO("2005-05-19T20:32:56Z"); final Filter filter = CQL.toFilter("when after 2005-05-19T20:32:56Z"); final Query query = new Query("type", 
filter); TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null); assertNotNull(range); assertEquals(new Date(time.getTime() + 1), range.getStartRange().getStartTime()); range = (TemporalConstraints) query.getFilter().accept(visitorWithDescriptor, null); assertNotNull(range); assertEquals(new Date(time.getTime() + 1), range.getStartRange().getStartTime()); assertEquals("when", range.getName()); } @Test public void testGreaterThan() throws CQLException, ParseException { final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor(); final Date stimeNotEqual = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1); final Date stime = DateUtilities.parseISO("2005-05-19T20:32:56Z"); Filter filter = ECQL.toFilter("when > 2005-05-19T20:32:56Z"); Query query = new Query("type", filter); TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null); assertNotNull(range); assertEquals(stimeNotEqual, range.getStartRange().getStartTime()); assertEquals(TemporalRange.END_TIME, range.getEndRange().getEndTime()); assertEquals("when", range.getName()); filter = ECQL.toFilter("2005-05-19T20:32:56Z < when"); query = new Query("type", filter); range = (TemporalConstraints) query.getFilter().accept(visitor, null); assertNotNull(range); assertEquals(stimeNotEqual, range.getStartRange().getStartTime()); assertEquals(TemporalRange.END_TIME, range.getEndRange().getEndTime()); assertEquals("when", range.getName()); filter = ECQL.toFilter("2005-05-19T20:32:56Z <= when"); query = new Query("type", filter); range = (TemporalConstraints) query.getFilter().accept(visitor, null); assertNotNull(range); assertEquals(stime, range.getStartRange().getStartTime()); assertEquals(TemporalRange.END_TIME, range.getEndRange().getEndTime()); assertEquals("when", range.getName()); } @Test public void testMixedRanges() throws CQLException, ParseException { final Date stime = new 
Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Date etime = new Date(DateUtilities.parseISO("2005-05-20T20:32:56Z").getTime() - 1);
    // Strict inequalities exclude the boundary instants, hence the +1/-1 ms expected values.
    Filter filter = ECQL.toFilter("start > 2005-05-19T20:32:56Z and end < 2005-05-20T20:32:56Z");
    final FilterFactory factory = new FilterFactoryImpl();
    // Wrap with INCLUDE to verify the visitor ignores the no-op conjunct.
    filter = factory.and(Filter.INCLUDE, filter);
    Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertTrue(!rangeSet.isEmpty());
    assertEquals(stime, rangeSet.getConstraintsFor("start").getStartRange().getStartTime());
    assertEquals(etime, rangeSet.getConstraintsFor("end").getEndRange().getEndTime());
    // Disjunction of two windows: both ranges should appear under the composite "start_end".
    final Date stime1 = new Date(DateUtilities.parseISO("2005-05-17T20:32:56Z").getTime() + 1);
    final Date etime1 = new Date(DateUtilities.parseISO("2005-05-18T20:32:56Z").getTime() - 1);
    filter =
        ECQL.toFilter(
            "(start < 2005-05-18T20:32:56Z and end > 2005-05-17T20:32:56Z) or (start < 2005-05-20T20:32:56Z and end > 2005-05-19T20:32:56Z)");
    filter = factory.and(Filter.INCLUDE, filter);
    query = new Query("type", filter);
    rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptorForRange, null);
    assertNotNull(rangeSet);
    assertTrue(!rangeSet.isEmpty());
    assertEquals(stime1, rangeSet.getConstraintsFor("start_end").getStartRange().getStartTime());
    assertEquals(etime1, rangeSet.getConstraintsFor("start_end").getStartRange().getEndTime());
    assertEquals(stime, rangeSet.getConstraintsFor("start_end").getEndRange().getStartTime());
    assertEquals(etime, rangeSet.getConstraintsFor("start_end").getEndRange().getEndTime());
    // Open ended query: no bound on either side of the composite range, so the sentinel
    // START_TIME/END_TIME values are expected.
    filter = ECQL.toFilter("start < 2005-05-20T20:32:56Z and end > 2005-05-19T20:32:56Z");
    filter = factory.and(Filter.INCLUDE, filter);
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertTrue(!rangeSet.isEmpty());
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("start_end").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME,
        rangeSet.getConstraintsFor("start_end").getEndRange().getEndTime());
  }

  /** A '<' (or reversed '>') comparison yields an end-bounded range that excludes the instant. */
  @Test
  public void testLessThan() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date etimeNotEqual =
        new Date(DateUtilities.parseISO("2005-05-19T21:32:56Z").getTime() - 1);
    final Date etime = DateUtilities.parseISO("2005-05-19T21:32:56Z");
    Filter filter = ECQL.toFilter("when < 2005-05-19T21:32:56Z");
    Query query = new Query("type", filter);
    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());
    // strict '<' excludes the compared instant (1 ms earlier)
    assertEquals(etimeNotEqual, range.getEndRange().getEndTime());
    assertEquals("when", range.getName());
    // literal-first form must be interpreted identically
    filter = ECQL.toFilter(" 2005-05-19T21:32:56Z > when");
    query = new Query("type", filter);
    range = (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());
    assertEquals(etimeNotEqual, range.getEndRange().getEndTime());
    assertEquals("when", range.getName());
    // '>=' (literal-first) is inclusive, so the exact instant is the end bound
    filter = ECQL.toFilter(" 2005-05-19T21:32:56Z >= when");
    query = new Query("type", filter);
    range = (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());
    assertEquals(etime, range.getEndRange().getEndTime());
    assertEquals("when", range.getName());
  }

  /** Conjunctions of '<' and '>' over one or two attributes intersect into per-attribute ranges. */
  @Test
  public void testLessAndGreaterThan() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date etime = new Date(DateUtilities.parseISO("2005-05-19T21:32:56Z").getTime() - 1);
    final Date stime = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    // Contradictory bounds (start after end) -- expected result is the full sentinel range.
    Filter filter = ECQL.toFilter("when > 2005-05-19T21:32:56Z and when < 2005-05-19T20:32:56Z");
    Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    // Proper interval on a single attribute.
    filter = ECQL.toFilter("when < 2005-05-19T21:32:56Z and when > 2005-05-19T20:32:56Z");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(stime, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(etime, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    // Two different attributes: each gets its own one-sided constraint.
    filter = ECQL.toFilter("sometime < 2005-05-19T21:32:56Z and when > 2005-05-19T20:32:56Z");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertEquals(stime, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("sometime").getStartRange().getStartTime());
    assertEquals(etime, rangeSet.getConstraintsFor("sometime").getEndRange().getEndTime());
    // Same as above with the attributes swapped.
    filter = ECQL.toFilter("when < 2005-05-19T21:32:56Z and sometime > 2005-05-19T20:32:56Z");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(etime, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    assertEquals(stime, rangeSet.getConstraintsFor("sometime").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME,
        rangeSet.getConstraintsFor("sometime").getEndRange().getEndTime());
    // Literal-first (reversed operand) comparisons must be handled equivalently.
    filter = ECQL.toFilter("2005-05-19T21:32:56Z > when and 2005-05-19T20:32:56Z < sometime");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(etime, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    assertEquals(stime, rangeSet.getConstraintsFor("sometime").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME,
        rangeSet.getConstraintsFor("sometime").getEndRange().getEndTime());
  }

  /** Equality constrains both ends of the range to the exact instant. */
  @Test
  public void testEqual() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date etime = DateUtilities.parseISO("2005-05-19T21:32:56Z");
    final Filter filter = ECQL.toFilter("when = 2005-05-19T21:32:56Z");
    final Query query = new Query("type", filter);
    final TemporalConstraints range =
        (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(etime, range.getStartRange().getStartTime());
    assertEquals(etime, range.getEndRange().getEndTime());
  }

  /** DURING an interval excludes both interval endpoints (hence the +/- 1 ms expectations). */
  @Test
  public void testDuring() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date stime = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Date etime = new Date(DateUtilities.parseISO("2005-05-19T21:32:56Z").getTime() - 1);
    final Filter filter = CQL.toFilter("when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z");
    final Query query = new Query("type", filter);
    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(stime, range.getStartRange().getStartTime());
    assertEquals(etime, range.getStartRange().getEndTime());
    // accepting the same filter a second time must be idempotent
    range = (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(stime, range.getStartRange().getStartTime());
    assertEquals(etime, range.getStartRange().getEndTime());
  }

  /** BEFORE an instant produces an open-start range ending 1 ms before the instant. */
  @Test
  public void testBefore() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date etime = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() - 1);
    final Filter filter = CQL.toFilter("when before 2005-05-19T20:32:56Z");
    final Query query = new Query("type", filter);
    TemporalConstraints range = (TemporalConstraints) query.getFilter().accept(visitor, null);
    assertNotNull(range);
    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());
    assertEquals(etime, range.getStartRange().getEndTime());
    // the descriptor-aware visitor must agree
    range = (TemporalConstraints) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(range);
    assertEquals(TemporalRange.START_TIME, range.getStartRange().getStartTime());
    assertEquals(etime, range.getStartRange().getEndTime());
  }

  /** BEFORE OR DURING an interval: open start, end just inside the interval's end. */
  @Test
  public void testBeforeOrDuring() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    // NOTE(review): despite the name, 'stime' holds the interval end minus 1 ms -- it is used
    // below as the expected *end* of the extracted range.
    final Date stime = new Date(DateUtilities.parseISO("2005-05-19T21:32:56Z").getTime() - 1);
    final Filter filter =
        CQL.toFilter("when BEFORE OR DURING 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z");
    final Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(stime, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(stime, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
  }

  /** DURING OR AFTER an interval: start just inside the interval's start, open end. */
  @Test
  public void testDuringOrAfter() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date stime = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Filter filter =
        CQL.toFilter("when DURING OR AFTER 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z");
    final Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(stime, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertEquals(stime, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME, rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
  }

  /** Overlapping BEFORE/AFTER conjunctions intersect into a single bounded range. */
  @Test
  public void testAndOverlap() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date sTime = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Date eTime = new Date(DateUtilities.parseISO("2005-05-20T20:32:56Z").getTime() - 1);
    Filter filter =
        CQL.toFilter("when before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z");
    Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(sTime, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(eTime, rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
    // Mixed attributes: only the 'after' clause bounds "when", so its end stays open.
    filter =
        CQL.toFilter("sometime before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertEquals(sTime, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME, rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
    // And symmetrically: only the 'before' clause bounds "when", so its start stays open.
    filter =
        CQL.toFilter("when before 2005-05-20T20:32:56Z and sometime after 2005-05-19T20:32:56Z");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertFalse(rangeSet.isEmpty());
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(eTime, rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
  }

  /** Non-overlapping BEFORE/AFTER conjunction is unsatisfiable -> empty constraint set. */
  @Test
  public void testAndNoOverlap() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Filter filter =
        CQL.toFilter("when before 2005-05-17T20:32:56Z and when after 2005-05-19T20:32:56Z");
    final Query query = new Query("type", filter);
    final TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertTrue(rangeSet.isEmpty());
  }

  /** Disjunctions union into multiple ranges on the same (or different) attributes. */
  @Test
  public void testOr() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date sTime2 = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Date eTime1 = new Date(DateUtilities.parseISO("2005-05-17T20:32:56Z").getTime() - 1);
    Filter filter =
        CQL.toFilter("when before 2005-05-17T20:32:56Z or when after 2005-05-19T20:32:56Z");
    Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    // two disjoint ranges: (-inf, eTime1] and [sTime2, +inf)
    assertEquals(eTime1, rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
    assertEquals(sTime2, rangeSet.getConstraintsFor("when").getRanges().get(1).getStartTime());
    // test mixed
    filter =
        CQL.toFilter("when before 2005-05-17T20:32:56Z or sometime after 2005-05-19T20:32:56Z");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertEquals(eTime1, rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getEndRange().getStartTime());
    assertEquals(
        TemporalRange.END_TIME,
        rangeSet.getConstraintsFor("sometime").getStartRange().getEndTime());
    assertEquals(sTime2, rangeSet.getConstraintsFor("sometime").getEndRange().getStartTime());
  }

  /** NOT over a "before-or-after" disjunction collapses to the complementary interval. */
  @Test
  public void testNotBetween() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date sTime2 = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Date eTime1 = new Date(DateUtilities.parseISO("2005-05-17T20:32:56Z").getTime() - 1);
    final Filter filter =
        CQL.toFilter(
            "not (when before 2005-05-17T20:32:56Z or when after 2005-05-19T20:32:56Z)");
    final Query query = new Query("type", filter);
    final TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(eTime1, rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        new Date(sTime2.getTime() - 1),
        rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
  }

  /** NOT over an interval conjunction yields the two complementary open-ended ranges. */
  @Test
  public void testNotOutliers() throws CQLException, ParseException {
    final ExtractTimeFilterVisitor visitor = new ExtractTimeFilterVisitor();
    final Date sTime = new Date(DateUtilities.parseISO("2005-05-19T20:32:56Z").getTime() + 1);
    final Date eTime = new Date(DateUtilities.parseISO("2005-05-20T20:32:56Z").getTime() - 1);
    Filter filter =
        CQL.toFilter("not (when before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z)");
    Query query = new Query("type", filter);
    TemporalConstraintsSet rangeSet =
        (TemporalConstraintsSet) query.getFilter().accept(visitor, null);
    assertNotNull(rangeSet);
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getStartRange().getStartTime());
    assertEquals(
        new Date(sTime.getTime() - 1),
        rangeSet.getConstraintsFor("when").getStartRange().getEndTime());
    assertEquals(eTime, rangeSet.getConstraintsFor("when").getRanges().get(1).getStartTime());
    assertEquals(
        TemporalRange.END_TIME,
        rangeSet.getConstraintsFor("when").getRanges().get(1).getEndTime());
    // Mixed attributes under NOT: only the clause naming "when" contributes to its constraints.
    filter =
        CQL.toFilter(
            "not (sometime before 2005-05-20T20:32:56Z and when after 2005-05-19T20:32:56Z)");
    query = new Query("type", filter);
    rangeSet = (TemporalConstraintsSet) query.getFilter().accept(visitorWithDescriptor, null);
    assertNotNull(rangeSet);
    assertEquals(
        new Date(sTime.getTime() - 1),
        rangeSet.getConstraintsFor("when").getEndRange().getEndTime());
    assertEquals(
        TemporalRange.START_TIME,
        rangeSet.getConstraintsFor("when").getEndRange().getStartTime());
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoToolsAttributesSubsetTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import static org.junit.Assert.assertFalse; import java.io.IOException; import org.geotools.data.DataStore; import org.geotools.data.DataUtilities; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureReader; import org.geotools.data.FeatureWriter; import org.geotools.data.Query; import org.geotools.data.Transaction; import org.geotools.feature.SchemaException; import org.geotools.filter.text.cql2.CQL; import org.geotools.filter.text.cql2.CQLException; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.BaseDataStoreTest; import org.locationtech.geowave.core.geotime.util.GeometryUtils; import org.locationtech.jts.geom.Coordinate; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class GeoToolsAttributesSubsetTest extends BaseDataStoreTest { private DataStore geotoolsDataStore; private SimpleFeatureType type; private static final String typeName = "testStuff"; private static final String typeSpec = "geometry:Geometry:srid=4326,aLong:java.lang.Long,aString:String"; private static final String cqlPredicate = "BBOX(geometry,40,40,42,42)"; private static final String geometry_attribute = "geometry"; private static final String long_attribute = "aLong"; private static final String string_attribute = "aString"; @Before public void setup() throws IOException, GeoWavePluginException, SchemaException { geotoolsDataStore = createDataStore(); type = DataUtilities.createType(typeName, typeSpec); 
geotoolsDataStore.createSchema(type); final Transaction transaction = new DefaultTransaction(); final FeatureWriter writer = geotoolsDataStore.getFeatureWriter(type.getTypeName(), transaction); assertFalse(writer.hasNext()); SimpleFeature newFeature = writer.next(); newFeature.setAttribute( geometry_attribute, GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(41.25, 41.25))); newFeature.setAttribute(long_attribute, 1l); newFeature.setAttribute(string_attribute, "string1"); writer.write(); newFeature = writer.next(); newFeature.setAttribute( geometry_attribute, GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(41.5, 41.5))); newFeature.setAttribute(long_attribute, 2l); newFeature.setAttribute(string_attribute, "string2"); writer.write(); newFeature = writer.next(); newFeature.setAttribute( geometry_attribute, GeometryUtils.GEOMETRY_FACTORY.createPoint(new Coordinate(41.75, 41.75))); newFeature.setAttribute(long_attribute, 3l); newFeature.setAttribute(string_attribute, "string3"); writer.write(); writer.close(); transaction.commit(); transaction.close(); } @Test public void testAllAttributes() throws CQLException, IOException { final Query query = new Query(typeName, CQL.toFilter(cqlPredicate), Query.ALL_PROPERTIES); final FeatureReader reader = geotoolsDataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); count++; Assert.assertTrue(feature.getAttribute(geometry_attribute) != null); Assert.assertTrue(feature.getAttribute(long_attribute) != null); Assert.assertTrue(feature.getAttribute(string_attribute) != null); } Assert.assertTrue(count == 3); } @Test public void testSubsetAttributes() throws CQLException, IOException { final Query query = new Query( typeName, CQL.toFilter(cqlPredicate), new String[] {geometry_attribute, string_attribute}); final FeatureReader reader = geotoolsDataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while 
(reader.hasNext()) { final SimpleFeature feature = reader.next(); count++; Assert.assertTrue(feature.getAttribute(geometry_attribute) != null); Assert.assertTrue(feature.getAttribute(long_attribute) == null); Assert.assertTrue(feature.getAttribute(string_attribute) != null); } Assert.assertTrue(count == 3); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureReaderTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.NoSuchElementException; import java.util.UUID; import org.geotools.data.DataStore; import org.geotools.data.DataUtilities; import org.geotools.data.DefaultTransaction; import org.geotools.data.DelegatingFeatureReader; import org.geotools.data.FeatureReader; import org.geotools.data.FeatureWriter; import org.geotools.data.Query; import org.geotools.data.Transaction; import org.geotools.feature.SchemaException; import org.geotools.feature.visitor.MaxVisitor; import org.geotools.feature.visitor.MinVisitor; import org.geotools.filter.FilterFactoryImpl; import org.geotools.filter.text.cql2.CQL; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.BaseDataStoreTest; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.geowave.core.geotime.index.api.SpatialIndexBuilder; import org.locationtech.geowave.core.geotime.index.api.SpatialTemporalIndexBuilder; import org.locationtech.geowave.core.store.index.AttributeDimensionalityTypeProvider; import org.locationtech.geowave.core.store.index.AttributeIndexOptions; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.PrecisionModel; import 
org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class GeoWaveFeatureReaderTest extends BaseDataStoreTest { DataStore dataStore; SimpleFeatureType schema; SimpleFeatureType type; final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)); Query query = null; List fids = new ArrayList<>(); List pids = new ArrayList<>(); Date stime, mtime, etime; @Before public void setup() throws SchemaException, CQLException, Exception { dataStore = createDataStore(); type = DataUtilities.createType( "GeoWaveFeatureReaderTest", "geometry:Geometry:srid=4326,start:Date,end:Date,pop:java.lang.Long,pid:String"); ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex( new SpatialIndexBuilder().createIndex()); ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex( new SpatialTemporalIndexBuilder().createIndex()); dataStore.createSchema(type); ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex( type.getTypeName(), AttributeDimensionalityTypeProvider.createIndexFromOptions( ((GeoWaveGTDataStore) dataStore).getDataStore(), new AttributeIndexOptions(type.getTypeName(), "pop"))); stime = DateUtilities.parseISO("2005-05-15T20:32:56Z"); mtime = DateUtilities.parseISO("2005-05-20T20:32:56Z"); etime = DateUtilities.parseISO("2005-05-25T20:32:56Z"); final Transaction transaction1 = new DefaultTransaction(); final FeatureWriter writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1); assertFalse(writer.hasNext()); SimpleFeature newFeature = writer.next(); newFeature.setAttribute("pop", Long.valueOf(100)); newFeature.setAttribute("pid", "a" + UUID.randomUUID().toString()); newFeature.setAttribute("start", stime); newFeature.setAttribute("end", mtime); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25))); fids.add(newFeature.getID()); pids.add(newFeature.getAttribute("pid").toString()); writer.write(); newFeature = writer.next(); 
newFeature.setAttribute("pop", Long.valueOf(101)); newFeature.setAttribute("pid", "b" + UUID.randomUUID().toString()); newFeature.setAttribute("start", mtime); newFeature.setAttribute("end", etime); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(28.25, 41.25))); fids.add(newFeature.getID()); pids.add(newFeature.getAttribute("pid").toString()); writer.write(); writer.close(); transaction1.commit(); transaction1.close(); query = new Query( "GeoWaveFeatureReaderTest", ECQL.toFilter("IN ('" + fids.get(0) + "')"), new String[] {"geometry", "pid"}); } @Test public void testFID() throws IllegalArgumentException, NoSuchElementException, IOException, CQLException { final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } assertTrue(count > 0); } @Test public void testAttributeIndex() throws CQLException, IOException { final Query ecqlQuery = new Query( "GeoWaveFeatureReaderTest", ECQL.toFilter("pop > 100"), new String[] {"geometry", "pid", "pop"}); FeatureReader reader = dataStore.getFeatureReader(ecqlQuery, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertEquals(fids.get(1), feature.getID()); count++; } reader.close(); assertEquals(1, count); final Query cqlQuery = new Query( "GeoWaveFeatureReaderTest", CQL.toFilter("pop >= 100"), new String[] {"geometry", "pid", "pop"}); reader = dataStore.getFeatureReader(cqlQuery, Transaction.AUTO_COMMIT); count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } reader.close(); assertEquals(2, count); } @Test public void testTemporal() throws IllegalArgumentException, NoSuchElementException, IOException, CQLException { // This tests performs both CQL and ECQL queries on a time-based attribute 
because different // geometry visitors are used to extract the geometry portion of the query. Under normal // circumstances this is fine except for when there is no geometry constraint specified. Using // CQL will result in a default geometry with infinite area. ECQL results in a null geometry. // This test checks both code paths to ensure there are no unintended errors. final Query ecqlQuery = new Query( "GeoWaveFeatureReaderTest", ECQL.toFilter("start AFTER 2005-05-16T20:32:56Z"), new String[] {"geometry", "pid"}); FeatureReader reader = dataStore.getFeatureReader(ecqlQuery, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } reader.close(); assertEquals(1, count); final Query cqlQuery = new Query( "GeoWaveFeatureReaderTest", CQL.toFilter("start >= '2005-05-16 20:32:56+0000'"), new String[] {"geometry", "pid"}); reader = dataStore.getFeatureReader(cqlQuery, Transaction.AUTO_COMMIT); count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } reader.close(); assertEquals(1, count); } @Test public void testSmallBBOX() throws IllegalArgumentException, NoSuchElementException, IOException { final FilterFactoryImpl factory = new FilterFactoryImpl(); final Query query = new Query( "GeoWaveFeatureReaderTest", factory.bbox("geometry", 28, 41, 28.5, 41.5, "EPSG:4326"), new String[] {"geometry", "pid"}); final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } assertEquals(1, count); } @Test public void testBBOX() throws IllegalArgumentException, NoSuchElementException, IOException { final FilterFactoryImpl factory = new FilterFactoryImpl(); final Query query = new Query( "GeoWaveFeatureReaderTest", 
factory.bbox("geometry", -180, -90, 180, 90, "EPSG:4326"), new String[] {"geometry", "pid"}); final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } assertTrue(count > 0); } @Test public void testRangeIndex() throws IllegalArgumentException, NoSuchElementException, IOException { final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } assertEquals(1, count); } @Test public void testLike() throws IllegalArgumentException, NoSuchElementException, IOException, CQLException { System.out.println(pids); final Query query = new Query( "GeoWaveFeatureReaderTest", ECQL.toFilter("pid like '" + pids.get(0).substring(0, 1) + "%'"), new String[] {"geometry", "pid"}); final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); int count = 0; while (reader.hasNext()) { final SimpleFeature feature = reader.next(); assertTrue(fids.contains(feature.getID())); count++; } assertEquals(1, count); } @Test public void testMax() throws IllegalArgumentException, NoSuchElementException, IOException { final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); final MaxVisitor visitor = new MaxVisitor("start", type); unwrapDelegatingFeatureReader(reader).getFeatureCollection().accepts(visitor, null); assertTrue(visitor.getMax().equals(mtime)); } @Test public void testMin() throws IllegalArgumentException, NoSuchElementException, IOException { final FeatureReader reader = dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT); final MinVisitor visitor = new MinVisitor("start", type); unwrapDelegatingFeatureReader(reader).getFeatureCollection().accepts(visitor, null); 
assertTrue(visitor.getMin().equals(stime)); } private GeoWaveFeatureReader unwrapDelegatingFeatureReader( final FeatureReader reader) { // GeoTools uses decorator pattern to wrap FeatureReaders // we need to get down to the inner GeoWaveFeatureReader FeatureReader currReader = reader; while (!(currReader instanceof GeoWaveFeatureReader)) { currReader = ((DelegatingFeatureReader) currReader).getDelegate(); } return (GeoWaveFeatureReader) currReader; } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoWaveFeatureSourceTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.text.ParseException;
import java.util.UUID;
import org.geotools.data.DataStore;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.BaseDataStoreTest;
import org.locationtech.geowave.adapter.vector.util.DateUtilities;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;
import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;
import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Statistic;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.statistics.InternalStatisticsHelper;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic;
import org.locationtech.geowave.core.store.statistics.adapter.CountStatistic.CountValue;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic.NumericRangeValue;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.filter.Filter;

// Exercises GeoWaveFeatureSource bounds, count, and statistics behavior for an empty
// schema, a fully-populated schema, and a filtered (partial) query, using two
// different ingest strategies (FeatureWriter vs. SimpleFeatureStore.addFeatures).
public class GeoWaveFeatureSourceTest extends BaseDataStoreTest {
  static final GeometryFactory factory =
      new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  @Test
  public void test() throws Exception {
    testEmpty();
    testFull(new FWPopulater(), "fw");
    testPartial(new FWPopulater(), "fw");
    // test different populate methods
    testFull(new SourcePopulater(), "s");
    testPartial(new SourcePopulater(), "s");
  }

  // Bounds and count of a freshly-created schema with no data ingested.
  public void testEmpty() throws Exception {
    final SimpleFeatureType type =
        DataUtilities.createType(
            "GeoWaveFeatureSourceTest_e",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date");
    final DataStore dataStore = createDataStore();
    dataStore.createSchema(type);
    final SimpleFeatureSource source = dataStore.getFeatureSource("GeoWaveFeatureSourceTest_e");
    final ReferencedEnvelope env = source.getBounds();
    // NOTE(review): 90/-180 look axis-swapped for an X/Y envelope (X normally spans
    // +-180, Y spans +-90) -- presumably these reflect the store's default
    // whole-world bounds ordering; confirm against GeoWaveFeatureSource.getBounds().
    assertEquals(90.0, env.getMaxX(), 0.0001);
    assertEquals(-180.0, env.getMinY(), 0.0001);
    final Query query = new Query("GeoWaveFeatureSourceTest_e", Filter.INCLUDE);
    assertEquals(0, source.getCount(query));
  }

  // Populates three features and verifies bounds, count, and the base set of
  // statistics (count, bbox, time range, and the explicitly-added numeric range
  // on "pop").
  public void testFull(final Populater populater, final String ext) throws Exception {
    final String typeName = "GeoWaveFeatureSourceTest_full" + ext;
    final SimpleFeatureType type =
        DataUtilities.createType(
            typeName,
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date");
    final DataStore dataStore = createDataStore();
    final GeoWaveGTDataStore gwgtDataStore = (GeoWaveGTDataStore) dataStore;
    // register the numeric-range statistic before ingest so it gets computed
    gwgtDataStore.dataStatisticsStore.addStatistic(new NumericRangeStatistic(typeName, "pop"));
    populater.populate(type, dataStore);
    final SimpleFeatureSource source = dataStore.getFeatureSource(typeName);
    final ReferencedEnvelope env = source.getBounds();
    assertEquals(43.454, env.getMaxX(), 0.0001);
    assertEquals(27.232, env.getMinY(), 0.0001);
    assertEquals(28.242, env.getMaxY(), 0.0001);
    final Query query = new Query(typeName, Filter.INCLUDE);
    assertTrue(source.getCount(query) > 2);
    final short internalAdapterId =
        ((GeoWaveGTDataStore) dataStore).getInternalAdapterStore().addTypeName(typeName);
    final DataStatisticsStore statsStore =
        ((GeoWaveGTDataStore) dataStore).getDataStatisticsStore();
    final DataTypeAdapter adapter =
        ((GeoWaveGTDataStore) dataStore).getAdapterStore().getAdapter(internalAdapterId);
    BoundingBoxValue bboxStats = null;
    CountValue cStats = null;
    TimeRangeValue timeRangeStats = null;
    NumericRangeValue popStats = null;
    // starts at 1 so the data-type count statistic fetched below is included
    int count = 1;
    cStats =
        InternalStatisticsHelper.getDataTypeStatistic(
            statsStore,
            CountStatistic.STATS_TYPE,
            typeName);
    assertNotNull(cStats);
    // NOTE(review): the '>>' below looks like generic parameters garbled during
    // extraction -- restore from the original source before compiling.
    try (final CloseableIterator>> stats =
        statsStore.getFieldStatistics(adapter, null, null, null)) {
      assertTrue(stats.hasNext());
      while (stats.hasNext()) {
        final Statistic stat = stats.next();
        if (stat instanceof BoundingBoxStatistic) {
          bboxStats = statsStore.getStatisticValue((BoundingBoxStatistic) stat);
        } else if (stat instanceof TimeRangeStatistic) {
          timeRangeStats = statsStore.getStatisticValue((TimeRangeStatistic) stat);
        } else if (stat instanceof NumericRangeStatistic) {
          popStats = statsStore.getStatisticValue((NumericRangeStatistic) stat);
        }
        count++;
      }
    }
    // rather than maintain an exact count on stats as we should be able
    // to add them more dynamically, just make sure that there is some
    // set of base stats found
    assertTrue("Unexpectedly few stats found", count >= 4);
    assertEquals(66, popStats.getMin(), 0.001);
    assertEquals(100, popStats.getMax(), 0.001);
    assertEquals(
        DateUtilities.parseISO("2005-05-17T20:32:56Z"),
        timeRangeStats.asTemporalRange().getStartTime());
    assertEquals(
        DateUtilities.parseISO("2005-05-19T20:32:56Z"),
        timeRangeStats.asTemporalRange().getEndTime());
    assertEquals(43.454, bboxStats.getMaxX(), 0.0001);
    assertEquals(27.232, bboxStats.getMinY(), 0.0001);
    assertEquals(3, (long) cStats.getValue());
  }

  // Queries a bbox + time filter that matches only 2 of the 3 ingested features
  // and checks the filtered bounds and count.
  public void testPartial(final Populater populater, final String ext)
      throws CQLException, Exception {
    final String typeName = "GeoWaveFeatureSourceTest_p" + ext;
    final SimpleFeatureType type =
        DataUtilities.createType(
            typeName,
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date");
    final DataStore dataStore = createDataStore();
    populater.populate(type, dataStore);
    final SimpleFeatureSource source = dataStore.getFeatureSource(typeName);
    final Query query =
        new Query(
            typeName,
            CQL.toFilter(
                "BBOX(geometry,42,28,44,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z"),
            new String[] {"geometry", "when", "pid"});
    final ReferencedEnvelope env = source.getBounds(query);
    assertEquals(43.454, env.getMaxX(), 0.0001);
    assertEquals(28.232, env.getMinY(), 0.0001);
    assertEquals(28.242, env.getMaxY(), 0.0001);
    assertEquals(2, source.getCount(query));
  }

  // Strategy for loading the three canonical test features into the store.
  public interface Populater {
    void populate(final SimpleFeatureType type, final DataStore dataStore)
        throws IOException, CQLException, ParseException;
  }

  // Ingests via the low-level FeatureWriter API (next/set/write per feature).
  private static class FWPopulater implements Populater {
    @Override
    public void populate(final SimpleFeatureType type, final DataStore dataStore)
        throws IOException, CQLException, ParseException {
      dataStore.createSchema(type);
      final Transaction transaction1 = new DefaultTransaction();
      final FeatureWriter writer =
          dataStore.getFeatureWriter(type.getTypeName(), transaction1);
      assertFalse(writer.hasNext());
      SimpleFeature newFeature = writer.next();
      newFeature.setAttribute("pop", Long.valueOf(77));
      newFeature.setAttribute("pid", UUID.randomUUID().toString());
      newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T20:32:56Z"));
      newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232)));
      writer.write();
      newFeature = writer.next();
      newFeature.setAttribute("pop", Long.valueOf(66));
      newFeature.setAttribute("pid", UUID.randomUUID().toString());
      newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-18T20:32:56Z"));
      newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 27.232)));
      writer.write();
      newFeature = writer.next();
      newFeature.setAttribute("pop", Long.valueOf(100));
      newFeature.setAttribute("pid", UUID.randomUUID().toString());
      newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-17T20:32:56Z"));
      newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.242)));
      writer.write();
      writer.close();
      transaction1.commit();
      transaction1.close();
    }
  }

  // Ingests via SimpleFeatureStore.addFeatures; the FeatureWriter is only used to
  // mint blank features.
  private static class SourcePopulater implements Populater {
    @Override
    public void populate(final SimpleFeatureType type, final DataStore dataStore)
        throws IOException, CQLException, ParseException {
      dataStore.createSchema(type);
      final Transaction transaction1 = new DefaultTransaction();
      final SimpleFeatureStore source =
          (SimpleFeatureStore) dataStore.getFeatureSource(type.getName());
      final FeatureWriter writer =
          dataStore.getFeatureWriter(type.getTypeName(), transaction1);
      assertFalse(writer.hasNext());
      SimpleFeature newFeature = writer.next();
      newFeature.setAttribute("pop", Long.valueOf(77));
      newFeature.setAttribute("pid", UUID.randomUUID().toString());
      newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T20:32:56Z"));
      newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232)));
      source.addFeatures(DataUtilities.collection(newFeature));
      newFeature = writer.next();
      newFeature.setAttribute("pop", Long.valueOf(66));
      newFeature.setAttribute("pid", UUID.randomUUID().toString());
      newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-18T20:32:56Z"));
      newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 27.232)));
      source.addFeatures(DataUtilities.collection(newFeature));
      newFeature = writer.next();
      newFeature.setAttribute("pop", Long.valueOf(100));
      newFeature.setAttribute("pid", UUID.randomUUID().toString());
      newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-17T20:32:56Z"));
      newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.242)));
      source.addFeatures(DataUtilities.collection(newFeature));
      // NOTE(review): the writer is never written to or closed here -- it only
      // mints blank features; confirm no pending state leaks onto the transaction.
      transaction1.commit();
      transaction1.close();
    }
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/GeoWavePluginConfigTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.List;
import org.geotools.data.DataAccessFactory.Param;
import org.geotools.data.Parameter;
import org.junit.Assert;
import org.junit.Test;
import org.locationtech.geowave.core.store.memory.MemoryStoreFactoryFamily;

// Builds a GeoWavePluginConfig from the advertised plugin parameters of an in-memory
// store family and verifies the configured values round-trip.
public class GeoWavePluginConfigTest {
  @Test
  public void test() throws GeoWavePluginException, URISyntaxException {
    final List pluginParams = GeoWavePluginConfig.getPluginParams(new MemoryStoreFactoryFamily());
    final HashMap values = new HashMap<>();
    for (final Param pluginParam : pluginParams) {
      if (pluginParam.getName().equals(GeoWavePluginConfig.AUTH_URL_KEY)) {
        // the auth URL is deliberately left unset
        continue;
      }
      if (pluginParam.getName().equals(GeoWavePluginConfig.LOCK_MGT_KEY)) {
        // pick the first advertised locking-manager option
        final List lockOptions = (List) pluginParam.metadata.get(Parameter.OPTIONS);
        assertNotNull(lockOptions);
        assertTrue(!lockOptions.isEmpty());
        values.put(pluginParam.getName(), lockOptions.get(0));
      } else if (pluginParam.getName().equals(GeoWavePluginConfig.FEATURE_NAMESPACE_KEY)) {
        values.put(pluginParam.getName(), new URI("http://test/test"));
      } else if (pluginParam.getName().equals(GeoWavePluginConfig.TRANSACTION_BUFFER_SIZE)) {
        values.put(pluginParam.getName(), 1000);
      } else {
        // everything else takes its declared default (empty string when absent)
        Object fallback = pluginParam.getDefaultValue();
        if (fallback == null) {
          fallback = "";
        }
        values.put(pluginParam.getName(), (Serializable) fallback);
      }
    }
    final GeoWavePluginConfig pluginConfig =
        new GeoWavePluginConfig(new MemoryStoreFactoryFamily(), values);
    Assert.assertEquals(1000, (int) pluginConfig.getTransactionBufferSize());
    assertNotNull(pluginConfig.getLockingManagementFactory());
    assertNotNull(pluginConfig.getLockingManagementFactory().createLockingManager(pluginConfig));
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSBoundedQueryTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import java.io.IOException; import java.text.ParseException; import java.util.UUID; import org.geotools.data.DataStore; import org.geotools.data.DataUtilities; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureReader; import org.geotools.data.FeatureWriter; import org.geotools.data.Query; import org.geotools.data.Transaction; import org.geotools.feature.SchemaException; import org.geotools.filter.text.cql2.CQL; import org.geotools.filter.text.cql2.CQLException; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.BaseDataStoreTest; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.PrecisionModel; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class WFSBoundedQueryTest extends BaseDataStoreTest { DataStore dataStore; SimpleFeatureType schema; SimpleFeatureType type; final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)); @Before public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException { dataStore = createDataStore(); type = DataUtilities.createType( "geostuff", "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date"); dataStore.createSchema(type); } public void populate() throws IOException, CQLException, ParseException 
{ final Transaction transaction1 = new DefaultTransaction(); final FeatureWriter writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1); assertFalse(writer.hasNext()); SimpleFeature newFeature = writer.next(); newFeature.setAttribute("pop", Long.valueOf(100)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T20:32:56Z")); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232))); writer.write(); newFeature = writer.next(); newFeature.setAttribute("pop", Long.valueOf(100)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-18T20:32:56Z")); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 27.232))); writer.write(); newFeature = writer.next(); newFeature.setAttribute("pop", Long.valueOf(100)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-17T20:32:56Z")); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232))); writer.write(); writer.close(); transaction1.commit(); transaction1.close(); } @Test public void testGeo() throws CQLException, IOException, ParseException { populate(); Transaction transaction2 = new DefaultTransaction(); Query query = new Query( "geostuff", CQL.toFilter( "BBOX(geometry,44,27,42,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z"), new String[] {"geometry", "when", "pid"}); FeatureReader reader = dataStore.getFeatureReader(query, transaction2); int c = 0; while (reader.hasNext()) { reader.next(); c++; } reader.close(); transaction2.commit(); transaction2.close(); assertEquals(3, c); transaction2 = new DefaultTransaction(); query = new Query( "geostuff", CQL.toFilter( "BBOX(geometry,42,28,44,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z"), new String[] {"geometry", "when", 
"pid"}); reader = dataStore.getFeatureReader(query, transaction2); c = 0; while (reader.hasNext()) { reader.next(); c++; } reader.close(); transaction2.commit(); transaction2.close(); assertEquals(2, c); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSBoundedSpatialQueryTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.text.ParseException;
import java.util.UUID;
import org.geotools.data.DataStore;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureReader;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.feature.SchemaException;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.BaseDataStoreTest;
import org.locationtech.geowave.adapter.vector.util.DateUtilities;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

/** Test with a default spatial index rather than geo-temporal */
public class WFSBoundedSpatialQueryTest extends BaseDataStoreTest {
  DataStore dataStore;
  SimpleFeatureType schema;
  SimpleFeatureType type;
  final GeometryFactory factory =
      new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  @Before
  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {
    dataStore = createDataStore();
    type =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date");
    // mark "when" as non-temporal so the store falls back to a plain spatial index
    type.getDescriptor("when").getUserData().put("time", false);
    dataStore.createSchema(type);
  }

  // Ingests three features (same pop, three distinct timestamps) in one transaction.
  public void populate() throws IOException, CQLException, ParseException {
    final Transaction transaction1 = new DefaultTransaction();
    final FeatureWriter writer =
        dataStore.getFeatureWriter(type.getTypeName(), transaction1);
    assertFalse(writer.hasNext());
    SimpleFeature newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T20:32:56Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232)));
    writer.write();
    newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-18T20:32:56Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 27.232)));
    writer.write();
    newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-17T20:32:56Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232)));
    writer.write();
    writer.close();
    transaction1.commit();
    transaction1.close();
  }

  // First query uses a reversed (44,27,42,30) bbox and expects all 3 features;
  // the second, well-formed bbox narrows the result to 2.
  @Test
  public void testGeo() throws CQLException, IOException, ParseException {
    populate();
    Transaction transaction2 = new DefaultTransaction();
    Query query =
        new Query(
            "geostuff",
            CQL.toFilter(
                "BBOX(geometry,44,27,42,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z"),
            new String[] {"geometry", "when", "pid"});
    FeatureReader reader = dataStore.getFeatureReader(query, transaction2);
    int c = 0;
    while (reader.hasNext()) {
      reader.next();
      c++;
    }
    reader.close();
    transaction2.commit();
    transaction2.close();
    assertEquals(3, c);
    transaction2 = new DefaultTransaction();
    query =
        new Query(
            "geostuff",
            CQL.toFilter(
                "BBOX(geometry,42,28,44,30) and when during 2005-05-01T20:32:56Z/2005-05-29T21:32:56Z"),
            new String[] {"geometry", "when", "pid"});
    reader = dataStore.getFeatureReader(query, transaction2);
    c = 0;
    while (reader.hasNext()) {
      reader.next();
      c++;
    }
    reader.close();
    transaction2.commit();
    transaction2.close();
    assertEquals(2, c);
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSSpatialTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.text.ParseException;
import java.util.UUID;
import org.geotools.data.DataStore;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureReader;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.feature.SchemaException;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.BaseDataStoreTest;
import org.locationtech.geowave.adapter.vector.util.DateUtilities;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

// Verifies that a spatio-temporal query returns exactly the one feature that was
// actually written and falls inside the query's time window.
public class WFSSpatialTest extends BaseDataStoreTest {
  DataStore dataStore;
  SimpleFeatureType schema;
  SimpleFeatureType type;
  final GeometryFactory factory =
      new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));
  Query query = null;

  @Before
  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {
    dataStore = createDataStore();
    type =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,pid:String");
    dataStore.createSchema(type);
    query =
        new Query(
            "geostuff",
            CQL.toFilter(
                "BBOX(geometry,27.20,41.30,27.30,41.20) and when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z"),
            new String[] {"geometry", "pid"});
  }

  @Test
  public void test() throws IOException, CQLException, ParseException {
    final Transaction transaction1 = new DefaultTransaction();
    final FeatureWriter writer =
        dataStore.getFeatureWriter(type.getTypeName(), transaction1);
    assertFalse(writer.hasNext());
    SimpleFeature newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T18:33:55Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    // NOTE(review): no writer.write() here -- this first feature is abandoned when
    // the next writer.next() is requested; its timestamp is outside the query
    // window anyway, so it would not match even if written. Confirm intentional.
    newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T20:33:55Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    writer.write();
    writer.close();
    final FeatureReader reader = dataStore.getFeatureReader(query, transaction1);
    assertTrue(reader.hasNext());
    final SimpleFeature priorFeature = reader.next();
    // the single match must be the second (written) feature
    assertEquals(newFeature.getAttribute("pid"), priorFeature.getAttribute("pid"));
    assertFalse(reader.hasNext());
    reader.close();
    transaction1.commit();
    transaction1.close();
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSTemporalQueryTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.text.ParseException;
import java.util.UUID;
import org.geotools.data.DataStore;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureReader;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.feature.SchemaException;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.BaseDataStoreTest;
import org.locationtech.geowave.adapter.vector.util.DateUtilities;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

// Verifies temporal range predicates (separate start/end Date attributes) combined
// with a bbox filter against a plain spatial index.
public class WFSTemporalQueryTest extends BaseDataStoreTest {
  DataStore dataStore;
  SimpleFeatureType schema;
  SimpleFeatureType type;
  final GeometryFactory factory =
      new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  @Before
  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {
    dataStore = createDataStore();
    // add a purely-spatial index so the temporal filtering is not index-driven
    ((GeoWaveGTDataStore) dataStore).getDataStore().addIndex(
        SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()));
    type =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,start:Date,end:Date");
    dataStore.createSchema(type);
  }

  // Ingests three features with overlapping [start, end] ranges in one transaction.
  public void populate() throws IOException, CQLException, ParseException {
    final Transaction transaction1 = new DefaultTransaction();
    final FeatureWriter writer =
        dataStore.getFeatureWriter(type.getTypeName(), transaction1);
    assertFalse(writer.hasNext());
    SimpleFeature newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("start", DateUtilities.parseISO("2005-05-17T20:32:56Z"));
    newFeature.setAttribute("end", DateUtilities.parseISO("2005-05-19T20:32:56Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232)));
    writer.write();
    newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("start", DateUtilities.parseISO("2005-05-18T20:32:56Z"));
    newFeature.setAttribute("end", DateUtilities.parseISO("2005-05-20T20:32:56Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 27.232)));
    writer.write();
    newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("start", DateUtilities.parseISO("2005-05-21T20:32:56Z"));
    newFeature.setAttribute("end", DateUtilities.parseISO("2005-05-22T20:32:56Z"));
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232)));
    writer.write();
    writer.close();
    transaction1.commit();
    transaction1.close();
  }

  // Only the first two features satisfy both "start during" and "end during";
  // the third starts after the start-window closes.
  @Test
  public void testTemporal() throws CQLException, IOException, ParseException {
    populate();
    final Transaction transaction2 = new DefaultTransaction();
    final Query query =
        new Query(
            "geostuff",
            CQL.toFilter(
                "BBOX(geometry,44,27,42,30) and start during 2005-05-16T20:32:56Z/2005-05-20T21:32:56Z and end during 2005-05-18T20:32:56Z/2005-05-22T21:32:56Z"),
            new String[] {"geometry", "start", "end", "pid"});
    final FeatureReader reader = dataStore.getFeatureReader(query, transaction2);
    int c = 0;
    while (reader.hasNext()) {
      reader.next();
      c++;
    }
    reader.close();
    transaction2.commit();
    transaction2.close();
    assertEquals(2, c);
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/WFSTransactionTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.plugin;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.UUID;
import org.geotools.data.DataStore;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureReader;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.feature.SchemaException;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.BaseDataStoreTest;
import org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;
import org.locationtech.geowave.core.store.statistics.field.NumericRangeStatistic;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

// Exercises transaction isolation semantics (insert visibility, delete, update)
// of the GeoWave GeoTools DataStore.
public class WFSTransactionTest extends BaseDataStoreTest {
  DataStore dataStore;
  SimpleFeatureType schema;
  SimpleFeatureType type;
  final GeometryFactory factory =
      new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));
  Query query = null;

  @Before
  public void setup() throws SchemaException, CQLException, IOException, GeoWavePluginException {
    dataStore = createDataStore();
    type =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String");
    dataStore.createSchema(type);
    query =
        new Query(
            "geostuff",
            CQL.toFilter("BBOX(geometry,27.20,41.20,27.30,41.30)"),
            new String[] {"geometry", "pid"});
    if (dataStore instanceof GeoWaveGTDataStore) {
      // pre-register an empty numeric-range stat so the stats-cache check below
      // has something to find
      ((GeoWaveGTDataStore) dataStore).dataStore.addEmptyStatistic(
          new NumericRangeStatistic(type.getTypeName(), "pop"));
    }
  }

  // An uncommitted insert is visible to its own transaction but not to a new one;
  // after commit it becomes visible everywhere (read-committed, not serializable).
  @Test
  public void testInsertIsolation() throws IOException, CQLException {
    final Transaction transaction1 = new DefaultTransaction();
    final FeatureWriter writer =
        dataStore.getFeatureWriter(type.getTypeName(), transaction1);
    assertFalse(writer.hasNext());
    final SimpleFeature newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    writer.write();
    writer.close();
    FeatureReader reader = dataStore.getFeatureReader(query, transaction1);
    assertTrue(reader.hasNext());
    final SimpleFeature priorFeature = reader.next();
    assertEquals(newFeature.getAttribute("pid"), priorFeature.getAttribute("pid"));
    reader.close();
    // uncommitted at this point, so this next transaction should not see
    // it.
    final Transaction transaction2 = new DefaultTransaction();
    reader = dataStore.getFeatureReader(query, transaction2);
    assertFalse(reader.hasNext());
    reader.close();
    transaction1.commit();
    reader = dataStore.getFeatureReader(query, transaction1);
    assertTrue(reader.hasNext());
    reader.next();
    assertFalse(reader.hasNext());
    reader.close();
    transaction1.close();
    // since this implementation does not support serializable, transaction2
    // can see the changes even though
    // it started after transaction1 and before the commit.
    reader = dataStore.getFeatureReader(query, transaction2);
    assertTrue(reader.hasNext());
    reader.next();
    assertFalse(reader.hasNext());
    reader.close();
    transaction2.commit();
    transaction2.close();
    // stats check
    final Transaction transaction3 = new DefaultTransaction();
    reader =
        ((GeoWaveFeatureSource) ((GeoWaveGTDataStore) dataStore).getFeatureSource(
            "geostuff",
            transaction3)).getReaderInternal(query);
    final StatisticsCache transStats =
        ((GeoWaveFeatureReader) reader).getTransaction().getDataStatistics();
    assertNotNull(transStats.getFieldStatistic(NumericRangeStatistic.STATS_TYPE, "pop"));
    transaction3.close();
  }

  // ==============
  // DELETION TEST
  @Test
  public void testDelete() throws IOException {
    Transaction transaction1 = new DefaultTransaction();
    FeatureWriter writer =
        dataStore.getFeatureWriter(type.getTypeName(), transaction1);
    assertFalse(writer.hasNext());
    SimpleFeature newFeature = writer.next();
    newFeature.setAttribute("pop", Long.valueOf(100));
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    writer.write();
    writer.close();
    transaction1.commit();
    transaction1.close();
    Transaction transaction2 = new DefaultTransaction();
    FeatureReader reader = dataStore.getFeatureReader(query, transaction2);
    assertTrue(reader.hasNext());
    SimpleFeature priorFeature = reader.next();
    reader.close();
    transaction2.commit();
    transaction2.close();
    // Add one more in this transaction and remove the
    // prior feature.
final String idToRemove = priorFeature.getID(); transaction1 = new DefaultTransaction(); writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1); while (writer.hasNext()) { writer.next(); } newFeature = writer.next(); newFeature.setAttribute("pop", new Long(200)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25))); writer.write(); writer.close(); // Find the the prior one to remove writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1); assertTrue(writer.hasNext()); do { priorFeature = writer.next(); } while (!priorFeature.getID().equals(idToRemove) && writer.hasNext()); // make sure it is found assertTrue(priorFeature.getID().equals(idToRemove)); writer.remove(); writer.close(); // make sure a new transaction can see (not committed) transaction2 = new DefaultTransaction(); reader = dataStore.getFeatureReader(query, transaction2); assertTrue(reader.hasNext()); priorFeature = reader.next(); assertFalse(reader.hasNext()); assertTrue(priorFeature.getID().equals(idToRemove)); reader.close(); transaction2.commit(); transaction2.close(); // make sure existing transaction cannot see (not committed) reader = dataStore.getFeatureReader(query, transaction1); assertTrue(reader.hasNext()); priorFeature = reader.next(); assertFalse(reader.hasNext()); assertTrue(!priorFeature.getID().equals(idToRemove)); reader.close(); transaction1.commit(); transaction1.close(); // make sure a new transaction can not see (committed) transaction2 = new DefaultTransaction(); reader = dataStore.getFeatureReader(query, transaction2); assertTrue(reader.hasNext()); priorFeature = reader.next(); assertFalse(reader.hasNext()); assertTrue(!priorFeature.getID().equals(idToRemove)); reader.close(); transaction2.commit(); transaction2.close(); } @Test public void testUpdate() throws IOException { Transaction transaction1 = new DefaultTransaction(); FeatureWriter writer = 
dataStore.getFeatureWriter(type.getTypeName(), transaction1); assertFalse(writer.hasNext()); final SimpleFeature newFeature = writer.next(); newFeature.setAttribute("pop", Long.valueOf(100)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25))); writer.write(); writer.close(); transaction1.commit(); transaction1.close(); // change the pid transaction1 = new DefaultTransaction(); writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1); assertTrue(writer.hasNext()); SimpleFeature priorFeature = writer.next(); final String pid = UUID.randomUUID().toString(); priorFeature.setAttribute("pid", pid); writer.write(); writer.close(); // check update FeatureReader reader = dataStore.getFeatureReader(query, transaction1); assertTrue(reader.hasNext()); priorFeature = reader.next(); assertEquals(pid, priorFeature.getAttribute("pid")); reader.close(); // check isolation Transaction transaction2 = new DefaultTransaction(); reader = dataStore.getFeatureReader(query, transaction2); assertTrue(reader.hasNext()); priorFeature = reader.next(); assertFalse(reader.hasNext()); assertTrue(!priorFeature.getAttribute("pid").equals(pid)); reader.close(); transaction2.commit(); transaction2.close(); // commit change transaction1.commit(); transaction1.close(); // verify change transaction2 = new DefaultTransaction(); reader = dataStore.getFeatureReader(query, transaction2); assertTrue(reader.hasNext()); priorFeature = reader.next(); assertFalse(reader.hasNext()); assertTrue(priorFeature.getAttribute("pid").equals(pid)); reader.close(); transaction2.commit(); transaction2.close(); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/plugin/lock/MemoryLockManagerTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.plugin.lock; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.UUID; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureLock; import org.geotools.data.Transaction; import org.junit.Test; public class MemoryLockManagerTest { @Test public void testRelockLock() throws InterruptedException, IOException { final LockingManagement memoryLockManager = new MemoryLockManager("default"); final DefaultTransaction t1 = new DefaultTransaction(); memoryLockManager.lock(t1, "f8"); memoryLockManager.lock(t1, "f8"); t1.commit(); t1.close(); } @Test public void testLockWithProperAuth() throws InterruptedException, IOException { final LockingManagement memoryLockManager = new MemoryLockManager("default"); final Transaction t1 = Transaction.AUTO_COMMIT; final DefaultTransaction t2 = new DefaultTransaction(); t2.addAuthorization("auth5"); final FeatureLock lock = new FeatureLock("auth5", 1 /* minute */); memoryLockManager.lockFeatureID("sometime", "f5", t1, lock); final Thread commiter = new Thread(new Runnable() { @Override public void run() { try { Thread.sleep(4000); memoryLockManager.release("auth5", t1); } catch (final InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); } catch (final IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } }); final long currentTime = System.currentTimeMillis(); commiter.start(); memoryLockManager.lock(t2, "f5"); assertTrue((System.currentTimeMillis() - currentTime) < 4000); commiter.join(); } @Test public 
void testLockReleaseOfBulkAuthLock() throws InterruptedException, IOException { final LockingManagement memoryLockManager = new MemoryLockManager("default"); final Transaction t1 = Transaction.AUTO_COMMIT; final DefaultTransaction t2 = new DefaultTransaction(); t2.addAuthorization("auth1"); final FeatureLock lock = new FeatureLock("auth1", 1 /* minute */); memoryLockManager.lockFeatureID("sometime", "f4", t1, lock); memoryLockManager.lock(t2, "f4"); t2.commit(); // commit should not take away the lock assertTrue(memoryLockManager.exists("auth1")); memoryLockManager.release("auth1", t1); assertFalse(memoryLockManager.exists("auth1")); t1.close(); } @Test public void testReset() throws InterruptedException, IOException { final LockingManagement memoryLockManager = new MemoryLockManager("default"); final Transaction t1 = Transaction.AUTO_COMMIT; final FeatureLock lock = new FeatureLock("auth2", 1 /* minute */); memoryLockManager.lockFeatureID("sometime", "f2", t1, lock); memoryLockManager.refresh("auth2", t1); assertTrue(memoryLockManager.exists("auth2")); memoryLockManager.release("auth2", t1); assertFalse(memoryLockManager.exists("auth2")); } @Test public void testBlockinLock() throws InterruptedException, IOException { final LockingManagement memoryLockManager = new MemoryLockManager(UUID.randomUUID().toString()); final DefaultTransaction t1 = new DefaultTransaction(); memoryLockManager.lock(t1, "f3"); final DefaultTransaction t2 = new DefaultTransaction(); final Thread commiter = new Thread(new Runnable() { @Override public void run() { try { Thread.sleep(4000); // System.out.println("commit"); t1.commit(); } catch (final InterruptedException e) { e.printStackTrace(); throw new RuntimeException(e); } catch (final IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } }); final long currentTime = System.currentTimeMillis(); commiter.start(); // will block\ // System.out.println("t2"); memoryLockManager.lock(t2, "f3"); final long endTime = 
System.currentTimeMillis(); // System.out.println(endTime + " > " + currentTime); assertTrue((endTime - currentTime) >= 3800); commiter.join(); t2.commit(); t2.close(); t1.close(); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/CqlQueryFilterIteratorTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query; public class CqlQueryFilterIteratorTest { // TODO figure out if we need this test, I don't think it was really testing // what it seems to intend to test though because MockAccumulo is not going // to use the VFSClassloader, we can test URLstreamhandlerfactory without a // dependency on cql or a dependency on accumulo // private DataStore createDataStore() // throws IOException { // final Map params = new HashMap(); // params.put( // "gwNamespace", // "test_" + getClass().getName()); // return new GeoWaveGTDataStoreFactory( // new MemoryStoreFactoryFamily()).createNewDataStore(params); // } // // @Test // public void test() // throws SchemaException, // IOException, // ParseException { // final DataStore dataStore = createDataStore(); // // final SimpleFeatureType type = DataUtilities.createType( // "CqlQueryFilterIteratorTest", // "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String"); // // dataStore.createSchema(type); // // final Transaction transaction1 = new DefaultTransaction(); // // final FeatureWriter writer = // dataStore.getFeatureWriter( // "CqlQueryFilterIteratorTest", // transaction1); // final SimpleFeature newFeature = writer.next(); // newFeature.setAttribute( // "pop", // Long.valueOf(100)); // newFeature.setAttribute( // "pid", // "a89dhd-123-dxc"); // newFeature.setAttribute( // "geometry", // new WKTReader().read("LINESTRING (30 10, 10 30, 40 40)")); // writer.write(); // writer.close(); // // transaction1.commit(); // // final FilterFactoryImpl factory = new FilterFactoryImpl(); // final Expression exp1 = factory.property("pid"); // 
final Expression exp2 = factory.literal("a89dhd-123-dxc"); // final Filter f = factory.equal( // exp1, // exp2, // false); // // final MockInstance mockDataInstance = new MockInstance( // "CqlQueryFilterIteratorTest"); // final Connector mockDataConnector = mockDataInstance.getConnector( // "root", // new PasswordToken( // new byte[0])); // final BasicAccumuloOperations dataOps = new BasicAccumuloOperations( // mockDataConnector); // // final AccumuloIndexStore indexStore = new AccumuloIndexStore( // dataOps); // // final String tableName = IndexType.SPATIAL_VECTOR.getDefaultId(); // final ScannerBase scanner = dataOps.createScanner(tableName); // // final AccumuloAdapterStore adapterStore = new AccumuloAdapterStore( // dataOps); // // initScanner( // scanner, // indexStore.getIndex(new ByteArrayId( // IndexType.SPATIAL_VECTOR.getDefaultId())), // (DataAdapter) adapterStore.getAdapter(new ByteArrayId( // "CqlQueryFilterIteratorTest")), // f); // // final Iterator> it = scanner.iterator(); // assertTrue(it.hasNext()); // int count = 0; // while (it.hasNext()) { // it.next(); // count++; // } // // line string covers more than one tile // assertTrue(count >= 1); // // } // // @Test // public void testStreamHandlerFactoryConflictResolution() { // unsetURLStreamHandlerFactory(); // URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory()); // try { // Class.forName(CqlQueryFilterIterator.class.getName()); // } // catch (final Exception e) { // Assert.fail("Iterator did not handle an alread loaded URLStreamHandler, exception was: " // + e.getLocalizedMessage()); // } // catch (final Error e) { // Assert.fail("Iterator did not handle an alread loaded URLStreamHandler, error was: " // + e.getLocalizedMessage()); // } // Assert.assertEquals( // unsetURLStreamHandlerFactory(), // FsUrlStreamHandlerFactory.class.getName()); // URL.setURLStreamHandlerFactory(new UnitTestCustomStreamHandlerFactory()); // try { // final Method m = 
CqlQueryFilterIterator.class.getDeclaredMethod( // "initialize", // null); // m.setAccessible(true); // m.invoke(null); // } // catch (final NoSuchMethodException e) { // Assert.fail("Error changing scope of CqlQueryFilterIterator init() method"); // } // catch (final InvocationTargetException e) { // if (e.getTargetException().getMessage().equals( // "factory already defined")) { // Assert.assertEquals( // unsetURLStreamHandlerFactory(), // UnitTestCustomStreamHandlerFactory.class.getName()); // URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory()); // return; // } // Assert.fail("Error invoking scope of CqlQueryFilterIterator init() method"); // } // catch (final IllegalAccessException e) { // Assert.fail("Error accessing scope of CqlQueryFilterIterator init() method"); // } // Assert.fail("Loading conflicting duplicate StreamHandler factories did not throw an error"); // } // // private static String unsetURLStreamHandlerFactory() { // try { // final Field f = URL.class.getDeclaredField("factory"); // f.setAccessible(true); // final Object curFac = f.get(null); // f.set( // null, // null); // URL.setURLStreamHandlerFactory(null); // return curFac.getClass().getName(); // } // catch (final Exception e) { // return null; // } // } // // public class UnitTestCustomStreamHandlerFactory implements // java.net.URLStreamHandlerFactory // { // public UnitTestCustomStreamHandlerFactory() {} // // @Override // public URLStreamHandler createURLStreamHandler( // final String protocol ) { // if (protocol.equals("http")) { // return new sun.net.www.protocol.http.Handler(); // } // else if (protocol.equals("https")) { // return new sun.net.www.protocol.https.Handler(); // } // return null; // } // } // // private void initScanner( // final ScannerBase scanner, // final Index index, // final DataAdapter dataAdapter, // final Filter cqlFilter ) { // final IteratorSetting iteratorSettings = new IteratorSetting( // CqlQueryFilterIterator.CQL_QUERY_ITERATOR_PRIORITY, // 
CqlQueryFilterIterator.CQL_QUERY_ITERATOR_NAME, // CqlQueryFilterIterator.class); // iteratorSettings.addOption( // CqlQueryFilterIterator.CQL_FILTER, // FilterToCQLTool.toCQL(cqlFilter)); // iteratorSettings.addOption( // CqlQueryFilterIterator.DATA_ADAPTER, // ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(dataAdapter))); // iteratorSettings.addOption( // CqlQueryFilterIterator.MODEL, // ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(index.getIndexModel()))); // // scanner.addScanIterator(iteratorSettings); // } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/TemporalRangeTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.text.ParseException; import java.util.Calendar; import java.util.TimeZone; import java.util.UUID; import org.geotools.data.DataStore; import org.geotools.data.DataUtilities; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureWriter; import org.geotools.data.Transaction; import org.geotools.feature.SchemaException; import org.geotools.filter.text.cql2.CQLException; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.BaseDataStoreTest; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginException; import org.locationtech.geowave.adapter.vector.util.DateUtilities; import org.locationtech.geowave.core.geotime.store.query.TemporalRange; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic; import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.PrecisionModel; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class TemporalRangeTest extends BaseDataStoreTest { DataStore dataStore; SimpleFeatureType type; GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)); @Before public void setup() throws SchemaException, CQLException, IOException, 
GeoWavePluginException { dataStore = createDataStore(); type = DataUtilities.createType( "geostuff", "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String,when:Date"); dataStore.createSchema(type); } @Test public void test() throws ParseException, IOException { final Calendar gmt = Calendar.getInstance(TimeZone.getTimeZone("GMT")); final Calendar local = Calendar.getInstance(TimeZone.getTimeZone("EDT")); local.setTimeInMillis(gmt.getTimeInMillis()); final TemporalRange rGmt = new TemporalRange(gmt.getTime(), gmt.getTime()); final TemporalRange rLocal = new TemporalRange(local.getTime(), local.getTime()); rGmt.fromBinary(rGmt.toBinary()); assertEquals(gmt.getTime(), rGmt.getEndTime()); assertEquals(rLocal.getEndTime(), rGmt.getEndTime()); assertEquals(rLocal.getEndTime().getTime(), rGmt.getEndTime().getTime()); final Transaction transaction1 = new DefaultTransaction(); final FeatureWriter writer = dataStore.getFeatureWriter(type.getTypeName(), transaction1); final SimpleFeature newFeature = writer.next(); newFeature.setAttribute("pop", Long.valueOf(77)); newFeature.setAttribute("pid", UUID.randomUUID().toString()); newFeature.setAttribute("when", DateUtilities.parseISO("2005-05-19T19:32:56-04:00")); newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(43.454, 28.232))); FeatureDataAdapter adapter = new FeatureDataAdapter(type); final TimeRangeStatistic stats = new TimeRangeStatistic(type.getTypeName(), "when"); final TimeRangeValue statValue = stats.createEmpty(); statValue.entryIngested(adapter, newFeature); assertEquals( DateUtilities.parseISO("2005-05-19T23:32:56Z"), statValue.asTemporalRange().getStartTime()); writer.close(); transaction1.close(); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/cql/CQLQueryFilterTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query.cql; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.filter.FilterFactoryImpl; import org.geotools.filter.text.cql2.CQLException; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.ExplicitCQLQuery; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.SinglePartitionInsertionIds; import org.locationtech.geowave.core.store.AdapterToIndexMapping; import org.locationtech.geowave.core.store.adapter.AdapterPersistenceEncoding; import org.locationtech.geowave.core.store.adapter.IndexedAdapterPersistenceEncoding; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.base.BaseDataStoreUtils; import org.locationtech.geowave.core.store.query.filter.FilterList; import org.locationtech.geowave.core.store.query.filter.QueryFilter; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import 
org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.Filter; import org.opengis.filter.expression.Expression; public class CQLQueryFilterTest { SimpleFeatureType type; Object[] defaults; GeometryFactory factory = new GeometryFactory(); @Before public void setup() throws SchemaException, CQLException { type = DataUtilities.createType( "geostuff", "geom:Geometry:srid=4326,pop:java.lang.Long,pid:String"); final List descriptors = type.getAttributeDescriptors(); defaults = new Object[descriptors.size()]; int p = 0; for (final AttributeDescriptor descriptor : descriptors) { defaults[p++] = descriptor.getDefaultValue(); } } @Test public void test() { final FilterFactoryImpl factory = new FilterFactoryImpl(); final Expression exp1 = factory.property("pid"); final Expression exp2 = factory.literal("a89dhd-123-abc"); final Filter f = factory.equal(exp1, exp2, false); final Index spatialIndex = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); final FeatureDataAdapter adapter = new FeatureDataAdapter(type); final AdapterToIndexMapping indexMapping = BaseDataStoreUtils.mapAdapterToIndex(adapter.asInternalAdapter((short) -1), spatialIndex); final InternalGeotoolsFeatureDataAdapter internalAdapter = (InternalGeotoolsFeatureDataAdapter) adapter.asInternalAdapter((short) -1); final ExplicitCQLQuery cqlQuery = new ExplicitCQLQuery(null, f, internalAdapter, indexMapping); final List filters = cqlQuery.createFilters(spatialIndex); final List dFilters = new ArrayList<>(); for (final QueryFilter filter : filters) { dFilters.add(filter); } final FilterList dFilterList = new FilterList(dFilters); assertTrue( dFilterList.accept( spatialIndex.getIndexModel(), getEncodings( spatialIndex, internalAdapter.encode(createFeature(), indexMapping, spatialIndex)).get(0))); } private static List getEncodings( final Index index, final AdapterPersistenceEncoding encoding) { final InsertionIds ids = 
encoding.getInsertionIds(index); final ArrayList encodings = new ArrayList<>(); for (final SinglePartitionInsertionIds partitionIds : ids.getPartitionKeys()) { for (final byte[] sortKey : partitionIds.getSortKeys()) { encodings.add( new IndexedAdapterPersistenceEncoding( encoding.getInternalAdapterId(), encoding.getDataId(), partitionIds.getPartitionKey(), sortKey, ids.getSize(), encoding.getCommonData(), encoding.getUnknownData(), encoding.getAdapterExtendedData())); } } return encodings; } private SimpleFeature createFeature() { final SimpleFeature instance = SimpleFeatureBuilder.build(type, defaults, UUID.randomUUID().toString()); instance.setAttribute("pop", Long.valueOf(100)); instance.setAttribute("pid", "a89dhd-123-abc"); instance.setAttribute("geom", factory.createPoint(new Coordinate(27.25, 41.25))); return instance; } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/cql/CQLQueryTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.query.cql; import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.List; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.filter.text.cql2.CQLException; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialOptions; import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider; import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions; import org.locationtech.geowave.core.geotime.store.InternalGeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.ExplicitCQLQuery; import org.locationtech.geowave.core.geotime.store.query.OptimalCQLQuery; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.store.api.Index; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; public class CQLQueryTest { private static final Index SPATIAL_INDEX = SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions()); private static final Index SPATIAL_TEMPORAL_INDEX = SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions( new SpatialTemporalOptions()); SimpleFeatureType type; InternalGeotoolsFeatureDataAdapter adapter; @Before public void init() throws SchemaException { type = DataUtilities.createType( 
"geostuff", "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,pid:String"); final FeatureDataAdapter a = new FeatureDataAdapter(type); adapter = (InternalGeotoolsFeatureDataAdapter) a.asInternalAdapter((short) -1); } @Test public void testGeoAndTemporalWithMatchingIndex() throws CQLException { final ExplicitCQLQuery query = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20) and when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z", adapter, null, null); final List constraints = query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX); assertTrue( Arrays.equals( constraints.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2, 1.116534776001E12})); assertTrue( Arrays.equals( constraints.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3, 1.116538375999E12})); } @Test public void testGeoAndTemporalWithNonMatchingIndex() throws CQLException { final ExplicitCQLQuery query = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20) and when during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z", adapter, null, null); final List constraints = query.getIndexConstraints(SPATIAL_INDEX); assertTrue( Arrays.equals(constraints.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2})); assertTrue( Arrays.equals(constraints.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3})); } @Test public void testGeoWithMatchingIndex() throws CQLException { final ExplicitCQLQuery query = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20)", adapter, null, null); final List constraints = query.getIndexConstraints(SPATIAL_INDEX); assertTrue( Arrays.equals(constraints.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2})); assertTrue( Arrays.equals(constraints.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3})); } @Test public void testNoConstraintsWithGeoIndex() throws CQLException { final ExplicitCQLQuery query = 
(ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery("pid = '10'", adapter, null, null); assertTrue(query.getIndexConstraints(SPATIAL_INDEX).isEmpty()); } @Test public void testNoConstraintsWithTemporalIndex() throws CQLException { final ExplicitCQLQuery query = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery("pid = '10'", adapter, null, null); assertTrue(query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX).isEmpty()); } @Test public void testGeoWithTemporalIndex() throws CQLException { final ExplicitCQLQuery query = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20)", adapter, null, null); assertTrue(query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX).isEmpty()); } @Test public void testGeoTemporalRangeWithMatchingIndex() throws CQLException, SchemaException { final SimpleFeatureType type = DataUtilities.createType( "geostuff", "geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,end:Date,pid:String"); final FeatureDataAdapter a = new FeatureDataAdapter(type); final InternalGeotoolsFeatureDataAdapter adapter = (InternalGeotoolsFeatureDataAdapter) a.asInternalAdapter((short) -1); final ExplicitCQLQuery query = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20) and start during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z", adapter, null, null); final List constraints = query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX); assertTrue( Arrays.equals( constraints.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2, 1.116534776001E12})); assertTrue( Arrays.equals( constraints.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3, 1.116538375999E12})); final ExplicitCQLQuery query2 = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20) and end during 2005-05-19T20:32:56Z/2005-05-19T21:32:56Z", adapter, null, null); final List constraints2 = query2.getIndexConstraints(SPATIAL_TEMPORAL_INDEX); assertTrue( Arrays.equals( 
constraints2.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2, 1.116534776001E12})); assertTrue( Arrays.equals( constraints2.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3, 1.116538375999E12})); final ExplicitCQLQuery query3 = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20) and (start before 2005-05-19T21:32:56Z and end after 2005-05-19T20:32:56Z)", adapter, null, null); final List constraints3 = query3.getIndexConstraints(SPATIAL_TEMPORAL_INDEX); assertTrue( Arrays.equals( constraints3.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2, 1.116534776001E12})); assertTrue( Arrays.equals( constraints3.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3, 1.116538375999E12})); final ExplicitCQLQuery query4 = (ExplicitCQLQuery) OptimalCQLQuery.createOptimalQuery( "BBOX(geometry,27.20,41.30,27.30,41.20) and (start after 2005-05-19T20:32:56Z and end after 2005-05-19T20:32:56Z)", adapter, null, null); final List constraints4 = query4.getIndexConstraints(SPATIAL_TEMPORAL_INDEX); assertTrue( Arrays.equals( constraints4.get(0).getMinValuesPerDimension(), new Double[] {27.2, 41.2, 1.116534776001E12})); assertTrue( Arrays.equals( constraints4.get(0).getMaxValuesPerDimension(), new Double[] {27.3, 41.3, 9.223372036854775999E18})); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/query/cql/FilterToCQLToolTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.query.cql;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.apache.commons.lang3.tuple.Pair;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.filter.FilterFactoryImpl;
import org.geotools.filter.identity.FeatureIdImpl;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.filter.text.ecql.ECQL;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.util.FeatureDataUtils;
import org.locationtech.geowave.core.geotime.util.FilterToCQLTool;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.filter.Filter;
import org.opengis.filter.Id;
import org.opengis.filter.expression.Expression;

/**
 * Tests {@link FilterToCQLTool} round-tripping of GeoTools filters to/from (E)CQL text,
 * including the DWITHIN-to-INTERSECTS polygon rewrite.
 */
public class FilterToCQLToolTest {
  SimpleFeatureType type;

  @Before
  public void setup() throws SchemaException, CQLException {
    type =
        DataUtilities.createType(
            "geostuff",
            "geom:Geometry:srid=4326,pop:java.lang.Long,pid:String");
  }

  @Test
  public void testDate() throws CQLException {
    assertNotNull(FilterToCQLTool.toFilter("when = 2005-05-19T21:32:56Z"));
  }

  @Test
  public void tesFid() {
    // NOTE(review): method name has a typo ("tesFid"); kept to avoid breaking any
    // tooling that selects tests by name
    final FilterFactoryImpl factory = new FilterFactoryImpl();
    final Id f = factory.id(new FeatureIdImpl("123-abc"));
    final String ss = ECQL.toCQL(f);
    // feature id should be rendered as a quoted literal
    assertTrue(ss.contains("'123-abc'"));
  }

  @Test
  public void test() {
    final FilterFactoryImpl factory = new FilterFactoryImpl();
    final Expression exp1 = factory.property("pid");
    final Expression exp2 = factory.literal("a89dhd-123-abc");
    final Filter f = factory.equal(exp1, exp2, false);
    final String ss = ECQL.toCQL(f);
    assertTrue(ss.contains("'a89dhd-123-abc'"));
  }

  @Test
  public void testDWithinFromCQLFilter() throws CQLException {
    final Filter filter = CQL.toFilter("DWITHIN(geom, POINT(-122.7668 0.4979), 233.7, meters)");
    // fixDWithin should rewrite the distance predicate as an INTERSECTS against a buffer polygon
    final String gtFilterStr = ECQL.toCQL(FilterToCQLTool.fixDWithin(filter));
    assertTrue(gtFilterStr.contains("INTERSECTS(geom, POLYGON (("));
    testFilter(FilterToCQLTool.toFilter(gtFilterStr));
  }

  @Test
  public void testDWithinFromTool() throws CQLException {
    testFilter(FilterToCQLTool.toFilter("DWITHIN(geom, POINT(-122.7668 0.4979), 233.7, meters)"));
  }

  /**
   * Asserts the given filter accepts a point just inside the 233.7 m radius and rejects one just
   * outside it.
   */
  public void testFilter(final Filter gtFilter) {
    final SimpleFeature newFeature =
        FeatureDataUtils.buildFeature(
            type,
            new Pair[] {
                Pair.of(
                    "geom",
                    new GeometryFactory().createPoint(
                        new Coordinate(-122.76570055844142, 0.4979))),
                Pair.of("pop", Long.valueOf(100))});
    assertTrue(gtFilter.evaluate(newFeature));
    final SimpleFeature newFeatureToFail =
        FeatureDataUtils.buildFeature(
            type,
            new Pair[] {
                Pair.of(
                    "geom",
                    new GeometryFactory().createPoint(new Coordinate(-122.7690, 0.4980))),
                Pair.of("pop", Long.valueOf(100))});
    assertFalse(gtFilter.evaluate(newFeatureToFail));
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/CountMinSketchStatisticsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.adapter.vector.stats; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.text.ParseException; import java.util.List; import java.util.Locale; import java.util.Random; import java.util.UUID; import org.geotools.data.DataUtilities; import org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; import org.geotools.filter.text.cql2.CQLException; import org.junit.Before; import org.junit.Test; import org.locationtech.geowave.adapter.vector.FeatureDataAdapter; import org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic; import org.locationtech.geowave.core.store.statistics.field.CountMinSketchStatistic.CountMinSketchValue; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.PrecisionModel; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; public class CountMinSketchStatisticsTest { private SimpleFeatureType schema; FeatureDataAdapter dataAdapter; private final String sample = "The construction of global warming [Source: CHE] Climate warming, whatever one concludes about its effect on the earth, is insufficiently understood as a concept that has been constructed by scientists, politicians and others, argues David Demerrit, a lecturer in geography at King's College London, in an exchange with Stephen H. Schneider, a professor of biological sciences at Stanford University. 
Many observers consider the phenomenon's construction -- as a global-scale environmental problem caused by the universal physical properties of greenhouse gases -- to be reductionist, Mr. Demerrit writes. Yet this reductionist formulation serves a variety of political purposes, including obscuring the role of rich nations in producing the vast majority of the greenhouse gases." + "Mr. Demerrit says his objective is to unmask the ways that scientific judgments " + "have both reinforced and been reinforced by certain political considerations about managing" + "global warming. Scientific uncertainty, he suggests, is emphasized in a way that reinforces dependence on experts. He is skeptical of efforts to increase public technical knowledge of the phenomenon, and instead urges efforts to increase public understanding of and therefore trust in the social process through which the facts are scientifically determined." + "In response, Mr. Schneider agrees that the conclusion that science is at least partially socially constructed, even if still news to some scientists, is clearly established." + "He bluntly states, however, that if scholars in the social studies of science are to be heard by more scientists, they will have to be careful to back up all social theoretical assertions with large numbers of broadly representative empirical examples." + " Mr. Schneider also questions Mr. 
Demerrit's claim that scientists are motivated by politics to conceive of climate warming as a global problem rather than one created primarily by rich nations: Most scientists are woefully unaware of the social context of the implications of their work and are too naive to be politically conspiratorial He says: What needs to be done is to go beyond platitudes about values embedded in science and to show explicitly, via many detailed and representative empirical examples, precisely how those social factors affected the outcome, and how it might have been otherwise if the process were differently constructed. The exchange is available online to subscribers of the journal at http://www.blackwellpublishers.co.uk/journals/anna"; final String[] pidSet = sample.toLowerCase(Locale.ENGLISH).replaceAll("[,.:\\[\\]']", "").split(" "); final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED)); @Before public void setup() throws SchemaException, CQLException, ParseException { schema = DataUtilities.createType("sp.geostuff", "geometry:Geometry:srid=4326,pid:String"); dataAdapter = new FeatureDataAdapter(schema); } final Random rnd = new Random(7733); private SimpleFeature create() { return create(pidSet[Math.abs(rnd.nextInt()) % pidSet.length]); } private SimpleFeature create(final String pid) { final List descriptors = schema.getAttributeDescriptors(); final Object[] defaults = new Object[descriptors.size()]; int p = 0; for (final AttributeDescriptor descriptor : descriptors) { defaults[p++] = descriptor.getDefaultValue(); } final SimpleFeature newFeature = SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString()); newFeature.setAttribute("pid", pid); return newFeature; } @Test public void test() { final CountMinSketchStatistic stat = new CountMinSketchStatistic("", "pid"); final CountMinSketchValue statValue = stat.createEmpty(); for (int i = 0; i < 10000; i++) { statValue.entryIngested(dataAdapter, create()); } 
statValue.entryIngested(dataAdapter, create("barney")); final CountMinSketchValue statValue2 = stat.createEmpty(); for (int i = 0; i < 10000; i++) { statValue2.entryIngested(dataAdapter, create()); } statValue2.entryIngested(dataAdapter, create("global")); statValue2.entryIngested(dataAdapter, create("fred")); assertTrue(statValue2.count("global") > 0); assertTrue(statValue2.count("fred") > 0); assertTrue(statValue.count("fred") == 0); assertTrue(statValue.count("barney") > 0); assertTrue(statValue2.count("barney") == 0); statValue.merge(statValue); assertTrue(statValue2.count("global") > 0); assertTrue(statValue2.count("fred") > 0); statValue2.fromBinary(statValue.toBinary()); assertTrue(statValue2.count("barney") > 0); assertEquals(statValue2.getValue().toString(), statValue.getValue().toString()); } } ================================================ FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/FixedBinNumericHistogramStatisticTest.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.vector.stats;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.FixedBinNumericHistogramStatistic.FixedBinNumericHistogramValue;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

/**
 * Tests the fixed-bin numeric histogram statistic over the "pop" attribute: cdf/count/range
 * queries after ingest, merging, and binary round-trips. All randomness uses fixed seeds so
 * results are deterministic.
 */
public class FixedBinNumericHistogramStatisticTest {
  private SimpleFeatureType schema;
  FeatureDataAdapter dataAdapter;
  GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  @Before
  public void setup() throws SchemaException, CQLException, ParseException {
    schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Double,when:Date,whennot:Date,somewhere:Polygon,pid:String");
    dataAdapter = new FeatureDataAdapter(schema);
  }

  /** Builds a feature whose "pop" attribute carries the value under test. */
  private SimpleFeature create(final Double val) {
    final List descriptors = schema.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      defaults[p++] = descriptor.getDefaultValue();
    }
    final SimpleFeature newFeature =
        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());
    newFeature.setAttribute("pop", val);
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", new Date());
    newFeature.setAttribute("whennot", new Date());
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    return newFeature;
  }

  /** Positive-valued data: 10003 slowly-increasing entries merged with 20000 skewed ones. */
  @Test
  public void testPositive() {
    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic("", "pop");
    final FixedBinNumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    statValue.entryIngested(dataAdapter, create(100.0));
    statValue.entryIngested(dataAdapter, create(101.0));
    statValue.entryIngested(dataAdapter, create(2.0));
    double next = 1;
    for (int i = 0; i < 10000; i++) {
      next = next + (Math.round(rand.nextDouble()));
      statValue.entryIngested(dataAdapter, create(next));
    }
    final FixedBinNumericHistogramValue statValue2 = stat.createEmpty();
    next += 1000;
    // 10000 copies of one skewed value, then 10000 values spread over the next 1000 units.
    final double skewvalue = next + (1000 * rand.nextDouble());
    final SimpleFeature skewedFeature = create(skewvalue);
    for (int i = 0; i < 10000; i++) {
      statValue2.entryIngested(dataAdapter, skewedFeature);
    }
    next += 1000;
    double max = 0;
    for (long i = 0; i < 10000; i++) {
      final double val = next + (1000 * rand.nextDouble());
      statValue2.entryIngested(dataAdapter, create(val));
      max = Math.max(val, max);
    }
    // Binary round-trip must preserve the distribution before merging.
    final byte[] b = statValue2.toBinary();
    statValue2.fromBinary(b);
    assertEquals(1.0, statValue2.cdf(max + 1), 0.00001);
    statValue.merge(statValue2);
    assertEquals(1.0, statValue.cdf(max + 1), 0.00001);
    // Roughly one third of the 30003 entries fall below the first population's range.
    assertEquals(.33, statValue.cdf(skewvalue - 1000), 0.01);
    assertEquals(30003, sum(statValue.count(10)));
    final double r = statValue.percentPopulationOverRange(skewvalue - 1000, skewvalue + 1000);
    assertTrue((r > 0.45) && (r < 0.55));
  }

  /** Merging populations whose ranges differ by many orders of magnitude must not fail. */
  @Test
  public void testRapidIncreaseInRange() {
    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic("", "pop");
    final FixedBinNumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    double next = 1;
    for (int i = 0; i < 10000; i++) {
      next = next + (rand.nextDouble() * 100.0);
      statValue.entryIngested(dataAdapter, create(next));
    }
    FixedBinNumericHistogramValue statValue2 = stat.createEmpty();
    next = 4839434.547854578;
    for (long i = 0; i < 10000; i++) {
      final double val = next + (1000.0 * rand.nextDouble());
      statValue2.entryIngested(dataAdapter, create(val));
    }
    // Serialize/deserialize both sides before each merge to exercise the binary path.
    byte[] b = statValue2.toBinary();
    statValue2.fromBinary(b);
    b = statValue.toBinary();
    statValue.fromBinary(b);
    statValue.merge(statValue2);
    statValue2 = stat.createEmpty();
    for (int i = 0; i < 40000; i++) {
      next = (Math.round(rand.nextDouble()));
      statValue2.entryIngested(dataAdapter, create(next));
    }
    final FixedBinNumericHistogramValue statValue3 = stat.createEmpty();
    next = 54589058545734.049454545458;
    for (long i = 0; i < 10000; i++) {
      final double val = next + (rand.nextDouble());
      statValue3.entryIngested(dataAdapter, create(val));
    }
    b = statValue2.toBinary();
    statValue2.fromBinary(b);
    b = statValue3.toBinary();
    statValue3.fromBinary(b);
    statValue.merge(statValue3);
    statValue.merge(statValue2);
    b = statValue.toBinary();
    statValue.fromBinary(b);
  }

  /** Symmetric positive and negative drifts: cdf(0) should land near the midpoint. */
  @Test
  public void testMix() {
    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic("", "pop");
    final FixedBinNumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    double min = 0;
    double max = 0;
    double next = 0;
    for (int i = 0; i < 10000; i++) {
      next = next + (100 * rand.nextDouble());
      statValue.entryIngested(dataAdapter, create(next));
      max = Math.max(next, max);
    }
    next = 0;
    for (int i = 0; i < 10000; i++) {
      next = next - (100 * rand.nextDouble());
      statValue.entryIngested(dataAdapter, create(next));
      min = Math.min(next, min);
    }
    assertEquals(0.0, statValue.cdf(min), 0.00001);
    assertEquals(1.0, statValue.cdf(max), 0.00001);
    assertEquals(0.5, statValue.cdf(0), 0.05);
    assertEquals(20000, sum(statValue.count(10)));
    final double r = statValue.percentPopulationOverRange(min / 2, max / 2);
    assertEquals(0.5, r, 0.05);
  }

  /** Gaussian-distributed data centered on zero. */
  @Test
  public void testMix2() {
    final FixedBinNumericHistogramStatistic stat = new FixedBinNumericHistogramStatistic("", "pop");
    final FixedBinNumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    final double min = 0;
    double max = 0;
    double next = 0;
    for (int i = 0; i < 100000; i++) {
      next = 1000 * rand.nextGaussian();
      statValue.entryIngested(dataAdapter, create(next));
      max = Math.max(next, max);
    }
    assertEquals(1.0, statValue.cdf(max), 0.00001);
    assertEquals(0.5, statValue.cdf(0), 0.05);
    assertEquals(100000, sum(statValue.count(10)));
    final double r = statValue.percentPopulationOverRange(min / 2, max / 2);
    assertEquals(0.5, r, 0.05);
    System.out.println(stat.toString());
  }

  /** Sums the per-bin counts returned by count(bins). */
  private long sum(final long[] list) {
    long result = 0;
    for (final long v : list) {
      result += v;
    }
    return result;
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/HyperLogLogStaticticsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.vector.stats;

import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic;
import org.locationtech.geowave.core.store.statistics.field.HyperLogLogStatistic.HyperLogLogPlusValue;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

/**
 * Tests the HyperLogLog++ cardinality statistic over the "pid" attribute, comparing the sketch's
 * estimate against exact cardinalities tracked in companion Sets. (Class name retains the
 * historical "Statictics" typo because it matches the file name.)
 */
public class HyperLogLogStaticticsTest {
  private SimpleFeatureType schema;
  FeatureDataAdapter dataAdapter;

  // First corpus of words for random pid values.
  private final String sample1 =
      "The construction of global warming [Source: CHE] Climate warming, whatever one concludes about its effect on the earth, is insufficiently understood as a concept that has been constructed by scientists, politicians and others, argues David Demerrit, a lecturer in geography at King's College London, in an exchange with Stephen H. Schneider, a professor of biological sciences at Stanford University. Many observers consider the phenomenon's construction -- as a global-scale environmental problem caused by the universal physical properties of greenhouse gases -- to be reductionist, Mr. Demerrit writes. Yet this reductionist formulation serves a variety of political purposes, including obscuring the role of rich nations in producing the vast majority of the greenhouse gases."
          + "Mr. Demerrit says his objective is to unmask the ways that scientific judgments "
          + "have both reinforced and been reinforced by certain political considerations about managing"
          + "global warming. Scientific uncertainty, he suggests, is emphasized in a way that reinforces dependence on experts. He is skeptical of efforts to increase public technical knowledge of the phenomenon, and instead urges efforts to increase public understanding of and therefore trust in the social process through which the facts are scientifically determined."
          + "In response, Mr. Schneider agrees that the conclusion that science is at least partially socially constructed, even if still news to some scientists, is clearly established."
          + "He bluntly states, however, that if scholars in the social studies of science are to be heard by more scientists, they will have to be careful to back up all social theoretical assertions with large numbers of broadly representative empirical examples."
          + " Mr. Schneider also questions Mr. Demerrit's claim that scientists are motivated by politics to conceive of climate warming as a global problem rather than one created primarily by rich nations: Most scientists are woefully unaware of the social context of the implications of their work and are too naive to be politically conspiratorial He says: What needs to be done is to go beyond platitudes about values embedded in science and to show explicitly, via many detailed and representative empirical examples, precisely how those social factors affected the outcome, and how it might have been otherwise if the process were differently constructed. The exchange is available online to subscribers of the journal at http://www.blackwellpublishers.co.uk/journals/anna";

  // Second, mostly-disjoint corpus.
  private final String sample2 =
      "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum";

  final String[] pidSetOne =
      sample1.toLowerCase(Locale.ENGLISH).replaceAll("[,.:\\[\\]']", "").split(" ");
  final String[] pidSetTwo =
      sample2.toLowerCase(Locale.ENGLISH).replaceAll("[,.:\\[\\]']", "").split(" ");
  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  @Before
  public void setup() throws SchemaException, CQLException, ParseException {
    schema = DataUtilities.createType("sp.geostuff", "geometry:Geometry:srid=4326,pid:String");
    dataAdapter = new FeatureDataAdapter(schema);
  }

  // Fixed seed keeps the test deterministic across runs.
  final Random rnd = new Random(7733);

  /** Creates a feature with a random pid from the pool, recording it in the exact-count set. */
  private SimpleFeature create(final String[] pidSet, final Set set) {
    return create(pidSet[Math.abs(rnd.nextInt()) % pidSet.length], set);
  }

  /** Creates a feature with the given pid and records it in the exact-count set. */
  private SimpleFeature create(final String pid, final Set set) {
    final List descriptors = schema.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      defaults[p++] = descriptor.getDefaultValue();
    }
    final SimpleFeature newFeature =
        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());
    newFeature.setAttribute("pid", pid);
    set.add(pid);
    return newFeature;
  }

  @Test
  public void test() {
    final Set firstSet = new HashSet<>();
    final Set secondSet = new HashSet<>();
    // Precision 16; estimates below are required to be within 10 of the true cardinality.
    final HyperLogLogStatistic stat = new HyperLogLogStatistic("", "pid", 16);
    final HyperLogLogPlusValue statValue = stat.createEmpty();
    for (int i = 0; i < 10000; i++) {
      statValue.entryIngested(dataAdapter, create(pidSetOne, firstSet));
    }
    final HyperLogLogPlusValue statValue2 = stat.createEmpty();
    for (int i = 0; i < 10000; i++) {
      statValue2.entryIngested(dataAdapter, create(pidSetTwo, secondSet));
    }
    assertTrue(Math.abs(firstSet.size() - statValue.cardinality()) < 10);
    assertTrue(Math.abs(secondSet.size() - statValue2.cardinality()) < 10);
    // Union of the exact sets mirrors the merged sketch.
    secondSet.addAll(firstSet);
    statValue.merge(statValue2);
    assertTrue(Math.abs(secondSet.size() - statValue.cardinality()) < 10);
    // Binary round-trip must preserve the merged estimate.
    statValue2.fromBinary(statValue.toBinary());
    assertTrue(Math.abs(secondSet.size() - statValue2.cardinality()) < 10);
    System.out.println(statValue2.toString());
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/stats/NumericHistogramStatisticsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.vector.stats;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.apache.commons.math.util.MathUtils;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.filter.text.cql2.CQLException;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic;
import org.locationtech.geowave.core.store.statistics.field.NumericHistogramStatistic.NumericHistogramValue;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

/**
 * Tests the dynamic numeric histogram statistic over the "pop" attribute: cdf/count/range queries
 * for positive, negative, and mixed data, plus merging and binary round-trips. All randomness uses
 * fixed seeds so results are deterministic.
 */
public class NumericHistogramStatisticsTest {
  private SimpleFeatureType schema;
  FeatureDataAdapter dataAdapter;
  GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  @Before
  public void setup() throws SchemaException, CQLException, ParseException {
    schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,somewhere:Polygon,pid:String");
    dataAdapter = new FeatureDataAdapter(schema);
  }

  /** Builds a feature whose "pop" attribute carries the value under test. */
  private SimpleFeature create(final Double val) {
    final List descriptors = schema.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      defaults[p++] = descriptor.getDefaultValue();
    }
    final SimpleFeature newFeature =
        SimpleFeatureBuilder.build(schema, defaults, UUID.randomUUID().toString());
    newFeature.setAttribute("pop", val);
    newFeature.setAttribute("pid", UUID.randomUUID().toString());
    newFeature.setAttribute("when", new Date());
    newFeature.setAttribute("whennot", new Date());
    newFeature.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    return newFeature;
  }

  /** Positive-valued data: a slow ramp merged with a higher-ranged, heavily skewed population. */
  @Test
  public void testPositive() {
    final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
    final NumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    statValue.entryIngested(dataAdapter, create(100.0));
    statValue.entryIngested(dataAdapter, create(101.0));
    statValue.entryIngested(dataAdapter, create(2.0));
    double next = 1;
    for (int i = 0; i < 10000; i++) {
      next = next + (Math.round(rand.nextDouble()));
      statValue.entryIngested(dataAdapter, create(next));
    }
    final NumericHistogramValue statValue2 = stat.createEmpty();
    final double start2 = next;
    double max = 0;
    for (long i = 0; i < 10000; i++) {
      final double val = next + (1000 * rand.nextDouble());
      statValue2.entryIngested(dataAdapter, create(val));
      max = Math.max(val, max);
    }
    // 10000 copies of a single skewed value to stress bin placement.
    final double skewvalue = next + (1000 * rand.nextDouble());
    final SimpleFeature skewedFeature = create(skewvalue);
    for (int i = 0; i < 10000; i++) {
      statValue2.entryIngested(dataAdapter, skewedFeature);
      // skewedFeature.setAttribute("pop", Long.valueOf(next + (long)
      // (1000 * rand.nextDouble())));
    }
    final byte[] b = statValue2.toBinary();
    statValue2.fromBinary(b);
    assertEquals(1.0, statValue2.cdf(max + 1), 0.00001);
    statValue.merge(statValue2);
    assertEquals(1.0, statValue.cdf(max + 1), 0.00001);
    // The first population is ~one third of the 30003 merged entries.
    assertEquals(0.33, statValue.cdf(start2), 0.01);
    assertEquals(30003, sum(statValue.count(10)));
    final double r = statValue.percentPopulationOverRange(skewvalue - 1, skewvalue + 1);
    assertTrue((r > 0.3) && (r < 0.35));
  }

  /** Repeated merges with wildly-ranged data, round-tripping through binary each iteration. */
  @Test
  public void testRapidIncreaseInRange() {
    final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
    final NumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    double next = 1;
    for (int i = 0; i < 100; i++) {
      next = next + (rand.nextDouble() * 100.0);
      statValue.entryIngested(dataAdapter, create(next));
    }
    for (long i = 0; i < 100; i++) {
      final NumericHistogramValue statValue2 = stat.createEmpty();
      for (int j = 0; j < 100; j++) {
        statValue2.entryIngested(
            dataAdapter,
            create(4839000434.547854578 * rand.nextDouble() * rand.nextGaussian()));
      }
      byte[] b = statValue2.toBinary();
      statValue2.fromBinary(b);
      b = statValue.toBinary();
      statValue.fromBinary(b);
      statValue.merge(statValue2);
    }
  }

  /** Mirror of testPositive using entirely negative data. */
  @Test
  public void testNegative() {
    final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
    final NumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    statValue.entryIngested(dataAdapter, create(-100.0));
    statValue.entryIngested(dataAdapter, create(-101.0));
    statValue.entryIngested(dataAdapter, create(-2.0));
    double next = -1;
    for (int i = 0; i < 10000; i++) {
      next = next - (Math.round(rand.nextDouble()));
      statValue.entryIngested(dataAdapter, create(next));
    }
    final NumericHistogramValue statValue2 = stat.createEmpty();
    final double start2 = next;
    double min = 0;
    for (long i = 0; i < 10000; i++) {
      final double val = next - (long) (1000 * rand.nextDouble());
      statValue2.entryIngested(dataAdapter, create(val));
      min = Math.min(val, min);
    }
    final double skewvalue = next - (1000 * rand.nextDouble());
    final SimpleFeature skewedFeature = create(skewvalue);
    for (int i = 0; i < 10000; i++) {
      statValue2.entryIngested(dataAdapter, skewedFeature);
    }
    assertEquals(1.0, statValue2.cdf(0), 0.00001);
    final byte[] b = statValue2.toBinary();
    statValue2.fromBinary(b);
    assertEquals(0.0, statValue2.cdf(min), 0.00001);
    statValue.merge(statValue2);
    assertEquals(1.0, statValue.cdf(0), 0.00001);
    assertEquals(0.66, statValue.cdf(start2), 0.01);
    assertEquals(30003, sum(statValue.count(10)));
    final double r = statValue.percentPopulationOverRange(skewvalue - 1, skewvalue + 1);
    assertTrue((r > 0.3) && (r < 0.35));
  }

  /** Mixed magnitudes including NaN and integer/double extremes; NaN entries are not merged. */
  @Test
  public void testMix() {
    final NumericHistogramStatistic stat = new NumericHistogramStatistic("", "pop");
    final NumericHistogramValue statValue = stat.createEmpty();
    final Random rand = new Random(7777);
    double min = 0;
    double max = 0;
    double next = 0;
    for (int i = 1; i < 300; i++) {
      final NumericHistogramValue statValue2 = stat.createEmpty();
      // Magnitude grows by a decade every 100 iterations.
      final double m = 10000.0 * Math.pow(10.0, ((i / 100) + 1));
      if (i == 50) {
        next = 0.0;
      } else if (i == 100) {
        next = Double.NaN;
      } else if (i == 150) {
        next = Double.MAX_VALUE;
      } else if (i == 200) {
        next = Integer.MAX_VALUE;
      } else if (i == 225) {
        next = Integer.MIN_VALUE;
      } else {
        next = (m * rand.nextDouble() * MathUtils.sign(rand.nextGaussian()));
      }
      statValue2.entryIngested(dataAdapter, create(next));
      if (!Double.isNaN(next)) {
        max = Math.max(next, max);
        min = Math.min(next, min);
        stat.fromBinary(stat.toBinary());
        statValue2.fromBinary(statValue2.toBinary());
        statValue.merge(statValue2);
      }
    }
    assertEquals(0.5, statValue.cdf(0), 0.1);
    assertEquals(0.0, statValue.cdf(min), 0.00001);
    assertEquals(1.0, statValue.cdf(max), 0.00001);
    // 299 iterations minus the single NaN entry.
    assertEquals(298, sum(statValue.count(10)));
  }

  /** Sums the per-bin counts returned by count(bins). */
  private long sum(final long[] list) {
    long result = 0;
    for (final long v : list) {
      result += v;
    }
    return result;
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/util/FeatureDataUtilsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.vector.util;

import static org.junit.Assert.assertEquals;
import org.geotools.feature.SchemaException;
import org.junit.Test;
import org.locationtech.geowave.core.index.StringUtils;
import org.opengis.feature.simple.SimpleFeatureType;

/** Tests for FeatureDataUtils.decodeType with and without an explicit SRID in the spec. */
public class FeatureDataUtilsTest {

  /** A spec carrying srid=4326 should decode with the requested local type name. */
  @Test
  public void testWithSRID() throws SchemaException {
    final SimpleFeatureType type =
        FeatureDataUtils.decodeType(
            "http://somens.org",
            "type1",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String",
            "east");
    assertEquals("type1", type.getName().getLocalPart());
  }

  /**
   * This test only works in some versions. So, comment out for now.
   *
   * public void testWithSRIDAndMisMatch() throws SchemaException { SimpleFeatureType type =
   * FeatureDataUtils.decodeType("http://somens.org", "type1",
   * "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String" , "north");
   * assertEquals("type1",type.getName().getLocalPart()); assertEquals
   * ("NORTH",type.getCoordinateReferenceSystem().getCoordinateSystem
   * ().getAxis(0).getDirection().name()); }
   */
  /** Without an SRID, an empty axis hint should still decode with the requested name. */
  @Test
  public void testWithoutSRID() throws SchemaException {
    final SimpleFeatureType type =
        FeatureDataUtils.decodeType(
            "http://somens.org",
            "type1",
            "geometry:Geometry,pop:java.lang.Long,when:Date,whennot:Date,pid:String",
            StringUtils.stringFromBinary(new byte[0]));
    assertEquals("type1", type.getName().getLocalPart());
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/util/QueryIndexHelperTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.referencing.CRS;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geowave.adapter.vector.FeatureDataAdapter;
import org.locationtech.geowave.adapter.vector.plugin.transaction.StatisticsCache;
import org.locationtech.geowave.core.geotime.index.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialOptions;
import org.locationtech.geowave.core.geotime.index.SpatialTemporalDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.index.SpatialTemporalOptions;
import org.locationtech.geowave.core.geotime.store.query.TemporalConstraints;
import org.locationtech.geowave.core.geotime.store.query.TemporalConstraintsSet;
import org.locationtech.geowave.core.geotime.store.query.TemporalRange;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxStatistic.BoundingBoxValue;
import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic;
import org.locationtech.geowave.core.geotime.store.statistics.TimeRangeStatistic.TimeRangeValue;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.geotime.util.TimeDescriptors;
import org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.StatisticValue;
import org.locationtech.geowave.core.store.query.constraints.BasicQueryByClass;
import org.locationtech.geowave.core.store.query.constraints.Constraints;
import org.locationtech.geowave.core.store.statistics.StatisticId;
import org.locationtech.geowave.core.store.statistics.StatisticType;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.PrecisionModel;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.operation.MathTransform;
import com.google.common.primitives.Bytes;

/**
 * Tests for {@code QueryIndexHelper}: clipping query temporal/BBOX constraints against cached
 * statistics, and composing index constraints from geometry plus temporal constraint sets.
 *
 * NOTE(review): several generic type arguments in this class appear to have been stripped during
 * text extraction (raw {@code List}, and the malformed method type-parameter lists on
 * {@code getFieldStatistic}/{@code getAdapterStatistic} below) — compare against the upstream
 * repository file before relying on the exact declarations.
 */
public class QueryIndexHelperTest {

  private static final Index SPATIAL_INDEX =
      SpatialDimensionalityTypeProvider.createIndexFromOptions(new SpatialOptions());
  private static final Index SPATIAL_TEMPORAL_INDEX =
      SpatialTemporalDimensionalityTypeProvider.createIndexFromOptions(
          new SpatialTemporalOptions());

  final ByteArray dataAdapterId = new ByteArray("123");

  SimpleFeatureType rangeType; // geometry + start/end Date range + pop + pid (EPSG:4326)
  SimpleFeatureType singleType; // geometry + single "when" Date + pop + pid (EPSG:4326)
  SimpleFeatureType geoType; // geometry + pop + pid, EPSG:4326
  SimpleFeatureType geoMercType; // geometry + pop + pid, EPSG:3785 (web mercator, meters)

  final TimeDescriptors geoTimeDescriptors = new TimeDescriptors();
  final TimeDescriptors rangeTimeDescriptors = new TimeDescriptors();
  final TimeDescriptors singleTimeDescriptors = new TimeDescriptors();

  final GeometryFactory factory = new GeometryFactory(new PrecisionModel(PrecisionModel.FIXED));

  Date startTime, endTime;
  Object[] singleDefaults, rangeDefaults, geoDefaults;
  MathTransform transform; // EPSG:3785 -> EPSG:4326, built in setup()

  /** Builds the four schemas, the CRS transform, the time descriptors, and default-value arrays. */
  @Before
  public void setup() throws SchemaException, ParseException, FactoryException {
    startTime = DateUtilities.parseISO("2005-05-15T20:32:56Z");
    endTime = DateUtilities.parseISO("2005-05-20T20:32:56Z");
    geoType =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,pid:String");
    geoMercType =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=3785,pop:java.lang.Long,pid:String");
    rangeType =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,start:Date,end:Date,pop:java.lang.Long,pid:String");
    singleType =
        DataUtilities.createType(
            "geostuff",
            "geometry:Geometry:srid=4326,when:Date,pop:java.lang.Long,pid:String");
    transform =
        CRS.findMathTransform(
            geoMercType.getCoordinateReferenceSystem(),
            geoType.getCoordinateReferenceSystem(),
            true);
    final TimeDescriptorConfiguration rangeConfig = new TimeDescriptorConfiguration();
    rangeConfig.configureFromType(rangeType);
    rangeTimeDescriptors.update(rangeType, rangeConfig);
    final TimeDescriptorConfiguration singleTimeConfig = new TimeDescriptorConfiguration();
    singleTimeConfig.configureFromType(singleType);
    singleTimeDescriptors.update(singleType, singleTimeConfig);
    // NOTE(review): raw List — the type argument (presumably AttributeDescriptor) looks stripped
    // by extraction; TODO confirm against upstream.
    List descriptors = rangeType.getAttributeDescriptors();
    rangeDefaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      rangeDefaults[p++] = descriptor.getDefaultValue();
    }
    descriptors = singleType.getAttributeDescriptors();
    singleDefaults = new Object[descriptors.size()];
    p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      singleDefaults[p++] = descriptor.getDefaultValue();
    }
    descriptors = geoType.getAttributeDescriptors();
    geoDefaults = new Object[descriptors.size()];
    p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
      geoDefaults[p++] = descriptor.getDefaultValue();
    }
  }

  /**
   * Clipping a single-attribute ("when") temporal constraint against the ingested time-range
   * statistic: a range overlapping the stats is clipped to the stats' start; a range fully inside
   * is returned unchanged.
   */
  @Test
  public void testGetTemporalConstraintsForSingleClippedRange() throws ParseException {
    final Date stime = DateUtilities.parseISO("2005-05-14T20:32:56Z");
    final Date etime = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date stime1 = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date etime1 = DateUtilities.parseISO("2005-05-19T20:32:56Z");
    final TestStatisticsCache statsCache = new TestStatisticsCache();
    final TimeRangeStatistic whenStats = new TimeRangeStatistic(singleType.getTypeName(), "when");
    final TimeRangeValue whenValue = whenStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "when", whenValue);
    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();
    constraintsSet.getConstraintsFor("when").add(new TemporalRange(stime, etime));
    final FeatureDataAdapter singleDataAdapter = new FeatureDataAdapter(singleType);
    // Ingest two features so the "when" statistic spans [startTime, endTime].
    final SimpleFeature notIntersectSingle1 = createSingleTimeFeature(startTime);
    whenValue.entryIngested(singleDataAdapter, notIntersectSingle1);
    final SimpleFeature notIntersectSingle = createSingleTimeFeature(endTime);
    whenValue.entryIngested(singleDataAdapter, notIntersectSingle);
    final TemporalConstraintsSet resultConstraintsSet =
        QueryIndexHelper.clipIndexedTemporalConstraints(
            statsCache,
            singleTimeDescriptors,
            constraintsSet);
    final TemporalConstraints constraints = resultConstraintsSet.getConstraintsFor("when");
    assertEquals(1, constraints.getRanges().size());
    // Clipped to the statistic's start time; end stays at the query's end.
    assertEquals(startTime, constraints.getStartRange().getStartTime());
    assertEquals(etime, constraints.getStartRange().getEndTime());
    final TemporalConstraintsSet constraintsSet1 = new TemporalConstraintsSet();
    constraintsSet1.getConstraintsFor("when").add(new TemporalRange(stime1, etime1));
    final TemporalConstraintsSet resultConstraintsSet1 =
        QueryIndexHelper.clipIndexedTemporalConstraints(
            statsCache,
            singleTimeDescriptors,
            constraintsSet1);
    final TemporalConstraints constraints1 = resultConstraintsSet1.getConstraintsFor("when");
    assertEquals(1, constraints1.getRanges().size());
    // Fully inside the stats range: returned unchanged.
    assertEquals(stime1, constraints1.getStartRange().getStartTime());
    assertEquals(etime1, constraints1.getStartRange().getEndTime());
  }

  /**
   * Clipping a start/end range constraint that opens at epoch against ingested start/end
   * statistics: the open start is clipped to the earliest ingested start time.
   */
  @Test
  public void testGetTemporalConstraintsForRangeClippedFullRange() throws ParseException {
    final TestStatisticsCache statsCache = new TestStatisticsCache();
    final TimeRangeStatistic startStats = new TimeRangeStatistic("type", "start");
    final TimeRangeValue startValue = startStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "start", startValue);
    final TimeRangeStatistic endStats = new TimeRangeStatistic("type", "end");
    final TimeRangeValue endValue = endStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "end", endValue);
    final Date statsStart1 = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date statsStart2 = DateUtilities.parseISO("2005-05-20T20:32:56Z");
    final Date statsEnd1 = DateUtilities.parseISO("2005-05-21T20:32:56Z");
    final Date statsEnd2 = DateUtilities.parseISO("2005-05-24T20:32:56Z");
    final SimpleFeature firstRangFeature = createFeature(statsStart1, statsEnd1);
    FeatureDataAdapter adapter = new FeatureDataAdapter(firstRangFeature.getFeatureType());
    startValue.entryIngested(adapter, firstRangFeature);
    endValue.entryIngested(adapter, firstRangFeature);
    final SimpleFeature secondRangFeature = createFeature(statsStart2, statsEnd2);
    startValue.entryIngested(adapter, secondRangFeature);
    endValue.entryIngested(adapter, secondRangFeature);
    final Date stime = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date etime = DateUtilities.parseISO("2005-05-19T20:32:56Z");
    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();
    // Query opens at epoch (new Date(0)) and must be clipped to the earliest start stat.
    constraintsSet.getConstraintsForRange("start", "end").add(
        new TemporalRange(new Date(0), etime));
    final TemporalConstraintsSet resultConstraintsSet =
        QueryIndexHelper.clipIndexedTemporalConstraints(
            statsCache,
            rangeTimeDescriptors,
            constraintsSet);
    final TemporalConstraints constraints =
        resultConstraintsSet.getConstraintsForRange("start", "end");
    assertEquals(1, constraints.getRanges().size());
    assertEquals(stime, constraints.getStartRange().getStartTime());
    assertEquals(etime, constraints.getStartRange().getEndTime());
  }

  /**
   * Composing constraints with an explicit time range uses that range for the time dimension;
   * composing with a null constraint set falls back to the full extent of the time statistics.
   */
  @Test
  public void testComposeQueryWithTimeRange() throws ParseException {
    final TestStatisticsCache statsCache = new TestStatisticsCache();
    final TimeRangeStatistic startStats = new TimeRangeStatistic("type", "start");
    final TimeRangeValue startValue = startStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "start", startValue);
    final TimeRangeStatistic endStats = new TimeRangeStatistic("type", "end");
    final TimeRangeValue endValue = endStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "end", endValue);
    final Date statsStart1 = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date statsStart2 = DateUtilities.parseISO("2005-05-20T20:32:56Z");
    final Date statsEnd1 = DateUtilities.parseISO("2005-05-21T20:32:56Z");
    final Date statsEnd2 = DateUtilities.parseISO("2005-05-24T20:32:56Z");
    final SimpleFeature firstRangFeature = createFeature(statsStart1, statsEnd1);
    FeatureDataAdapter adapter = new FeatureDataAdapter(firstRangFeature.getFeatureType());
    startValue.entryIngested(adapter, firstRangFeature);
    endValue.entryIngested(adapter, firstRangFeature);
    final SimpleFeature secondRangFeature = createFeature(statsStart2, statsEnd2);
    startValue.entryIngested(adapter, secondRangFeature);
    endValue.entryIngested(adapter, secondRangFeature);
    final Date stime = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date etime = DateUtilities.parseISO("2005-05-19T20:32:56Z");
    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();
    constraintsSet.getConstraintsForRange("start", "end").add(new TemporalRange(stime, etime));
    final BasicQueryByClass query =
        new BasicQueryByClass(
            QueryIndexHelper.composeConstraints(
                statsCache,
                rangeType,
                rangeTimeDescriptors,
                factory.toGeometry(
                    factory.createPoint(new Coordinate(27.25, 41.25)).getEnvelopeInternal()),
                constraintsSet));
    // NOTE(review): raw List — the element type (presumably MultiDimensionalNumericData) looks
    // stripped by extraction; TODO confirm. Dimension index 2 is the time dimension.
    final List nd = query.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);
    assertEquals(stime.getTime(), nd.get(0).getDataPerDimension()[2].getMin().longValue());
    assertEquals(etime.getTime(), nd.get(0).getDataPerDimension()[2].getMax().longValue());
    final BasicQueryByClass query1 =
        new BasicQueryByClass(
            QueryIndexHelper.composeConstraints(
                statsCache,
                rangeType,
                rangeTimeDescriptors,
                factory.toGeometry(
                    factory.createPoint(new Coordinate(27.25, 41.25)).getEnvelopeInternal()),
                null));
    final List nd1 = query1.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);
    // With no explicit constraints, the time dimension spans the ingested statistics.
    assertEquals(statsStart1.getTime(), nd1.get(0).getDataPerDimension()[2].getMin().longValue());
    assertEquals(statsEnd2.getTime(), nd1.get(0).getDataPerDimension()[2].getMax().longValue());
  }

  /** Composing with only a geometry (no time range) yields the query envelope's lon/lat bounds. */
  @Test
  public void testComposeQueryWithOutTimeRange() {
    final TestStatisticsCache statsCache = new TestStatisticsCache();
    final BoundingBoxStatistic geoStats = new BoundingBoxStatistic("type", "geometry");
    final BoundingBoxValue value = geoStats.createEmpty();
    statsCache.putFieldStatistic(BoundingBoxStatistic.STATS_TYPE, "geometry", value);
    final SimpleFeature firstFeature =
        createGeoFeature(factory.createPoint(new Coordinate(22.25, 42.25)));
    FeatureDataAdapter adapter = new FeatureDataAdapter(firstFeature.getFeatureType());
    value.entryIngested(adapter, firstFeature);
    final SimpleFeature secondFeature =
        createGeoFeature(factory.createPoint(new Coordinate(27.25, 41.25)));
    value.entryIngested(adapter, secondFeature);
    final Envelope bounds = new Envelope(21.23, 26.23, 41.75, 43.1);
    final BasicQueryByClass query =
        new BasicQueryByClass(
            QueryIndexHelper.composeConstraints(
                statsCache,
                geoType,
                geoTimeDescriptors,
                new GeometryFactory().toGeometry(bounds),
                null));
    final List nd = query.getIndexConstraints(SPATIAL_INDEX);
    // Dimension 0 = longitude, dimension 1 = latitude.
    assertEquals(21.23, nd.get(0).getDataPerDimension()[0].getMin(), 0.0001);
    assertEquals(26.23, nd.get(0).getDataPerDimension()[0].getMax(), 0.0001);
    assertEquals(41.75, nd.get(0).getDataPerDimension()[1].getMin(), 0.0001);
    assertEquals(43.1, nd.get(0).getDataPerDimension()[1].getMax(), 0.0001);
  }

  /**
   * Clipping a BBOX against the ingested bounding-box statistic intersects the query envelope
   * with the stats envelope (mixed edges from each, per the assertions).
   */
  @Test
  public void testGetBBOX() {
    final TestStatisticsCache statsCache = new TestStatisticsCache();
    final BoundingBoxStatistic geoStats = new BoundingBoxStatistic("type", "geometry");
    final BoundingBoxValue value = geoStats.createEmpty();
    statsCache.putFieldStatistic(BoundingBoxStatistic.STATS_TYPE, "geometry", value);
    final SimpleFeature firstFeature =
        createGeoFeature(factory.createPoint(new Coordinate(22.25, 42.25)));
    FeatureDataAdapter adapter = new FeatureDataAdapter(firstFeature.getFeatureType());
    value.entryIngested(adapter, firstFeature);
    final SimpleFeature secondFeature =
        createGeoFeature(factory.createPoint(new Coordinate(27.25, 41.25)));
    value.entryIngested(adapter, secondFeature);
    final Envelope bounds = new Envelope(21.23, 26.23, 41.75, 43.1);
    final Geometry bbox =
        QueryIndexHelper.clipIndexedBBOXConstraints(
            statsCache,
            geoType,
            geoType.getCoordinateReferenceSystem(),
            new GeometryFactory().toGeometry(bounds));
    final Envelope env = bbox.getEnvelopeInternal();
    assertEquals(22.25, env.getMinX(), 0.0001);
    assertEquals(26.23, env.getMaxX(), 0.0001);
    assertEquals(41.75, env.getMinY(), 0.0001);
    assertEquals(42.25, env.getMaxY(), 0.0001);
  }

  /** A BoundingBoxStatistic configured with source/destination CRSs reprojects on ingest. */
  @Test
  public void testBBOXStatReprojection() {
    // create a EPSG:3785 feature (units in meters)
    final SimpleFeature mercFeat =
        createGeoMercFeature(factory.createPoint(new Coordinate(19971868.8804, 20037508.3428)));
    // convert from EPSG:3785 to EPSG:4326 (convert to degrees lon/lat)
    // approximately 180.0, 85.0
    final SimpleFeature defaultCRSFeat = GeometryUtils.crsTransform(mercFeat, geoType, transform);
    final BoundingBoxStatistic bboxStat =
        new BoundingBoxStatistic(
            geoType.getTypeName(),
            geoType.getGeometryDescriptor().getLocalName(),
            geoMercType.getCoordinateReferenceSystem(),
            geoType.getCoordinateReferenceSystem());
    final BoundingBoxValue bboxValue = bboxStat.createEmpty();
    bboxValue.entryIngested(new FeatureDataAdapter(geoType), mercFeat);
    final Coordinate coord = ((Point) defaultCRSFeat.getDefaultGeometry()).getCoordinate();
    // coordinate should match reprojected feature
    assertEquals(coord.x, bboxValue.getMinX(), 0.0001);
    assertEquals(coord.x, bboxValue.getMaxX(), 0.0001);
    assertEquals(coord.y, bboxValue.getMinY(), 0.0001);
    assertEquals(coord.y, bboxValue.getMaxY(), 0.0001);
  }

  /** Builds a geoType feature with the given geometry and random pid. */
  private SimpleFeature createGeoFeature(final Geometry geo) {
    final SimpleFeature instance =
        SimpleFeatureBuilder.build(geoType, geoDefaults, UUID.randomUUID().toString());
    instance.setAttribute("pop", Long.valueOf(100));
    instance.setAttribute("pid", UUID.randomUUID().toString());
    instance.setAttribute("geometry", geo);
    return instance;
  }

  /** Builds a geoMercType (EPSG:3785) feature with the given geometry. */
  private SimpleFeature createGeoMercFeature(final Geometry geo) {
    final SimpleFeature instance =
        SimpleFeatureBuilder.build(geoMercType, geoDefaults, UUID.randomUUID().toString());
    instance.setAttribute("pop", Long.valueOf(100));
    instance.setAttribute("pid", UUID.randomUUID().toString());
    instance.setAttribute("geometry", geo);
    return instance;
  }

  /** Builds a singleType feature whose "when" attribute is the given time. */
  private SimpleFeature createSingleTimeFeature(final Date time) {
    final SimpleFeature instance =
        SimpleFeatureBuilder.build(singleType, singleDefaults, UUID.randomUUID().toString());
    instance.setAttribute("pop", Long.valueOf(100));
    instance.setAttribute("pid", UUID.randomUUID().toString());
    instance.setAttribute("when", time);
    instance.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    return instance;
  }

  /**
   * Time-bounded constraints alone (no geometry) produce no index constraints for the
   * spatial-temporal index; composing with a null geometry likewise yields none.
   */
  @Test
  public void testComposeSubsetConstraints() throws ParseException {
    final TestStatisticsCache statsCache = new TestStatisticsCache();
    final TimeRangeStatistic startStats = new TimeRangeStatistic("type", "start");
    final TimeRangeValue startValue = startStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "start", startValue);
    final TimeRangeStatistic endStats = new TimeRangeStatistic("type", "end");
    final TimeRangeValue endValue = endStats.createEmpty();
    statsCache.putFieldStatistic(TimeRangeStatistic.STATS_TYPE, "end", endValue);
    final Date statsStart1 = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date statsStart2 = DateUtilities.parseISO("2005-05-20T20:32:56Z");
    final Date statsEnd1 = DateUtilities.parseISO("2005-05-21T20:32:56Z");
    final Date statsEnd2 = DateUtilities.parseISO("2005-05-24T20:32:56Z");
    final SimpleFeature firstRangFeature = createFeature(statsStart1, statsEnd1);
    FeatureDataAdapter adapter = new FeatureDataAdapter(firstRangFeature.getFeatureType());
    startValue.entryIngested(adapter, firstRangFeature);
    endValue.entryIngested(adapter, firstRangFeature);
    final SimpleFeature secondRangFeature = createFeature(statsStart2, statsEnd2);
    startValue.entryIngested(adapter, secondRangFeature);
    endValue.entryIngested(adapter, secondRangFeature);
    final Date stime = DateUtilities.parseISO("2005-05-18T20:32:56Z");
    final Date etime = DateUtilities.parseISO("2005-05-19T20:32:56Z");
    final TemporalConstraintsSet constraintsSet = new TemporalConstraintsSet();
    constraintsSet.getConstraintsForRange("start", "end").add(new TemporalRange(stime, etime));
    final Constraints constraints =
        QueryIndexHelper.composeTimeBoundedConstraints(
            rangeType,
            rangeTimeDescriptors,
            constraintsSet);
    final List nd = constraints.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);
    assertTrue(nd.isEmpty());
    final BoundingBoxStatistic geoStats = new BoundingBoxStatistic("type", "geometry");
    final BoundingBoxValue geoValue = geoStats.createEmpty();
    statsCache.putFieldStatistic(BoundingBoxStatistic.STATS_TYPE, "geometry", geoValue);
    final SimpleFeature firstFeature =
        createGeoFeature(factory.createPoint(new Coordinate(22.25, 42.25)));
    geoValue.entryIngested(adapter, firstFeature);
    final SimpleFeature secondFeature =
        createGeoFeature(factory.createPoint(new Coordinate(27.25, 41.25)));
    geoValue.entryIngested(adapter, secondFeature);
    final Constraints constraints1 =
        QueryIndexHelper.composeConstraints(
            statsCache,
            rangeType,
            rangeTimeDescriptors,
            null,
            constraintsSet);
    final List nd1 = constraints1.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);
    assertTrue(nd1.isEmpty());
    /*
     * assertEquals( stime.getTime(), (long) nd1.get( 0).getDataPerDimension()[2].getMin());
     * assertEquals( etime.getTime(), (long) nd1.get( 0).getDataPerDimension()[2].getMax());
     */
    final TemporalConstraintsSet constraintsSet2 = new TemporalConstraintsSet();
    constraintsSet2.getConstraintsForRange("start", "end").add(
        new TemporalRange(statsStart1, statsEnd2));
    final Constraints constraints2 =
        QueryIndexHelper.composeTimeBoundedConstraints(
            rangeType,
            rangeTimeDescriptors,
            constraintsSet2);
    final List nd2 = constraints2.getIndexConstraints(SPATIAL_TEMPORAL_INDEX);
    assertTrue(nd2.isEmpty());
  }

  /** Builds a rangeType feature with the given start/end times and a fixed point geometry. */
  private SimpleFeature createFeature(final Date sTime, final Date eTime) {
    final SimpleFeature instance =
        SimpleFeatureBuilder.build(rangeType, rangeDefaults, UUID.randomUUID().toString());
    instance.setAttribute("pop", Long.valueOf(100));
    instance.setAttribute("pid", UUID.randomUUID().toString());
    instance.setAttribute("start", sTime);
    instance.setAttribute("end", eTime);
    instance.setAttribute("geometry", factory.createPoint(new Coordinate(27.25, 41.25)));
    return instance;
  }

  /**
   * In-memory StatisticsCache stub keyed by statistic-type bytes plus (for field statistics)
   * the field name, mirroring StatisticId's key layout.
   *
   * NOTE(review): the method type-parameter lists below ("public , R> V ...") are mangled —
   * angle-bracket content was evidently lost in extraction (presumably
   * {@code <V extends StatisticValue<R>, R>}); reproduced as-is, confirm against upstream.
   */
  private static class TestStatisticsCache extends StatisticsCache {
    public TestStatisticsCache() {
      super(null, null);
    }

    @SuppressWarnings("unchecked")
    @Override
    public , R> V getFieldStatistic(
        final StatisticType statisticType,
        final String fieldName) {
      if (statisticType == null || fieldName == null) {
        return null;
      }
      ByteArray key =
          new ByteArray(
              Bytes.concat(
                  statisticType.getBytes(),
                  StatisticId.UNIQUE_ID_SEPARATOR,
                  fieldName.getBytes()));
      if (cache.containsKey(key)) {
        return (V) cache.get(key);
      }
      // Cache the miss so repeated lookups short-circuit.
      cache.put(key, null);
      return null;
    }

    @SuppressWarnings("unchecked")
    @Override
    public , R> V getAdapterStatistic(
        final StatisticType statisticType) {
      ByteArray key = statisticType;
      if (cache.containsKey(key)) {
        return (V) cache.get(key);
      }
      cache.put(key, null);
      return null;
    }

    public void putFieldStatistic(
        final StatisticType statisticType,
        final String fieldName,
        final StatisticValue value) {
      ByteArray key =
          new ByteArray(
              Bytes.concat(
                  statisticType.getBytes(),
                  StatisticId.UNIQUE_ID_SEPARATOR,
                  fieldName.getBytes()));
      cache.put(key, value);
    }

    public void putAdapterStatistic(
        final StatisticType statisticType,
        final StatisticValue value) {
      cache.put(statisticType, value);
    }
  }
}


================================================
FILE: extensions/adapters/vector/src/test/java/org/locationtech/geowave/adapter/vector/util/TimeDescriptorsTest.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.adapter.vector.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.junit.Test;
import org.locationtech.geowave.core.geotime.util.TimeDescriptors;
import org.locationtech.geowave.core.geotime.util.TimeDescriptors.TimeDescriptorConfiguration;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * Verifies how TimeDescriptorConfiguration infers time attributes from a schema: a single
 * time attribute, a start/end range, mixed cases, and lone start/end attributes.
 */
public class TimeDescriptorsTest {

  /** "when" alone is picked as the single time attribute; no range is inferred. */
  @Test
  public void testOneTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,whennot:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("when", td.getTime().getLocalName());
    assertNull(td.getStartRange());
    assertNull(td.getEndRange());
    assertTrue(td.hasTime());
  }

  /** "start"/"end" together are inferred as a range; no single time attribute. */
  @Test
  public void testRangeTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,end:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("start", td.getStartRange().getLocalName());
    assertEquals("end", td.getEndRange().getLocalName());
    assertNull(td.getTime());
    assertTrue(td.hasTime());
  }

  /** When both "when" and a full "start"/"end" range exist, the range wins. */
  @Test
  public void testMixedTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,start:Date,end:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("start", td.getStartRange().getLocalName());
    assertEquals("end", td.getEndRange().getLocalName());
    assertNull(td.getTime());
    assertTrue(td.hasTime());
  }

  /** A lone "start" (no matching "end") degrades to a single time attribute. */
  @Test
  public void testJustStartTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,start:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("start", td.getTime().getLocalName());
    assertNull(td.getStartRange());
    assertNull(td.getEndRange());
    assertTrue(td.hasTime());
  }

  /** A lone "end" (no matching "start") likewise becomes the single time attribute. */
  @Test
  public void testJustEndTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,end:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("end", td.getTime().getLocalName());
    assertNull(td.getStartRange());
    assertNull(td.getEndRange());
    assertTrue(td.hasTime());
  }

  /** "when" plus an unpaired "end": "when" is preferred as the single time attribute. */
  @Test
  public void testWhenAndEndTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,end:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("when", td.getTime().getLocalName());
    assertNull(td.getStartRange());
    assertNull(td.getEndRange());
    assertTrue(td.hasTime());
  }

  /** "when" plus an unpaired "start": "when" is preferred as the single time attribute. */
  @Test
  public void testWhenAndStartTime() throws SchemaException {
    final SimpleFeatureType schema =
        DataUtilities.createType(
            "sp.geostuff",
            "geometry:Geometry:srid=4326,pop:java.lang.Long,when:Date,start:Date,pid:String");
    final TimeDescriptorConfiguration timeConfig = new TimeDescriptorConfiguration();
    timeConfig.configureFromType(schema);
    final TimeDescriptors td = new TimeDescriptors(schema, timeConfig);
    assertEquals("when", td.getTime().getLocalName());
    assertNull(td.getStartRange());
    assertNull(td.getEndRange());
    assertTrue(td.hasTime());
  }
}


================================================
FILE: extensions/adapters/vector/src/test/resources/statsFile.json
================================================
{
  "configurations": {
    "type1": [
      {
        "@class":"org.locationtech.geowave.adapter.vector.stats.StatsConfigurationCollection$SimpleFeatureStatsConfigurationCollection",
        "attConfig" : {
          "pop" : {
            "configurationsForAttribute" : [
              {"@class" : "org.locationtech.geowave.adapter.vector.stats.FeatureFixedBinNumericStatistics$FeatureFixedBinConfig","bins" : 24}
            ]
          }
        }
      },
      {
        "@class": "org.locationtech.geowave.adapter.vector.index.NumericSecondaryIndexConfiguration",
        "attributes" : ["pop"]
      },
      {
        "@class": "org.locationtech.geowave.adapter.vector.plugin.visibility.VisibilityConfiguration",
        "attributeName" : "vis"
      },
      {
        "@class": "org.locationtech.geowave.adapter.vector.index.SimpleFeaturePrimaryIndexConfiguration",
        "indexNames": ["SPATIAL_IDX"]
      }
    ]
  }
}


================================================
FILE: extensions/cli/accumulo-embed/pom.xml
================================================
4.0.0 geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-cli-accumulo-embed GeoWave Embedded Accumulo GeoWave Accumulo
Commands for Running Embedded Accumulo Server org.locationtech.geowave geowave-datastore-accumulo provided org.apache.accumulo accumulo-core provided org.apache.thrift libthrift org.apache.htrace htrace-core org.apache.accumulo accumulo-start ${accumulo.version} provided org.apache.thrift libthrift provided org.apache.accumulo accumulo-monitor org.apache.accumulo accumulo-minicluster org.apache.accumulo accumulo-monitor org.apache.accumulo accumulo-core org.apache.accumulo accumulo-fate org.apache.accumulo accumulo-start org.slf4j * jersey-core com.sun.jersey org.apache.thrift libthrift org.apache.htrace htrace-core org.apache.accumulo accumulo-shell build-installer-plugin maven-assembly-plugin compatibility org.apache.maven.plugins maven-dependency-plugin 2.9 setup-accumulo copy package log4j log4j 1.2.17 ${project.build.directory}/accumulo/lib ================================================ FILE: extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloMiniCluster.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.accumulo.cli;

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.accumulo.minicluster.MiniAccumuloConfig;
import org.apache.accumulo.monitor.Monitor;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.util.VersionUtil;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.config.Configurator;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.io.Files;

/**
 * Launches an embedded {@link MiniAccumuloCluster} (two tablet servers, ZooKeeper on port 2181,
 * Accumulo monitor on port 9995) for local testing and debugging with GeoWave.
 *
 * <p>Runtime configuration comes from system properties: {@code interactive}, {@code password},
 * {@code rootUser}, {@code instanceName}, and {@code geowave.home}. In interactive mode the
 * cluster stops when the user presses Enter; otherwise it runs until the JVM receives a shutdown
 * signal (a shutdown hook stops the cluster).
 */
public class AccumuloMiniCluster {
  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloMiniCluster.class);

  // Directory (relative to geowave.home) scanned for extra jars to put on the cluster classpath.
  private static final String DEFAULT_LIB_DIR = "lib/services/third-party/embedded-accumulo/lib";

  /**
   * Returns true when the local Hadoop version is 2.2.0 or newer (i.e. YARN-era Hadoop).
   * NOTE(review): duplicated in {@code MiniAccumuloClusterFactory.isYarn()}.
   */
  protected static boolean isYarn() {
    return VersionUtil.compareVersions(VersionInfo.getVersion(), "2.2.0") >= 0;
  }

  /**
   * Entry point: configures, starts, and (eventually) stops the embedded cluster.
   *
   * @param args unused; all configuration is via system properties
   * @throws Exception on any failure starting or stopping the cluster
   */
  public static void main(final String[] args) throws Exception {
    // Quiet down logging; the cluster is chatty at INFO.
    Configurator.setLevel(LogManager.getRootLogger().getName(), Level.WARN);
    // Interactive defaults to true when the property is absent.
    final boolean interactive = (System.getProperty("interactive") != null) ? Boolean.parseBoolean(System.getProperty("interactive")) : true;
    final String password = System.getProperty("password", "secret");
    final String user = System.getProperty("rootUser", "root");
    // Cluster data lives in a fresh temp directory for each run.
    final File tempDir = Files.createTempDir();
    final String instanceName = System.getProperty("instanceName", "accumulo");
    final MiniAccumuloConfig miniAccumuloConfig = new MiniAccumuloConfig(tempDir, password).setNumTservers(2).setInstanceName(instanceName).setZooKeeperPort(2181);
    // Root user and monitor port are not settable through the public config API; use the
    // reflection helpers in MiniAccumuloUtils.
    MiniAccumuloUtils.setRootUserName(miniAccumuloConfig, user);
    MiniAccumuloUtils.setProperty(miniAccumuloConfig, Property.MONITOR_PORT, "9995");
    final String geowaveHome = System.getProperty("geowave.home", DataStoreUtils.DEFAULT_GEOWAVE_DIRECTORY);
    final File libDir = new File(geowaveHome, DEFAULT_LIB_DIR);
    final URL[] extraLibraries;
    if (libDir.exists() && libDir.isDirectory()) {
      // Collect every *.jar in the lib dir; jars whose paths cannot be converted to URLs are
      // logged and skipped rather than aborting startup.
      extraLibraries = Arrays.stream(libDir.listFiles((f) -> f.isFile() && f.getName().toLowerCase().endsWith(".jar"))).map(f -> {
        try {
          return f.toURI().toURL();
        } catch (final MalformedURLException e) {
          LOGGER.warn("Unable to add to accumulo classpath", e);
        }
        return null;
      }).filter(Objects::nonNull).toArray(URL[]::new);
    } else {
      extraLibraries = new URL[0];
    }
    final MiniAccumuloCluster accumulo = MiniAccumuloClusterFactory.newAccumuloCluster(miniAccumuloConfig, AccumuloMiniCluster.class, extraLibraries);
    accumulo.start();
    // Launch the Accumulo monitor UI as a child process of the cluster.
    MiniAccumuloUtils.exec(accumulo, Monitor.class);
    System.out.println("starting up ...");
    Thread.sleep(3000);
    System.out.println("cluster running with root user " + user + ", password " + password + ", instance name " + accumulo.getInstanceName() + ", and zookeeper " + accumulo.getZooKeepers());
    if (interactive) {
      // Block until the user presses Enter, then stop the cluster in the foreground.
      System.out.println("Press Enter to shutdown..");
      System.in.read();
      System.out.println("Shutting down!");
      accumulo.stop();
    } else {
      // Non-interactive: stop the cluster from a shutdown hook and park the main thread forever.
      Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
          try {
            accumulo.stop();
          } catch (final Exception e) {
            LOGGER.warn("Unable to shutdown Accumulo", e);
            System.out.println("Error shutting down Accumulo.");
          }
          System.out.println("Shutting down!");
        }
      });
      while (true) {
        // Effectively sleep forever; the loop only exits when the JVM is killed.
        Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));
      }
    }
  }
}
================================================ FILE: extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloMiniClusterShell.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.accumulo.cli; import org.apache.accumulo.shell.Shell; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.config.Configurator; public class AccumuloMiniClusterShell { public static void main(final String[] args) throws Exception { Configurator.setLevel(LogManager.getRootLogger().getName(), Level.WARN); final String instanceName = (System.getProperty("instanceName") != null) ? System.getProperty("instanceName") : "geowave"; final String password = (System.getProperty("password") != null) ? System.getProperty("password") : "password"; final String[] shellArgs = new String[] {"-u", "root", "-p", password, "-z", instanceName, "localhost:2181"}; Shell.main(shellArgs); } } ================================================ FILE: extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/AccumuloRunServerCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.accumulo.cli;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.datastore.accumulo.cli.AccumuloSection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameters;

/**
 * CLI command ({@code ... accumulo run}) that starts the embedded mini Accumulo server by
 * delegating to {@link AccumuloMiniCluster#main}. Any failure is logged rather than propagated,
 * so the CLI exits cleanly.
 */
@GeowaveOperation(name = "run", parentOperation = AccumuloSection.class)
@Parameters(
    commandDescription = "Runs a standalone mini Accumulo server for test and debug with GeoWave")
public class AccumuloRunServerCommand extends DefaultOperation implements Command {
  private static final Logger LOGGER = LoggerFactory.getLogger(AccumuloRunServerCommand.class);

  /** Prep the driver & run the operation. */
  @Override
  public void execute(final OperationParams params) {
    try {
      // The mini cluster reads all of its configuration from system properties, so no
      // arguments are forwarded.
      AccumuloMiniCluster.main(new String[] {});
    } catch (final Exception e) {
      LOGGER.error("Unable to run Accumulo mini cluster", e);
    }
  }
}
================================================ FILE: extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/EmbeddedAccumuloOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.accumulo.cli;

import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;

/**
 * SPI provider (registered via META-INF/services) that exposes the embedded-Accumulo CLI
 * operations — currently only {@link AccumuloRunServerCommand} — to the GeoWave CLI.
 */
public class EmbeddedAccumuloOperationProvider implements CLIOperationProviderSpi {
  // NOTE(review): raw Class[] — the original likely declared Class<?>[] and the generic
  // parameter was lost in extraction; confirm against the SPI interface.
  private static final Class[] OPERATIONS = new Class[] {AccumuloRunServerCommand.class};

  @Override
  public Class[] getOperations() {
    return OPERATIONS;
  }
}
================================================ FILE: extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/MiniAccumuloClusterFactory.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.accumulo.cli;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Map;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.accumulo.minicluster.MiniAccumuloConfig;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.util.VersionUtil;
import org.locationtech.geowave.core.store.util.ClasspathUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Builds {@link MiniAccumuloCluster} instances with a pathing jar classpath, plus Windows/YARN
 * workarounds (copying {@code winutils.exe} into the cluster's bin directory, warning about
 * directory paths beginning with 't').
 */
public class MiniAccumuloClusterFactory {
  private static final Logger LOGGER = LoggerFactory.getLogger(MiniAccumuloClusterFactory.class);

  // Hadoop's Windows helper binary, required by YARN on Windows.
  protected static final String HADOOP_WINDOWS_UTIL = "winutils.exe";

  /** Returns true when the local Hadoop version is 2.2.0 or newer (YARN-era Hadoop). */
  protected static boolean isYarn() {
    return VersionUtil.compareVersions(VersionInfo.getVersion(), "2.2.0") >= 0;
  }

  /**
   * Creates a mini Accumulo cluster whose classpath is a pathing jar built from {@code context}'s
   * classpath plus {@code additionalClasspathUrls}.
   *
   * @param config the mini cluster configuration (its directory receives the pathing jar)
   * @param context class whose classpath seeds the pathing jar
   * @param additionalClasspathUrls extra jars to add to the cluster classpath
   * @return the cluster, or {@code null} when the pathing jar could not be created
   * @throws IOException on failure building the jar or copying winutils.exe
   */
  public static MiniAccumuloCluster newAccumuloCluster(
      final MiniAccumuloConfig config,
      final Class<?> context,
      final URL... additionalClasspathUrls) throws IOException {
    final String jarPath = ClasspathUtils.setupPathingJarClassPath(config.getDir(), context, additionalClasspathUrls);
    if (jarPath == null) {
      // Jar was not successfully created
      return null;
    }
    MiniAccumuloUtils.setClasspathItems(config, jarPath);
    final MiniAccumuloCluster retVal = new MiniAccumuloCluster(config);
    if (SystemUtils.IS_OS_WINDOWS) {
      if (directoryStartsWithT(config.getDir())) {
        System.out.println("Accumulo directory paths on Windows cannot begin with 't'. Try placing the accumulo data directory near the root of the file system to fix this issue.");
      }
      if (isYarn()) {
        // this must happen after instantiating Mini
        // Accumulo Cluster because it ensures the accumulo
        // directory is empty or it will fail, but must
        // happen before the cluster is started because yarn
        // expects winutils.exe to exist within a bin
        // directory in the mini accumulo cluster directory
        // (mini accumulo cluster will always set this
        // directory as hadoop_home)
        LOGGER.info("Running YARN on windows requires a local installation of Hadoop");
        LOGGER.info("'HADOOP_HOME' must be set and 'PATH' must contain %HADOOP_HOME%/bin");
        final Map<String, String> env = System.getenv();
        // HP Fortify "Path Manipulation" false positive
        // What Fortify considers "user input" comes only
        // from users with OS-level access anyway
        String hadoopHome = System.getProperty("hadoop.home.dir");
        if (hadoopHome == null) {
          hadoopHome = env.get("HADOOP_HOME");
        }
        boolean success = false;
        if (hadoopHome != null) {
          // HP Fortify "Path Traversal" false positive
          // What Fortify considers "user input" comes only
          // from users with OS-level access anyway
          final File hadoopDir = new File(hadoopHome);
          if (hadoopDir.exists()) {
            final File binDir = new File(config.getDir(), "bin");
            if (binDir.mkdir()) {
              // Compose paths with File(File, String) rather than string concatenation.
              FileUtils.copyFile(
                  new File(new File(hadoopDir, "bin"), HADOOP_WINDOWS_UTIL),
                  new File(binDir, HADOOP_WINDOWS_UTIL));
              success = true;
            }
          }
        }
        if (!success) {
          LOGGER.error("'HADOOP_HOME' environment variable is not set or /bin/winutils.exe does not exist");
          // return mini accumulo cluster anyways
          return retVal;
        }
      }
    }
    return retVal;
  }

  /**
   * Returns true when any component of {@code f}'s path (its own name or any ancestor directory)
   * begins with 't' or 'T' — a known problem for Accumulo directories on Windows.
   */
  private static boolean directoryStartsWithT(final File f) {
    final String name = f.getName();
    if ((name != null) && name.toLowerCase().startsWith("t")) {
      return true;
    }
    final File parent = f.getParentFile();
    if ((parent != null) && directoryStartsWithT(parent)) {
      return true;
    }
    return false;
  }
}
================================================ FILE:
extensions/cli/accumulo-embed/src/main/java/org/locationtech/geowave/datastore/accumulo/cli/MiniAccumuloUtils.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.accumulo.cli; import java.io.File; import java.io.IOException; import java.lang.reflect.Field; import java.util.List; import java.util.Map; import org.apache.accumulo.core.conf.Property; import org.apache.accumulo.minicluster.MiniAccumuloCluster; import org.apache.accumulo.minicluster.MiniAccumuloConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Because the impl package changed between Accumulo 1.x and 2.x we are using this to access methods * in impl without requiring the impl package name * */ public class MiniAccumuloUtils { private static final Logger LOGGER = LoggerFactory.getLogger(MiniAccumuloUtils.class); public static void setClasspathItems( final MiniAccumuloConfig config, final String... 
classpathItems) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); impl.getType().getMethod("setClasspathItems", String[].class).invoke( impl.get(config), new Object[] {classpathItems}); } catch (final Exception e) { LOGGER.warn("Unable to setClasspathItems", e); } } public static void setRootUserName(final MiniAccumuloConfig config, final String rootUserName) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); impl.getType().getMethod("setRootUserName", String.class).invoke( impl.get(config), rootUserName); } catch (final Exception e) { LOGGER.warn("Unable to setRootUserName", e); } } public static Object getClientProperty(final String name) { try { return MiniAccumuloUtils.class.getClassLoader().loadClass( "org.apache.accumulo.core.conf.ClientProperty").getDeclaredMethod( "valueOf", String.class).invoke(null, name); } catch (final Exception e) { LOGGER.warn("Unable to getClientProperty", e); } return null; } public static void setClientProperty( final MiniAccumuloConfig config, final Object property, final String value) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); impl.getType().getMethod( "setClientProperty", MiniAccumuloUtils.class.getClassLoader().loadClass( "org.apache.accumulo.core.conf.ClientProperty"), String.class).invoke(impl.get(config), property, value); } catch (final Exception e) { LOGGER.warn("Unable to setClientProperty", e); } } public static void setProperty( final MiniAccumuloConfig config, final Property property, final String value) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); impl.getType().getMethod("setProperty", Property.class, String.class).invoke( impl.get(config), property, value); } catch (final Exception e) { LOGGER.warn("Unable to setProperty", e); } } public static Map getSiteConfig(final MiniAccumuloConfig 
config) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); return (Map) impl.getType().getMethod("getSiteConfig").invoke( impl.get(config)); } catch (final Exception e) { LOGGER.warn("Unable to getSiteConfig", e); } return null; } public static Map getSystemProperties(final MiniAccumuloConfig config) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); return (Map) impl.getType().getMethod("getSystemProperties").invoke( impl.get(config)); } catch (final Exception e) { LOGGER.warn("Unable to getSystemProperties", e); } return null; } public static void setSystemProperties( final MiniAccumuloConfig config, final Map systemProperties) { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); impl.getType().getMethod("setSystemProperties", Map.class).invoke( impl.get(config), systemProperties); } catch (final Exception e) { LOGGER.warn("Unable to setSystemProperties", e); } } public static File getConfDir(final MiniAccumuloConfig config) throws IOException { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); return (File) impl.getType().getMethod("getConfDir").invoke(impl.get(config)); } catch (final Exception e) { LOGGER.warn("Unable to getConfDir", e); } return null; } public static File getLogDir(final MiniAccumuloConfig config) throws IOException { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); return (File) impl.getType().getMethod("getLogDir").invoke(impl.get(config)); } catch (final Exception e) { LOGGER.warn("Unable to getLogDir", e); } return null; } public static String getZooKeepers(final MiniAccumuloConfig config) throws IOException { try { final Field impl = MiniAccumuloConfig.class.getDeclaredField("impl"); impl.setAccessible(true); return (String) 
impl.getType().getMethod("getZooKeepers").invoke(impl.get(config)); } catch (final Exception e) { LOGGER.warn("Unable to getZooKeepers", e); } return null; } public static Process exec( final MiniAccumuloCluster cluster, final Class clazz, final String... args) throws IOException { return exec(cluster, clazz, null, args); } public static Process exec( final MiniAccumuloCluster cluster, final Class clazz, final List jvmArgs, final String... args) throws IOException { try { final Field impl = MiniAccumuloCluster.class.getDeclaredField("impl"); impl.setAccessible(true); final Object obj = impl.getType().getMethod("exec", Class.class, List.class, String[].class).invoke( impl.get(cluster), clazz, jvmArgs, args); if (obj instanceof Process) { return (Process) obj; } else { return (Process) obj.getClass().getMethod("getProcess").invoke(obj); } } catch (final Exception e) { LOGGER.warn("Unable start process for " + clazz, e); } return null; } } ================================================ FILE: extensions/cli/accumulo-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.datastore.accumulo.cli.EmbeddedAccumuloOperationProvider ================================================ FILE: extensions/cli/bigtable-embed/pom.xml ================================================ 4.0.0 geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-cli-bigtable-embed GeoWave Bigtable Embedded Server Geowave Bigtable Embedded Server org.locationtech.geowave geowave-core-index ${project.version} org.apache.commons commons-exec 1.3 org.locationtech.geowave geowave-adapter-raster ${project.version} org.locationtech.geowave geowave-core-cli ${project.version} org.locationtech.geowave geowave-datastore-bigtable ${project.version} provided build-installer-plugin maven-assembly-plugin ================================================ FILE: 
extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/BigtableEmulator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.bigtable.cli; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.URL; import java.util.Iterator; import java.util.LinkedList; import java.util.Queue; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecuteResultHandler; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.Executor; import org.apache.commons.exec.PumpStreamHandler; import org.apache.commons.io.IOUtils; import org.codehaus.plexus.archiver.tar.TarGZipUnArchiver; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.logging.console.ConsoleLogger; import org.locationtech.geowave.adapter.raster.util.ZipUtils; import org.locationtech.geowave.core.index.StringUtils; import org.slf4j.LoggerFactory; import com.google.common.io.ByteStreams; public class BigtableEmulator { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(BigtableEmulator.class); // Property names public static final String HOST_PORT_PROPERTY = "bigtable.emulator.endpoint"; public static final String INTERNAL_PROPERTY = "bigtable.emulator.internal"; public static final String DOWNLOAD_URL_PROPERTY = "bigtable.sdk.url"; public static final String 
DOWNLOAD_FILE_PROPERTY = "bigtable.sdk.file"; public static final File DEFAULT_DIR = new File("./target/temp"); // Download and executable paths private final String downloadUrl; private final String fileName; private static final String GCLOUD_EXE_DIR = "google-cloud-sdk/bin"; private final Object STARTUP_LOCK = new Object(); private boolean matchFound = false; private final long MAX_STARTUP_WAIT = 60000L; // if it doesn't start in 1 // minute, just move on and // get it over with private final File sdkDir; private ExecuteWatchdog watchdog; public BigtableEmulator(final RunBigtableEmulatorOptions options) { this(options.getDirectory(), options.getUrl(), options.getSdk()); } public BigtableEmulator( final String sdkDir, final String sdkDownloadUrl, final String sdkFileName) { if (sdkDir != null && !sdkDir.isEmpty()) { this.sdkDir = new File(sdkDir); } else { this.sdkDir = new File(DEFAULT_DIR, "gcloud"); } downloadUrl = sdkDownloadUrl; fileName = sdkFileName; if (!this.sdkDir.exists() && !this.sdkDir.mkdirs()) { LOGGER.warn("unable to create directory " + this.sdkDir.getAbsolutePath()); } } public boolean start(final String emulatorHostPort) { if (!isInstalled()) { try { if (!install()) { return false; } } catch (final IOException e) { LOGGER.error(e.getMessage()); return false; } } try { startEmulator(emulatorHostPort); } catch (IOException | InterruptedException e) { LOGGER.error(e.getMessage()); return false; } return true; } public boolean isRunning() { return ((watchdog != null) && watchdog.isWatching()); } public void stop() { // first, ask the watchdog nicely: watchdog.destroyProcess(); // then kill all the extra emulator processes like this: final String KILL_CMD_1 = "for i in $(ps -ef | grep -i \"[b]eta emulators bigtable\" | awk '{print $2}'); do kill -9 $i; done"; final String KILL_CMD_2 = "for i in $(ps -ef | grep -i \"[c]btemulator\" | awk '{print $2}'); do kill -9 $i; done"; final File bashFile = new File(DEFAULT_DIR, "kill-bigtable.sh"); PrintWriter 
scriptWriter; try { final Writer w = new OutputStreamWriter(new FileOutputStream(bashFile), "UTF-8"); scriptWriter = new PrintWriter(w); scriptWriter.println("#!/bin/bash"); scriptWriter.println("set -ev"); scriptWriter.println(KILL_CMD_1); scriptWriter.println(KILL_CMD_2); scriptWriter.close(); bashFile.setExecutable(true); } catch (final FileNotFoundException e1) { LOGGER.error("Unable to create bigtable emulator kill script", e1); return; } catch (final UnsupportedEncodingException e) { LOGGER.error("Unable to create bigtable emulator kill script", e); } final CommandLine cmdLine = new CommandLine(bashFile.getAbsolutePath()); final DefaultExecutor executor = new DefaultExecutor(); int exitValue = 0; try { exitValue = executor.execute(cmdLine); } catch (final IOException ex) { LOGGER.error("Unable to execute bigtable emulator kill script", ex); } LOGGER.warn("Bigtable emulator " + (exitValue == 0 ? "stopped" : "failed to stop")); } private boolean isInstalled() { final File gcloudExe = new File(sdkDir, GCLOUD_EXE_DIR + "/gcloud"); return (gcloudExe.canExecute()); } protected boolean install() throws IOException { final URL url = new URL(downloadUrl + "/" + fileName); final File downloadFile = new File(sdkDir.getParentFile(), fileName); if (!downloadFile.exists()) { try (FileOutputStream fos = new FileOutputStream(downloadFile)) { IOUtils.copyLarge(url.openStream(), fos); fos.flush(); } } if (downloadFile.getName().endsWith(".zip")) { ZipUtils.unZipFile(downloadFile, sdkDir.getAbsolutePath()); } else if (downloadFile.getName().endsWith(".tar.gz")) { final TarGZipUnArchiver unarchiver = new TarGZipUnArchiver(); unarchiver.enableLogging(new ConsoleLogger(Logger.LEVEL_WARN, "Gcloud SDK Unarchive")); unarchiver.setSourceFile(downloadFile); unarchiver.setDestDirectory(sdkDir); unarchiver.extract(); } if (!downloadFile.delete()) { LOGGER.warn("cannot delete " + downloadFile.getAbsolutePath()); } // Check the install if (!isInstalled()) { LOGGER.error("Gcloud install 
failed"); return false; } // Install the beta components final File gcloudExe = new File(sdkDir, GCLOUD_EXE_DIR + "/gcloud"); final CommandLine cmdLine = new CommandLine(gcloudExe); cmdLine.addArgument("components"); cmdLine.addArgument("install"); cmdLine.addArgument("beta"); cmdLine.addArgument("--quiet"); final DefaultExecutor executor = new DefaultExecutor(); final int exitValue = executor.execute(cmdLine); return (exitValue == 0); } /** * Using apache commons exec for cmd line execution * * @param command * @return exitCode * @throws ExecuteException * @throws IOException * @throws InterruptedException */ private void startEmulator(final String emulatorHostPort) throws ExecuteException, IOException, InterruptedException { final CommandLine cmdLine = new CommandLine(sdkDir + "/" + GCLOUD_EXE_DIR + "/gcloud"); cmdLine.addArgument("beta"); cmdLine.addArgument("emulators"); cmdLine.addArgument("bigtable"); cmdLine.addArgument("start"); cmdLine.addArgument("--quiet"); cmdLine.addArgument("--host-port"); cmdLine.addArgument(emulatorHostPort); // Using a result handler makes the emulator run async final DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler(); // watchdog shuts down the emulator, later watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT); final Executor executor = new DefaultExecutor(); executor.setWatchdog(watchdog); executor.setStreamHandler( new PumpStreamHandler( ByteStreams.nullOutputStream(), ByteStreams.nullOutputStream(), null) { @Override protected Thread createPump( final InputStream is, final OutputStream os, final boolean closeWhenExhausted) { final FilterInputStream fis = new FilterInputStream(is) { byte[] startupBytes = ("running on " + emulatorHostPort).getBytes(StringUtils.UTF8_CHARSET); Queue queue = new LinkedList<>(); private boolean isStartupFound() { final Integer[] array = queue.toArray(new Integer[] {}); final byte[] ba = new byte[array.length]; for (int i = 0; i < ba.length; i++) { ba[i] = 
array[i].byteValue(); } final Iterator iterator = queue.iterator(); for (final byte b : startupBytes) { if (!iterator.hasNext() || (b != iterator.next())) { return false; } } return true; } private void readAhead() throws IOException { // Work up some look-ahead. while (queue.size() < startupBytes.length) { final int next = super.read(); queue.offer(next); if (next == -1) { break; } } } @Override public int read() throws IOException { if (matchFound) { super.read(); } readAhead(); if (isStartupFound()) { synchronized (STARTUP_LOCK) { STARTUP_LOCK.notifyAll(); } matchFound = true; } return queue.remove(); } @Override public int read(final byte b[]) throws IOException { if (matchFound) { super.read(b); } return read(b, 0, b.length); } // copied straight from InputStream implementation, // just need to use `read()` // from this class @Override public int read(final byte b[], final int off, final int len) throws IOException { if (matchFound) { super.read(b, off, len); } if (b == null) { throw new NullPointerException(); } else if ((off < 0) || (len < 0) || (len > (b.length - off))) { throw new IndexOutOfBoundsException(); } else if (len == 0) { return 0; } int c = read(); if (c == -1) { return -1; } b[off] = (byte) c; int i = 1; try { for (; i < len; i++) { c = read(); if (c == -1) { break; } b[off + i] = (byte) c; } } catch (final IOException ee) { } return i; } }; return super.createPump(fis, os, closeWhenExhausted); } }); LOGGER.warn("Starting Bigtable Emulator: " + cmdLine.toString()); synchronized (STARTUP_LOCK) { executor.execute(cmdLine, resultHandler); STARTUP_LOCK.wait(MAX_STARTUP_WAIT); } } } ================================================ FILE: extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/BigtableOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information 
regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.bigtable.cli;

import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi;

/**
 * SPI provider (registered via META-INF/services) that exposes the embedded-Bigtable CLI
 * operations — the {@link BigtableSection} group and the {@link RunBigtableEmulator} command —
 * to the GeoWave CLI.
 */
public class BigtableOperationProvider implements CLIOperationProviderSpi {
  // NOTE(review): raw Class[] — the original likely declared Class<?>[] and the generic
  // parameter was lost in extraction; confirm against the SPI interface.
  private static final Class[] OPERATIONS = new Class[] {BigtableSection.class, RunBigtableEmulator.class};

  @Override
  public Class[] getOperations() {
    return OPERATIONS;
  }
}
================================================ FILE: extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/BigtableSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.bigtable.cli;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.util.UtilSection;
import com.beust.jcommander.Parameters;

/**
 * CLI section ({@code util bigtable}) that groups the embedded-Bigtable commands; it has no
 * behavior of its own beyond the annotations below.
 */
@GeowaveOperation(name = "bigtable", parentOperation = UtilSection.class)
@Parameters(commandDescription = "Bigtable embedded server commands")
public class BigtableSection extends DefaultOperation {
}
================================================ FILE: extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/RunBigtableEmulator.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.bigtable.cli;

import java.util.concurrent.TimeUnit;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/**
 * CLI command ({@code ... bigtable run}) that starts a standalone Bigtable emulator. In
 * interactive mode the emulator stops when the user presses Enter; otherwise it runs until the
 * JVM receives a shutdown signal (a shutdown hook stops it). Mirrors the run/shutdown pattern of
 * the embedded Accumulo command in this repository.
 */
@GeowaveOperation(name = "run", parentOperation = BigtableSection.class)
@Parameters(
    commandDescription = "Runs a standalone Bigtable server for test and debug with GeoWave")
public class RunBigtableEmulator extends DefaultOperation implements Command {
  private static final Logger LOGGER = LoggerFactory.getLogger(RunBigtableEmulator.class);

  // Emulator location/download/port options, parsed by jcommander.
  @ParametersDelegate
  private RunBigtableEmulatorOptions options = new RunBigtableEmulatorOptions();

  @Parameter(
      names = {"--interactive", "-i"},
      arity = 1,
      description = "Whether to prompt for user input to end the process")
  private boolean interactive = true;

  /** Prep the driver & run the operation. */
  @Override
  public void execute(final OperationParams params) {
    try {
      final BigtableEmulator server = options.getServer();
      server.start(options.getPort());
      if (interactive) {
        // Block until the user presses Enter, then stop in the foreground.
        System.out.println("Press Enter to shutdown..");
        System.in.read();
        System.out.println("Shutting down!");
        server.stop();
      } else {
        // Non-interactive: stop from a shutdown hook and park the main thread forever.
        Runtime.getRuntime().addShutdownHook(new Thread() {
          @Override
          public void run() {
            try {
              server.stop();
            } catch (final Exception e) {
              LOGGER.warn("Unable to shutdown Bigtable", e);
              System.out.println("Error shutting down Bigtable.");
            }
            System.out.println("Shutting down!");
          }
        });
        while (true) {
          // Effectively sleep forever; the loop only exits when the JVM is killed.
          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));
        }
      }
    } catch (final Exception e) {
      LOGGER.error("Unable to run embedded Bigtable server", e);
    }
  }
}
================================================ FILE: extensions/cli/bigtable-embed/src/main/java/org/locationtech/geowave/datastore/bigtable/cli/RunBigtableEmulatorOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache
License, Version 2.0 which accompanies this distribution and is * available at
http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.bigtable.cli;

import java.io.File;
import java.io.IOException;
import com.beust.jcommander.Parameter;

/**
 * JCommander options for the embedded Bigtable emulator: where to install it, where to download
 * it from, which SDK archive to use, and the host:port to bind.
 */
public class RunBigtableEmulatorOptions {
  @Parameter(names = {"--directory", "-d"}, description = "The directory to use for Bigtable")
  private String directory = BigtableEmulator.DEFAULT_DIR.getPath();

  @Parameter(names = {"--url", "-u"}, description = "The url location to download Bigtable")
  private String url = "https://dl.google.com/dl/cloudsdk/channels/rapid/downloads";

  @Parameter(names = {"--sdk", "-s"}, description = "The name of the Bigtable SDK")
  private String sdk = "google-cloud-sdk-183.0.0-linux-x86_64.tar.gz";

  @Parameter(names = {"--port", "-p"}, description = "The port the emulator will run on")
  private String port = "127.0.0.1:8086";

  public String getDirectory() {
    return directory;
  }

  public String getUrl() {
    return url;
  }

  public String getSdk() {
    return sdk;
  }

  public String getPort() {
    return port;
  }

  // Setter parameters are final for consistency with the other CLI option classes
  // (e.g. RunCassandraServer).
  public void setDirectory(final String directory) {
    this.directory = directory;
  }

  public void setUrl(final String url) {
    this.url = url;
  }

  public void setSdk(final String sdk) {
    this.sdk = sdk;
  }

  public void setPort(final String port) {
    this.port = port;
  }

  /**
   * @return a new emulator configured from these options
   * @throws IOException if the emulator working directory cannot be prepared
   */
  public BigtableEmulator getServer() throws IOException {
    return new BigtableEmulator(this);
  }
}

================================================
FILE: extensions/cli/bigtable-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi
================================================
org.locationtech.geowave.datastore.bigtable.cli.BigtableOperationProvider

================================================
FILE: extensions/cli/cassandra-embed/pom.xml
================================================
4.0.0
geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-cli-cassandra-embed GeoWave Cassandra Embedded Server Geowave Cassandra Embedded Server org.locationtech.geowave geowave-datastore-cassandra ${project.version} provided com.google.guava failureaccess 1.0.1 org.apache.cassandra cassandra-all ${cassandra.version} org.slf4j slf4j-log4j12 commons-logging commons-logging log4j log4j org.slf4j log4j-over-slf4j ch.qos.logback logback-core ch.qos.logback logback-classic org.hibernate hibernate-validator net.jpountz.lz4 lz4 build-installer-plugin maven-assembly-plugin ================================================ FILE: extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/CassandraOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.cassandra.cli; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class CassandraOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] {CassandraSection.class, RunCassandraServer.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/CassandraSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache
License, Version 2.0 which accompanies this distribution and is * available at
http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.cassandra.cli;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.util.UtilSection;
import com.beust.jcommander.Parameters;

/**
 * Marker section operation that groups the embedded-Cassandra commands under
 * "geowave util cassandra". It carries no behavior of its own; subcommands (e.g. "run")
 * attach to it via {@code parentOperation = CassandraSection.class}.
 */
@GeowaveOperation(name = "cassandra", parentOperation = UtilSection.class)
@Parameters(commandDescription = "Cassandra embedded server commands")
public class CassandraSection extends DefaultOperation {
}

================================================
FILE: extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/CassandraServer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.datastore.cassandra.cli;

import java.io.IOException;
import org.apache.cassandra.service.EmbeddedCassandraService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Thin lifecycle wrapper around Cassandra's {@link EmbeddedCassandraService} used by the
 * "util cassandra run" CLI command. Configuration (YAML location, data directories) is supplied
 * externally via the "cassandra.config" system property before {@link #start()} is called.
 */
public class CassandraServer {
  private static final Logger LOGGER = LoggerFactory.getLogger(CassandraServer.class);
  protected static final String NODE_DIRECTORY_PREFIX = "cassandra";

  private final EmbeddedCassandraService embeddedService;

  public CassandraServer() {
    embeddedService = new EmbeddedCassandraService();
  }

  /**
   * Starts the embedded Cassandra service. A startup failure is logged rather than rethrown,
   * so callers cannot distinguish a failed start from a successful one here — best-effort by
   * design for this debug/test utility.
   */
  public void start() {
    try {
      embeddedService.start();
    } catch (final IOException e) {
      LOGGER.warn("Unable to start Cassandra", e);
    }
  }

  /** Stops the embedded Cassandra service. */
  public void stop() {
    embeddedService.stop();
  }
}

================================================
FILE: extensions/cli/cassandra-embed/src/main/java/org/locationtech/geowave/datastore/cassandra/cli/RunCassandraServer.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.cassandra.cli; import java.io.File; import java.util.concurrent.TimeUnit; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "run", parentOperation = CassandraSection.class) @Parameters( commandDescription = "Runs a standalone Cassandra server for test and debug with GeoWave. The default file store will be './cassandra'.") public class RunCassandraServer extends DefaultOperation implements Command { private static final Logger LOGGER = LoggerFactory.getLogger(RunCassandraServer.class); @Parameter( names = {"--interactive", "-i"}, arity = 1, description = "Whether to prompt for user input to end the process") private boolean interactive = true; @Parameter( names = {"--config", "-c"}, description = "Optionally, a URL to a valid cassandra YAML for configuration.") private String config = "cassandra-default.yaml"; /** * Prep the driver & run the operation. 
*/ @Override public void execute(final OperationParams params) { try { System.setProperty("cassandra.config", config); if (config.equals("cassandra-default.yaml")) { if (!new File("cassandra").mkdirs()) { LOGGER.warn("Unable to create cassandra directory"); } } final CassandraServer server = new CassandraServer(); server.start(); if (interactive) { System.out.println("Press Enter to shutdown.."); System.in.read(); System.out.println("Shutting down!"); server.stop(); } else { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { try { server.stop(); } catch (final Exception e) { LOGGER.warn("Unable to shutdown Cassandra", e); System.out.println("Error shutting down Cassandra."); } System.out.println("Shutting down!"); } }); while (true) { Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS)); } } } catch (final Exception e) { LOGGER.error("Unable to run embedded Cassandra server", e); } } public boolean isInteractive() { return interactive; } public void setInteractive(final boolean interactive) { this.interactive = interactive; } public String getConfig() { return config; } public void setConfig(final String config) { this.config = config; } } ================================================ FILE: extensions/cli/cassandra-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.datastore.cassandra.cli.CassandraOperationProvider ================================================ FILE: extensions/cli/cassandra-embed/src/main/resources/cassandra-default.yaml ================================================ # # Warning! # Consider the effects on 'o.a.c.i.s.LegacySSTableTest' before changing schemas in this file. 
# cluster_name: Test Cluster # memtable_allocation_type: heap_buffers memtable_allocation_type: offheap_objects commitlog_sync: batch commitlog_sync_batch_window_in_ms: 1.0 commitlog_segment_size_in_mb: 5 commitlog_directory: cassandra/commitlog # commitlog_compression: # - class_name: LZ4Compressor cdc_raw_directory: cassandra/cdc_raw cdc_enabled: false hints_directory: cassandra/hints partitioner: org.apache.cassandra.dht.ByteOrderedPartitioner listen_address: 127.0.0.1 storage_port: 7012 ssl_storage_port: 17012 start_native_transport: true native_transport_port: 9042 column_index_size_in_kb: 4 saved_caches_directory: cassandra/saved_caches data_file_directories: - cassandra/data disk_access_mode: mmap seed_provider: - class_name: org.apache.cassandra.locator.SimpleSeedProvider parameters: - seeds: "127.0.0.1:7012" endpoint_snitch: org.apache.cassandra.locator.SimpleSnitch dynamic_snitch: true server_encryption_options: internode_encryption: none keystore: conf/.keystore keystore_password: cassandra truststore: conf/.truststore truststore_password: cassandra incremental_backups: true concurrent_compactors: 4 compaction_throughput_mb_per_sec: 0 row_cache_class_name: org.apache.cassandra.cache.OHCProvider row_cache_size_in_mb: 16 enable_user_defined_functions: true enable_scripted_user_defined_functions: true prepared_statements_cache_size_mb: 1 corrupted_tombstone_strategy: exception stream_entire_sstables: true stream_throughput_outbound_megabits_per_sec: 200000000 #this is fairly high, but the goal is to avoid failures based on batch size batch_size_fail_threshold_in_kb: 50000 enable_sasi_indexes: true enable_materialized_views: true file_cache_enabled: true ================================================ FILE: extensions/cli/debug/pom.xml ================================================ 4.0.0 org.locationtech.geowave geowave-extension-parent ../../ 2.0.2-SNAPSHOT geowave-cli-debug GeoWave Debug Commandline Tools A set of ad-hoc debug tools available through 
the command line that can be applied to GeoWave data org.locationtech.geowave geowave-datastore-accumulo ${project.version} org.locationtech.geowave geowave-datastore-hbase ${project.version} org.locationtech.geowave geowave-adapter-vector ${project.version} org.locationtech.geowave geowave-adapter-raster ${project.version} org.locationtech.geowave geowave-analytic-spark ================================================ FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/AbstractGeoWaveQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.cli.debug;

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.time.StopWatch;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter;
import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.cli.CLIUtils;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;

/**
 * Base class for the ad-hoc debug query commands. Loads the named store, resolves a feature
 * adapter (either the explicitly requested type or the first one available), times the query,
 * and delegates the actual query execution to {@link #runQuery}.
 */
public abstract class AbstractGeoWaveQuery extends DefaultOperation implements Command {
  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGeoWaveQuery.class);

  // Positional argument: the name of the GeoWave store to query.
  @Parameter(description = "<store name>")
  private List<String> parameters = new ArrayList<>();

  @Parameter(names = "--indexName", description = "The name of the index (optional)")
  private String indexName;

  @Parameter(names = "--typeName", description = "Optional ability to provide an adapter type name")
  private String typeName;

  @Parameter(names = "--debug", description = "Print out additional info for debug purposes")
  private boolean debug = false;

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  public void setDebug(final boolean debug) {
    this.debug = debug;
  }

  @Override
  public void execute(final OperationParams params) throws ParseException {
    final StopWatch stopWatch = new StopWatch();

    // Ensure we have all the required arguments
    if (parameters.size() != 1) {
      throw new ParameterException("Requires arguments: <store name>");
    }

    final String storeName = parameters.get(0);

    // Attempt to load store.
    final DataStorePluginOptions storeOptions =
        CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole());

    DataStore dataStore;
    PersistentAdapterStore adapterStore;
    dataStore = storeOptions.createDataStore();
    adapterStore = storeOptions.createAdapterStore();

    // Resolve the adapter: the requested type if given, otherwise the first registered one.
    final GeotoolsFeatureDataAdapter adapter;
    if (typeName != null) {
      adapter =
          (GeotoolsFeatureDataAdapter) adapterStore.getAdapter(
              storeOptions.createInternalAdapterStore().getAdapterId(typeName)).getAdapter();
    } else {
      final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();
      adapter = (GeotoolsFeatureDataAdapter) adapters[0].getAdapter();
    }
    if (debug && (adapter != null)) {
      System.out.println(adapter);
    }
    stopWatch.start();
    final long results = runQuery(adapter, typeName, indexName, dataStore, debug, storeOptions);
    stopWatch.stop();
    System.out.println("Got " + results + " results in " + stopWatch.toString());
  }

  /**
   * Executes the concrete query and returns the number of results.
   */
  protected abstract long runQuery(
      final GeotoolsFeatureDataAdapter adapter,
      final String typeName,
      final String indexName,
      DataStore dataStore,
      boolean debug,
      DataStorePluginOptions pluginOptions);

  /** JCommander converter turning a raw string argument into a GeoWave {@link ByteArray}. */
  public static class StringToByteArrayConverter extends GeoWaveBaseConverter<ByteArray> {
    public StringToByteArrayConverter(final String optionName) {
      super(optionName);
    }

    @Override
    public ByteArray convert(final String value) {
      return new ByteArray(value);
    }
  }
}

================================================
FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/BBOXQuery.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.debug; import org.apache.commons.lang3.time.StopWatch; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "bbox", parentOperation = DebugSection.class) @Parameters(commandDescription = "bbox query") public class BBOXQuery extends AbstractGeoWaveQuery { private static Logger LOGGER = LoggerFactory.getLogger(BBOXQuery.class); @Parameter(names = {"-e", "--east"}, required = true, description = "Max Longitude of BBOX") private Double east; @Parameter(names = {"-w", "--west"}, required = true, description = "Min Longitude of BBOX") private Double west; @Parameter(names = {"-n", "--north"}, required = true, description = "Max Latitude of BBOX") private Double north; @Parameter(names = {"-s", "--south"}, required = true, 
description = "Min Latitude of BBOX") private Double south; @Parameter(names = {"--useAggregation", "-agg"}, description = "Compute count on the server side") private Boolean useAggregation = Boolean.FALSE; private Geometry geom; private void getBoxGeom() { geom = new GeometryFactory().toGeometry(new Envelope(west, east, south, north)); } @Override protected long runQuery( final GeotoolsFeatureDataAdapter adapter, final String typeName, final String indexName, final DataStore dataStore, final boolean debug, final DataStorePluginOptions pluginOptions) { final StopWatch stopWatch = new StopWatch(); getBoxGeom(); long count = 0; if (useAggregation) { final VectorAggregationQueryBuilder bldr = (VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().count( typeName).indexName(indexName); final Long countResult = dataStore.aggregate( bldr.constraints( bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints( geom).build()).build()); if (countResult != null) { count += countResult; } } else { final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName); stopWatch.start(); try (final CloseableIterator it = dataStore.query( bldr.constraints( bldr.constraintsFactory().spatialTemporalConstraints().spatialConstraints( geom).build()).build())) { stopWatch.stop(); System.out.println("Ran BBOX query in " + stopWatch.toString()); stopWatch.reset(); stopWatch.start(); while (it.hasNext()) { if (debug) { System.out.println(it.next()); } else { it.next(); } count++; } stopWatch.stop(); System.out.println("BBOX query results iteration took " + stopWatch.toString()); } } return count; } } ================================================ FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/CQLQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.debug; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorAggregationQueryBuilder; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.index.persist.Persistable; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.opengis.feature.simple.SimpleFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "serverCql", parentOperation = DebugSection.class) @Parameters(commandDescription = "cql server-side") public class CQLQuery extends AbstractGeoWaveQuery { private static Logger LOGGER = LoggerFactory.getLogger(CQLQuery.class); @Parameter(names = "--cql", required = true, description = "CQL Filter executed client side") private String cqlStr; @Parameter(names = {"--useAggregation", "-agg"}, description = "Compute count on the server side") private Boolean useAggregation = Boolean.FALSE; @Override protected long runQuery( final GeotoolsFeatureDataAdapter adapter, final String typeName, final String indexName, final DataStore dataStore, final boolean debug, final DataStorePluginOptions pluginOptions) { long count = 0; if (useAggregation) { final VectorAggregationQueryBuilder bldr = 
(VectorAggregationQueryBuilder) VectorAggregationQueryBuilder.newBuilder().count( typeName).indexName(indexName); final Long countResult = dataStore.aggregate( bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build()); if (countResult != null) { count += countResult; } return count; } else { final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName); try (final CloseableIterator it = dataStore.query( bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build())) { while (it.hasNext()) { if (debug) { System.out.println(it.next()); } else { it.next(); } count++; } } return count; } } } ================================================ FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/ClientSideCQLQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.cli.debug;

import org.geotools.filter.text.cql2.CQLException;
import org.geotools.filter.text.ecql.ECQL;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.QueryBuilder;
import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.filter.Filter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;

/**
 * Debug command that scans all features of a type and applies a CQL filter on the client,
 * counting (and optionally printing) the matches. Useful for consistency-checking against the
 * server-side CQL path.
 */
@GeowaveOperation(name = "clientCql", parentOperation = DebugSection.class)
@Parameters(commandDescription = "cql client-side, primarily useful for consistency checking")
public class ClientSideCQLQuery extends AbstractGeoWaveQuery {
  private static final Logger LOGGER = LoggerFactory.getLogger(ClientSideCQLQuery.class);

  @Parameter(names = "--cql", required = true, description = "CQL Filter executed client side")
  private String cql;

  private Filter filter;

  // Parse the CQL string into a GeoTools filter; on failure the filter stays null and the
  // query below will NPE — preserved behavior, but logged for diagnosis.
  private void getFilter() {
    try {
      filter = ECQL.toFilter(cql);
    } catch (final CQLException e) {
      LOGGER.warn("Unable to retrieve filter", e);
    }
  }

  @Override
  protected long runQuery(
      final GeotoolsFeatureDataAdapter adapter,
      final String typeName,
      final String indexName,
      final DataStore dataStore,
      final boolean debug,
      final DataStorePluginOptions pluginOptions) {
    getFilter();
    long count = 0;
    try (final CloseableIterator<?> it =
        dataStore.query(
            QueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName).build())) {
      while (it.hasNext()) {
        final Object o = it.next();
        if (o instanceof SimpleFeature) {
          if (filter.evaluate(o)) {
            if (debug) {
              System.out.println(o);
            }
            count++;
          }
        }
      }
    }
    return count;
  }
}

================================================
FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/DebugOperationsProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.debug; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class DebugOperationsProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { DebugSection.class, BBOXQuery.class, ClientSideCQLQuery.class, CQLQuery.class, FullTableScan.class, MinimalFullTable.class, SparkQuery.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/DebugSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright *
ownership. All rights reserved. This program and the accompanying materials are made available *
under the terms of the Apache License, Version 2.0 which accompanies this distribution and is *
available at http://www.apache.org/licenses/LICENSE-2.0.txt */
package org.locationtech.geowave.cli.debug;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * Marker section operation that groups the ad-hoc debug commands under "geowave debug". It
 * carries no behavior of its own; subcommands attach to it via
 * {@code parentOperation = DebugSection.class}.
 */
@GeowaveOperation(name = "debug", parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Scratchpad for geowave ops")
public class DebugSection extends DefaultOperation {
}

================================================
FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/FullTableScan.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.debug; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.store.CloseableIterator; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.QueryBuilder; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "fullscan", parentOperation = DebugSection.class) @Parameters(commandDescription = "fulltable scan") public class FullTableScan extends AbstractGeoWaveQuery { private static Logger LOGGER = LoggerFactory.getLogger(FullTableScan.class); @Override protected long runQuery( final GeotoolsFeatureDataAdapter adapter, final String typeName, final String indexName, final DataStore dataStore, final boolean debug, final DataStorePluginOptions pluginOptions) { long count = 0; try (final CloseableIterator it = dataStore.query( QueryBuilder.newBuilder().addTypeName(typeName).indexName(indexName).build())) { while (it.hasNext()) { if (debug) { System.out.println(it.next()); } else { it.next(); } count++; } } return count; } } ================================================ FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/MinimalFullTable.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.debug; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.BatchScanner; import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.data.Value; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.time.StopWatch; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.Command; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.locationtech.geowave.datastore.accumulo.AccumuloStoreFactoryFamily; import org.locationtech.geowave.datastore.accumulo.config.AccumuloOptions; import org.locationtech.geowave.datastore.accumulo.config.AccumuloRequiredOptions; import org.locationtech.geowave.datastore.accumulo.operations.AccumuloOperations; import org.locationtech.geowave.datastore.hbase.HBaseStoreFactoryFamily; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; 
@GeowaveOperation(name = "fullscanMinimal", parentOperation = DebugSection.class) @Parameters(commandDescription = "full table scan without any iterators or deserialization") public class MinimalFullTable extends DefaultOperation implements Command { private static Logger LOGGER = LoggerFactory.getLogger(MinimalFullTable.class); @Parameter(description = "") private List parameters = new ArrayList<>(); @Parameter(names = "--indexId", required = true, description = "The name of the index (optional)") private String indexId; public void setParameters(final List parameters) { this.parameters = parameters; } @Override public void execute(final OperationParams params) throws ParseException { final StopWatch stopWatch = new StopWatch(); // Ensure we have all the required arguments if (parameters.size() != 1) { throw new ParameterException("Requires arguments: "); } final String storeName = parameters.get(0); // Attempt to load store. final DataStorePluginOptions storeOptions = CLIUtils.loadStore(storeName, getGeoWaveConfigFile(params), params.getConsole()); final String storeType = storeOptions.getType(); if (storeType.equals(AccumuloStoreFactoryFamily.TYPE)) { try { final AccumuloRequiredOptions opts = (AccumuloRequiredOptions) storeOptions.getFactoryOptions(); final AccumuloOperations ops = new AccumuloOperations( opts.getZookeeper(), opts.getInstance(), opts.getUser(), opts.getPasswordOrKeytab(), opts.isUseSasl(), opts.getGeoWaveNamespace(), (AccumuloOptions) opts.getStoreOptions()); long results = 0; final BatchScanner scanner = ops.createBatchScanner(indexId); scanner.setRanges(Collections.singleton(new Range())); final Iterator> it = scanner.iterator(); stopWatch.start(); while (it.hasNext()) { it.next(); results++; } stopWatch.stop(); scanner.close(); System.out.println("Got " + results + " results in " + stopWatch.toString()); } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException | IOException e) { LOGGER.error("Unable to scan accumulo 
datastore", e); } } else if (storeType.equals(HBaseStoreFactoryFamily.TYPE)) { throw new UnsupportedOperationException( "full scan for store type " + storeType + " not yet implemented."); } else { throw new UnsupportedOperationException( "full scan for store type " + storeType + " not implemented."); } } } ================================================ FILE: extensions/cli/debug/src/main/java/org/locationtech/geowave/cli/debug/SparkQuery.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.debug; import java.io.IOException; import java.net.URISyntaxException; import java.util.Objects; import org.apache.spark.SparkConf; import org.apache.spark.sql.SparkSession; import org.locationtech.geowave.analytic.spark.GeoWaveRDDLoader; import org.locationtech.geowave.analytic.spark.GeoWaveSparkConf; import org.locationtech.geowave.analytic.spark.RDDOptions; import org.locationtech.geowave.analytic.spark.spatial.SpatialJoinRunner; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.geotime.store.query.api.VectorQueryBuilder; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "sparkcql", parentOperation = DebugSection.class) @Parameters(commandDescription = "spark cql query") public class SparkQuery extends AbstractGeoWaveQuery { private static Logger LOGGER = LoggerFactory.getLogger(SparkQuery.class); @Parameter(names = "--cql", required = true, description = "CQL Filter executed client side") private String cqlStr; @Parameter(names = "--sparkMaster", description = "Spark Master") private String sparkMaster = "yarn"; @Parameter(names = {"-n", "--name"}, description = "The spark application name") private String appName = "Spatial Join Spark"; @Parameter(names = {"-ho", "--host"}, description = "The spark 
driver host") private String host = "localhost"; @Override protected long runQuery( final GeotoolsFeatureDataAdapter adapter, final String typeName, final String indexName, final DataStore dataStore, final boolean debug, final DataStorePluginOptions pluginOptions) { String jar = ""; try { jar = SpatialJoinRunner.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); } catch (final URISyntaxException e) { LOGGER.error("Unable to set jar location in spark configuration", e); } SparkConf addonOptions = GeoWaveSparkConf.getDefaultConfig(); addonOptions = addonOptions.setAppName(appName).setMaster(sparkMaster).set("spark.jars", jar); if (!Objects.equals(sparkMaster, "yarn")) { addonOptions = addonOptions.set("spark.driver.host", host); } final SparkSession session = GeoWaveSparkConf.createDefaultSession(addonOptions); long count = 0; final VectorQueryBuilder bldr = VectorQueryBuilder.newBuilder(); if (typeName != null) { bldr.addTypeName(typeName); } if (indexName != null) { bldr.indexName(indexName); } final RDDOptions rddOptions = new RDDOptions(); rddOptions.setQuery(bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build()); try { count = GeoWaveRDDLoader.loadRDD( session.sparkContext(), pluginOptions, rddOptions).getRawRDD().count(); } catch (final IOException e) { LOGGER.warn("Unable to load RDD", e); } return count; } } ================================================ FILE: extensions/cli/debug/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.cli.debug.DebugOperationsProvider ================================================ FILE: extensions/cli/dynamodb-embed/pom.xml ================================================ 4.0.0 geowave-extension-parent org.locationtech.geowave ../../ 2.0.2-SNAPSHOT geowave-cli-dynamodb-embed GeoWave DynamoDB Embedded Server Geowave DynamoDB Embedded Server 
org.locationtech.geowave geowave-core-index ${project.version} org.apache.commons commons-exec 1.3 org.locationtech.geowave geowave-adapter-raster ${project.version} org.locationtech.geowave geowave-core-cli ${project.version} org.locationtech.geowave geowave-datastore-dynamodb ${project.version} provided com.jcraft jsch 0.1.55 build-installer-plugin maven-assembly-plugin ================================================ FILE: extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/DynamoDBLocal.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.dynamodb.cli; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecuteResultHandler; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.Executor; import org.apache.commons.io.IOUtils; import org.codehaus.plexus.archiver.tar.TarGZipUnArchiver; import org.codehaus.plexus.logging.console.ConsoleLogger; import org.slf4j.LoggerFactory; import com.jcraft.jsch.Logger; public class DynamoDBLocal { private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(DynamoDBLocal.class); // these need to move to config private static final String DYNDB_URL = "https://s3-us-west-2.amazonaws.com/dynamodb-local/"; private static final String DYNDB_TAR = "dynamodb_local_latest.tar.gz"; public static final int DEFAULT_PORT = 8000; private static final long EMULATOR_SPINUP_DELAY_MS = 30000L; public static final File DEFAULT_DIR = new File("./temp"); private final File dynLocalDir; private final int port; private ExecuteWatchdog watchdog; public DynamoDBLocal() { this(null, null); } public DynamoDBLocal(final String localDir) { this(localDir, null); } public DynamoDBLocal(final int port) { this(null, port); } public DynamoDBLocal(final String localDir, final Integer port) { if ((localDir != null) && !localDir.isEmpty()) { dynLocalDir = new File(localDir); } else { dynLocalDir = new File(DEFAULT_DIR, 
"dynamodb"); } if (port != null) { this.port = port; } else { this.port = DEFAULT_PORT; } if (!dynLocalDir.exists() && !dynLocalDir.mkdirs()) { LOGGER.warn("unable to create directory " + dynLocalDir.getAbsolutePath()); } } public boolean start() { if (!isInstalled()) { try { if (!install()) { return false; } } catch (final IOException e) { LOGGER.error(e.getMessage()); return false; } } try { startDynamoLocal(); } catch (IOException | InterruptedException e) { LOGGER.error(e.getMessage()); return false; } return true; } public boolean isRunning() { return ((watchdog != null) && watchdog.isWatching()); } public void stop() { // first, ask the watchdog nicely: watchdog.destroyProcess(); } private boolean isInstalled() { final File dynLocalJar = new File(dynLocalDir, "DynamoDBLocal.jar"); return (dynLocalJar.canRead()); } protected boolean install() throws IOException { HttpURLConnection.setFollowRedirects(true); final URL url = new URL(DYNDB_URL + DYNDB_TAR); final File downloadFile = new File(dynLocalDir, DYNDB_TAR); if (!downloadFile.exists()) { try (FileOutputStream fos = new FileOutputStream(downloadFile)) { IOUtils.copyLarge(url.openStream(), fos); fos.flush(); } } final TarGZipUnArchiver unarchiver = new TarGZipUnArchiver(); unarchiver.enableLogging(new ConsoleLogger(Logger.WARN, "DynamoDB Local Unarchive")); unarchiver.setSourceFile(downloadFile); unarchiver.setDestDirectory(dynLocalDir); unarchiver.extract(); if (!downloadFile.delete()) { LOGGER.warn("cannot delete " + downloadFile.getAbsolutePath()); } // Check the install if (!isInstalled()) { LOGGER.error("DynamoDB Local install failed"); return false; } return true; } /** * Using apache commons exec for cmd line execution * * @param command * @return exitCode * @throws ExecuteException * @throws IOException * @throws InterruptedException */ private void startDynamoLocal() throws ExecuteException, IOException, InterruptedException { // java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar // 
-sharedDb final CommandLine cmdLine = new CommandLine("java"); cmdLine.addArgument("-Djava.library.path=" + dynLocalDir + "/DynamoDBLocal_lib"); cmdLine.addArgument("-jar"); cmdLine.addArgument(dynLocalDir + "/DynamoDBLocal.jar"); cmdLine.addArgument("-sharedDb"); cmdLine.addArgument("-inMemory"); cmdLine.addArgument("-port"); cmdLine.addArgument(Integer.toString(port)); System.setProperty("aws.accessKeyId", "dummy"); System.setProperty("aws.secretKey", "dummy"); // Using a result handler makes the emulator run async final DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler(); // watchdog shuts down the emulator, later watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT); final Executor executor = new DefaultExecutor(); executor.setWatchdog(watchdog); executor.execute(cmdLine, resultHandler); // we need to wait here for a bit, in case the emulator needs to update // itself Thread.sleep(EMULATOR_SPINUP_DELAY_MS); } } ================================================ FILE: extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/DynamoDBOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.dynamodb.cli; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class DynamoDBOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] {DynamoDBSection.class, RunDynamoDBLocal.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/DynamoDBSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.dynamodb.cli; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import org.locationtech.geowave.core.cli.operations.util.UtilSection; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "dynamodb", parentOperation = UtilSection.class) @Parameters(commandDescription = "DynamoDB embedded server commands") public class DynamoDBSection extends DefaultOperation { } ================================================ FILE: extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/RunDynamoDBLocal.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * * See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. 
This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.datastore.dynamodb.cli;

import java.util.concurrent.TimeUnit;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.Command;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;

/**
 * CLI command that boots a standalone DynamoDB Local emulator, either interactively (blocking on
 * stdin until Enter is pressed) or as a daemon stopped via a JVM shutdown hook.
 */
@GeowaveOperation(name = "run", parentOperation = DynamoDBSection.class)
@Parameters(
    commandDescription = "Runs a standalone DynamoDB server for test and debug with GeoWave")
public class RunDynamoDBLocal extends DefaultOperation implements Command {
  private static final Logger LOGGER = LoggerFactory.getLogger(RunDynamoDBLocal.class);

  @ParametersDelegate
  private RunDynamoDBLocalOptions options = new RunDynamoDBLocalOptions();

  @Parameter(
      names = {"--interactive", "-i"},
      arity = 1,
      description = "Whether to prompt for user input to end the process")
  private boolean interactive = true;

  /** Prep the driver & run the operation. */
  @Override
  public void execute(final OperationParams params) {
    try {
      final DynamoDBLocal server = options.getServer();
      server.start();
      if (!interactive) {
        // daemon mode: stop the emulator from a shutdown hook, then sleep indefinitely
        Runtime.getRuntime().addShutdownHook(new Thread() {
          @Override
          public void run() {
            try {
              server.stop();
            } catch (final Exception e) {
              LOGGER.warn("Unable to shutdown DynamoDB", e);
              System.out.println("Error shutting down DynamoDB.");
            }
            System.out.println("Shutting down!");
          }
        });
        while (true) {
          Thread.sleep(TimeUnit.MILLISECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS));
        }
      } else {
        // interactive mode: block until the user presses Enter, then stop in-process
        System.out.println("Press Enter to shutdown..");
        System.in.read();
        System.out.println("Shutting down!");
        server.stop();
      }
    } catch (final Exception e) {
      LOGGER.error("Unable to run embedded DynamoDB server", e);
    }
  }
}


================================================
FILE: extensions/cli/dynamodb-embed/src/main/java/org/locationtech/geowave/datastore/dynamodb/cli/RunDynamoDBLocalOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved.
This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.datastore.dynamodb.cli; import java.io.IOException; import com.beust.jcommander.Parameter; public class RunDynamoDBLocalOptions { @Parameter(names = {"--directory", "-d"}, description = "The directory to use for DynamoDB") private String directory = DynamoDBLocal.DEFAULT_DIR.getPath(); @Parameter( names = {"--port", "-p"}, description = "The port to use for DynamoDB (defaults to " + DynamoDBLocal.DEFAULT_PORT + ")") private Integer port = DynamoDBLocal.DEFAULT_PORT; public String getDirectory() { return directory; } public void setDirectory(final String directory) { this.directory = directory; } public Integer getPort() { return port; } public void setPort(Integer port) { this.port = port; } public DynamoDBLocal getServer() throws IOException { return new DynamoDBLocal(directory, port); } } ================================================ FILE: extensions/cli/dynamodb-embed/src/main/resources/META-INF/services/org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi ================================================ org.locationtech.geowave.datastore.dynamodb.cli.DynamoDBOperationProvider ================================================ FILE: extensions/cli/geoserver/pom.xml ================================================ 4.0.0 org.locationtech.geowave geowave-extension-parent ../../ 2.0.2-SNAPSHOT geowave-cli-geoserver Geowave GeoServer Commandline Tools Geowave Commandline Tools For Managing GeoServer Layers and Data Stores org.glassfish.jersey.core jersey-client org.glassfish.jersey.media jersey-media-multipart org.locationtech.geowave geowave-core-cli ${project.version} org.locationtech.geowave geowave-core-store ${project.version} org.locationtech.geowave geowave-adapter-raster ${project.version} 
org.locationtech.geowave geowave-adapter-vector ${project.version} org.mockito mockito-all 1.9.5 test build-installer-plugin maven-assembly-plugin ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/ConfigGeoServerCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_NAMESPACE_PREFIX; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_PASS; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_URL; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_USER; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_WORKSPACE; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.commons.lang3.StringUtils; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.converters.GeoWaveBaseConverter; import org.locationtech.geowave.core.cli.converters.OptionalPasswordConverter; import org.locationtech.geowave.core.cli.operations.config.ConfigSection; import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions; import org.locationtech.geowave.core.cli.prefix.JCommanderPrefixTranslator; import org.locationtech.geowave.core.cli.prefix.JCommanderTranslationMap; import org.locationtech.geowave.core.cli.prefix.TranslationEntry; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import 
com.beust.jcommander.ParametersDelegate; @GeowaveOperation(name = "geoserver", parentOperation = ConfigSection.class) @Parameters(commandDescription = "Create a local configuration for GeoServer") public class ConfigGeoServerCommand extends ServiceEnabledCommand { /** Return "200 OK" for the config geoserver command. */ @Override public Boolean successStatusIs200() { return true; } @Parameter(names = {"-u", "--username"}, description = "GeoServer User") private String username; // GEOWAVE-811 - adding additional password options for added protection @Parameter( names = {"-p", "--password"}, description = "GeoServer Password - " + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION, converter = OptionalPasswordConverter.class) private String pass; @Parameter(names = {"-ws", "--workspace"}, description = "GeoServer Default Workspace") private String workspace; @Parameter(description = "") private List parameters = new ArrayList(); private String url = null; @ParametersDelegate private GeoServerSSLConfigurationOptions sslConfigOptions = new GeoServerSSLConfigurationOptions(); @Override public boolean prepare(final OperationParams params) { boolean retval = true; retval |= super.prepare(params); final String username = getName(); final String password = getPass(); final boolean usernameSpecified = (username != null) && !"".equals(username.trim()); final boolean passwordSpecified = (password != null) && !"".equals(password.trim()); if (usernameSpecified || passwordSpecified) { if (usernameSpecified && !passwordSpecified) { setPass( GeoWaveBaseConverter.promptAndReadPassword( "Please enter a password for username [" + username + "]: ")); if ((getPass() == null) || "".equals(getPass().trim())) { throw new ParameterException("Password cannot be null or empty if username is specified"); } } else if (passwordSpecified && !usernameSpecified) { setName( GeoWaveBaseConverter.promptAndReadValue( "Please enter a username associated with specified password: ")); if ((getName() 
== null) || "".equals(getName().trim())) { throw new ParameterException("Username cannot be null or empty if password is specified"); } } } return retval; } @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String getName() { return username; } public void setName(final String name) { username = name; } public String getPass() { return pass; } public void setPass(final String pass) { this.pass = pass; } public String getWorkspace() { return workspace; } public void setWorkspace(final String workspace) { this.workspace = workspace; } public GeoServerSSLConfigurationOptions getGeoServerSSLConfigurationOptions() { return sslConfigOptions; } public void setGeoServerSSLConfigurationOptions( final GeoServerSSLConfigurationOptions sslConfigOptions) { this.sslConfigOptions = sslConfigOptions; } @Override public String usage() { StringBuilder builder = new StringBuilder(); final List nameArray = new ArrayList<>(); final JCommanderPrefixTranslator translator = new JCommanderPrefixTranslator(); translator.addObject(this); final JCommanderTranslationMap map = translator.translate(); map.createFacadeObjects(); // Copy default parameters over for help display. map.transformToFacade(); JCommander jc = new JCommander(); final Map translations = map.getEntries(); for (final Object obj : map.getObjects()) { for (final Field field : obj.getClass().getDeclaredFields()) { final TranslationEntry tEntry = translations.get(field.getName()); if ((tEntry != null) && (tEntry.getObject() instanceof ConfigGeoServerCommand)) { jc.addObject(obj); break; } } } final String programName = StringUtils.join(nameArray, " "); jc.setProgramName(programName); jc.getUsageFormatter().usage(builder); // Trim excess newlines. 
final String operations = builder.toString().trim(); builder = new StringBuilder(); builder.append(operations); builder.append("\n\n"); builder.append(" "); jc = new JCommander(); for (final Object obj : map.getObjects()) { for (final Field field : obj.getClass().getDeclaredFields()) { final TranslationEntry tEntry = translations.get(field.getName()); if ((tEntry != null) && !(tEntry.getObject() instanceof ConfigGeoServerCommand)) { final Parameters parameters = tEntry.getObject().getClass().getAnnotation(Parameters.class); if (parameters != null) { builder.append(parameters.commandDescription()); } else { builder.append("Additional Parameters"); } jc.addObject(obj); break; } } } jc.setProgramName(programName); jc.getUsageFormatter().usage(builder); builder.append("\n\n"); return builder.toString().trim(); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } url = parameters.get(0); final Properties existingProps = getGeoWaveConfigProperties(params); // all switches are optional if (url != null) { existingProps.setProperty(GEOSERVER_URL, url); } if (getName() != null) { existingProps.setProperty(GEOSERVER_USER, getName()); } if (getPass() != null) { existingProps.setProperty(GEOSERVER_PASS, getPass()); } if (getWorkspace() != null) { existingProps.setProperty(GEOSERVER_WORKSPACE, getWorkspace()); } // save properties from ssl configurations sslConfigOptions.saveProperties(existingProps); // Write properties file ConfigOptions.writeProperties( getGeoWaveConfigFile(params), existingProps, this.getClass(), GEOSERVER_NAMESPACE_PREFIX, params.getConsole()); GeoServerRestClient.invalidateInstance(); // generate a return for rest calls final StringBuilder builder = new StringBuilder(); for (final Object key : existingProps.keySet()) { if (key.toString().startsWith("geoserver")) { builder.append(key.toString() + "=" + 
existingProps.getProperty(key.toString()) + "\n"); } } return builder.toString(); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver; import javax.ws.rs.NotAuthorizedException; import javax.ws.rs.core.Response; import org.apache.spark.status.api.v1.ForbiddenException; import org.locationtech.geowave.core.cli.api.OperationParams; import org.locationtech.geowave.core.cli.api.ServiceEnabledCommand; import org.locationtech.geowave.core.cli.exceptions.DuplicateEntryException; import org.locationtech.geowave.core.cli.exceptions.TargetNotFoundException; public abstract class GeoServerCommand extends ServiceEnabledCommand { protected GeoServerRestClient geoserverClient = null; @Override public boolean prepare(final OperationParams params) { if (geoserverClient == null) { // Create the rest client geoserverClient = GeoServerRestClient.getInstance( new GeoServerConfig(getGeoWaveConfigFile(params), params.getConsole()), params.getConsole()); } // Successfully prepared return true; } public boolean isDuplicate(final Response response, final String errorMessage) throws TargetNotFoundException { if (errorMessage.toLowerCase().contains("already exists")) { return true; } return false; } public T handleError(final Response response, final String errorMessage) throws Exception { if (isDuplicate(response, errorMessage)) { throw new DuplicateEntryException(errorMessage); } switch (response.getStatus()) { case 401: throw new NotAuthorizedException(errorMessage); case 403: throw new ForbiddenException(errorMessage); case 404: throw new TargetNotFoundException(errorMessage); // GeoServer responses for 500 codes are poorly formatted so // don't return that response case 500: throw new Exception("Internal Server Error\n 
GeoServer Response Code = 500"); default: throw new Exception(errorMessage); } } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerConfig.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver;

import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_PASS;
import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_URL;
import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_USER;
import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_WORKSPACE;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.util.Properties;
import org.locationtech.geowave.core.cli.operations.config.options.ConfigOptions;
import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils;
import org.locationtech.geowave.core.cli.utils.URLUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.internal.Console;

/**
 * GeoServer connection settings (URL, credentials, workspace) for the GeoWave CLI, backed by a
 * properties file. On construction, any missing entries are populated with defaults and the
 * properties file is written back so subsequent runs see a complete configuration.
 */
public class GeoServerConfig {
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoServerConfig.class);

  // Defaults used when the corresponding key is absent from the properties file.
  public static final String DEFAULT_URL = "localhost:8080";
  public static final String DEFAULT_USER = "admin";
  public static final String DEFAULT_PASS = "geoserver";
  public static final String DEFAULT_WORKSPACE = "geowave";
  // Suffixes appended to a GeoWave store name to derive GeoServer store names.
  public static final String DEFAULT_CS = "-raster";
  public static final String DEFAULT_DS = "-vector";
  public static final String DISPLAY_NAME_PREFIX = "GeoWave Datastore - ";
  public static final String QUERY_INDEX_STRATEGY_KEY = "Query Index Strategy";

  // Effective (resolved) connection settings; password is held decrypted in memory.
  private String url = null;
  private String user = null;
  private String pass = null;
  private String workspace = null;
  // The backing properties file; may be null (then nothing is loaded and defaults apply).
  private final File propFile;
  private final Properties gsConfigProperties;

  /**
   * Loads GeoServer settings from the given properties file, filling in defaults for any missing
   * keys. If any default was applied, the updated properties are written back to the file.
   *
   * @param propFile properties file to load from / save to; if null or nonexistent, an empty
   *        property set is used and defaults are applied
   * @param console console used for any interactive prompting by the config/security utilities
   */
  public GeoServerConfig(final File propFile, final Console console) {
    this.propFile = propFile;
    if ((propFile != null) && propFile.exists()) {
      gsConfigProperties = ConfigOptions.loadProperties(propFile);
    } else {
      gsConfigProperties = new Properties();
    }
    // Tracks whether any default was applied, so we only rewrite the file when needed.
    boolean update = false;
    url = gsConfigProperties.getProperty(GEOSERVER_URL);
    if (url == null) {
      url = DEFAULT_URL;
      gsConfigProperties.setProperty(GEOSERVER_URL, url);
      update = true;
    }
    user = gsConfigProperties.getProperty(GEOSERVER_USER);
    if (user == null) {
      user = DEFAULT_USER;
      gsConfigProperties.setProperty(GEOSERVER_USER, user);
      update = true;
    }
    pass = gsConfigProperties.getProperty(GEOSERVER_PASS);
    if (pass == null) {
      pass = DEFAULT_PASS;
      gsConfigProperties.setProperty(GEOSERVER_PASS, pass);
      update = true;
    } else {
      try {
        final File resourceTokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(propFile);
        // if password in config props is encrypted, need to decrypt it
        pass =
            SecurityUtils.decryptHexEncodedValue(
                pass,
                resourceTokenFile.getCanonicalPath(),
                console);
      } catch (final Exception e) {
        // Decryption failure is logged but not fatal; the raw property value is kept as-is.
        LOGGER.error("An error occurred decrypting password: " + e.getLocalizedMessage(), e);
      }
    }
    workspace = gsConfigProperties.getProperty(GEOSERVER_WORKSPACE);
    if (workspace == null) {
      workspace = DEFAULT_WORKSPACE;
      gsConfigProperties.setProperty(GEOSERVER_WORKSPACE, workspace);
      update = true;
    }
    if (update) {
      // Persist the defaults so the config file is complete for future invocations.
      ConfigOptions.writeProperties(propFile, gsConfigProperties, console);
      LOGGER.info("GeoServer Config Saved");
    }
  }

  /** Secondary constructor for direct-access testing; uses the default CLI property file. */
  public GeoServerConfig(final Console console) {
    this(ConfigOptions.getDefaultPropertyFile(console), console);
  }

  /**
   * Returns the validated GeoServer base URL. When the configured value has no scheme separator
   * ("//"), it is presumably a bare host:port, and the standard "/geoserver" context path is
   * appended (NOTE(review): confirm this assumption against URLUtils.getUrl).
   *
   * @return the normalized URL, or the unvalidated internal URL if validation fails
   */
  public String getUrl() {
    String internalUrl;
    if (!url.contains("//")) {
      internalUrl = url + "/geoserver";
    } else {
      internalUrl = url;
    }
    try {
      return URLUtils.getUrl(internalUrl);
    } catch (MalformedURLException | URISyntaxException e) {
      // Validation failure is non-fatal: log and fall back to the unvalidated URL.
      LOGGER.error("Error discovered in validating specified url: " + e.getLocalizedMessage(), e);
      return internalUrl;
    }
  }

  public void setUrl(final String url) {
    this.url = url;
  }

  public String getUser() {
    return user;
  }

  public void setUser(final String user) {
    this.user = user;
  }

  /** @return the (decrypted, if it was stored encrypted) GeoServer password */
  public String getPass() {
    return pass;
  }

  public void setPass(final String pass) {
    this.pass = pass;
  }

  public String getWorkspace() {
    return workspace;
  }

  public void setWorkspace(final String workspace) {
    this.workspace = workspace;
  }

  /** @return the backing properties file (may be null) */
  public File getPropFile() {
    return propFile;
  }

  public Properties getGsConfigProperties() {
    return gsConfigProperties;
  }
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerOperationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class GeoServerOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] {GeoServerSection.class, ConfigGeoServerCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerRemoveCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver;

/**
 * Common base for GeoServer "remove"-style commands. GeoServer REST delete operations report
 * success with HTTP 200 (OK) rather than 201 (Created), so this base overrides the success-status
 * hook accordingly for all remove commands.
 */
public abstract class GeoServerRemoveCommand extends GeoServerCommand {
  /** Return "200 OK" for all remove commands. */
  @Override
  public Boolean successStatusIs200() {
    return true;
  }
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerRestClient.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYMGR_ALG; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYMGR_PROVIDER; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_FILE; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_PASS; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_PROVIDER; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEYSTORE_TYPE; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_KEY_PASS; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_SECURITY_PROTOCOL; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTMGR_ALG; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTMGR_PROVIDER; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_FILE; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_PASS; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_PROVIDER; import static org.locationtech.geowave.cli.geoserver.constants.GeoServerConstants.GEOSERVER_SSL_TRUSTSTORE_TYPE; import java.io.Closeable; 
import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.ws.rs.PathParam; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.glassfish.jersey.SslConfigurator; import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter; import org.locationtech.geowave.adapter.vector.plugin.GeoWavePluginConfig; import org.locationtech.geowave.cli.geoserver.layer.GeoServerAddLayerCommand.AddOption; import org.locationtech.geowave.core.cli.operations.config.security.crypto.BaseEncryption; import org.locationtech.geowave.core.cli.operations.config.security.utils.SecurityUtils; import org.locationtech.geowave.core.cli.utils.FileUtils; import org.locationtech.geowave.core.geotime.store.GeotoolsFeatureDataAdapter; import org.locationtech.geowave.core.store.adapter.InternalDataAdapter; import org.locationtech.geowave.core.store.api.DataStore; import org.locationtech.geowave.core.store.api.DataTypeAdapter; import 
org.locationtech.geowave.core.store.cli.CLIUtils; import org.locationtech.geowave.core.store.cli.store.DataStorePluginOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import com.beust.jcommander.internal.Console; import net.sf.json.JSONArray; import net.sf.json.JSONObject; public class GeoServerRestClient { private static GeoServerRestClient SINGLETON_INSTANCE; private static final Logger LOGGER = LoggerFactory.getLogger(GeoServerRestClient.class); private static final int defaultIndentation = 2; private static class DataAdapterInfo { String typeName; Boolean isRaster; } private final GeoServerConfig config; private final Console console; private WebTarget webTarget = null; private GeoServerRestClient(final GeoServerConfig config, final Console console) { this.config = config; this.console = console; } private GeoServerRestClient( final GeoServerConfig config, final WebTarget webTarget, final Console console) { this.config = config; this.webTarget = webTarget; this.console = console; } public static GeoServerRestClient getInstance( final GeoServerConfig config, final Console console) { if (SINGLETON_INSTANCE == null) { SINGLETON_INSTANCE = new GeoServerRestClient(config, console); } return SINGLETON_INSTANCE; } public void setWebTarget(final WebTarget webTarget) { this.webTarget = webTarget; } public static void invalidateInstance() { SINGLETON_INSTANCE = null; } public GeoServerConfig getConfig() { return config; } private WebTarget getWebTarget() { if (webTarget == null) { String url = getConfig().getUrl(); if (url != null) { url = url.trim().toLowerCase(Locale.ROOT); Client client = null; if (url.startsWith("http://")) { client = ClientBuilder.newClient(); } else if (url.startsWith("https://")) { final SslConfigurator sslConfig = SslConfigurator.newInstance(); if (getConfig().getGsConfigProperties() != null) { loadSSLConfigurations(sslConfig, getConfig().getGsConfigProperties()); } final 
SSLContext sslContext = sslConfig.createSSLContext(); HttpsURLConnection.setDefaultSSLSocketFactory(sslContext.getSocketFactory()); client = ClientBuilder.newBuilder().sslContext(sslContext).build(); } if (client != null) { client.register( HttpAuthenticationFeature.basic(getConfig().getUser(), getConfig().getPass())); try { webTarget = client.target(new URI(url)); } catch (final URISyntaxException e) { LOGGER.error("Unable to parse geoserver URL: " + url, e); } } } } return webTarget; } /** * If connecting to GeoServer over HTTPS (HTTP+SSL), we need to specify the SSL properties. The * SSL properties are set from a properties file. Since the properties will be different, based on * one GeoServer deployment compared to another, this gives the ability to specify any of the * fields. If the key is in provided properties file, it will be loaded into the GeoServer SSL * configuration. * * @param sslConfig SSL Configuration object for use when instantiating an HTTPS connection to * GeoServer * @param gsConfigProperties Properties object with applicable GeoServer connection properties */ private void loadSSLConfigurations( final SslConfigurator sslConfig, final Properties gsConfigProperties) { if ((gsConfigProperties != null) && (sslConfig != null)) { // default to TLS for geoserver ssl security protocol sslConfig.securityProtocol( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_SECURITY_PROTOCOL, "TLS")); // check truststore property settings if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_FILE)) { // resolve file path - either relative or absolute - then get // the canonical path final File trustStoreFile = new File(getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_FILE)); if (trustStoreFile != null) { try { sslConfig.trustStoreFile(trustStoreFile.getCanonicalPath()); } catch (final IOException e) { LOGGER.error( "An error occurred loading the truststore at the specified path [" + getPropertyValue(gsConfigProperties, 
GEOSERVER_SSL_TRUSTSTORE_FILE) + "]:" + e.getLocalizedMessage(), e); } } } if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_PASS)) { sslConfig.trustStorePassword( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_PASS)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_TYPE)) { sslConfig.trustStoreType( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_TYPE)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTSTORE_PROVIDER)) { sslConfig.trustStoreProvider( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTSTORE_PROVIDER)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTMGR_ALG)) { sslConfig.trustManagerFactoryAlgorithm( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTMGR_ALG)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_TRUSTMGR_PROVIDER)) { sslConfig.trustManagerFactoryProvider( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_TRUSTMGR_PROVIDER)); } // check keystore property settings if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_FILE)) { // resolve file path - either relative or absolute - then get // the canonical path // HP Fortify "Path Traversal" false positive // What Fortify considers "user input" comes only // from users with OS-level access anyway final File keyStoreFile = new File( FileUtils.formatFilePath( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_FILE))); if (keyStoreFile != null) { try { sslConfig.keyStoreFile(keyStoreFile.getCanonicalPath()); } catch (final IOException e) { LOGGER.error( "An error occurred loading the keystore at the specified path [" + getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_FILE) + "]:" + e.getLocalizedMessage(), e); } } } if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_PASS)) { sslConfig.keyStorePassword( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_PASS)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEY_PASS)) { 
sslConfig.keyPassword(getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEY_PASS)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_PROVIDER)) { sslConfig.keyStoreProvider( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_PROVIDER)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYSTORE_TYPE)) { sslConfig.keyStoreType(getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYSTORE_TYPE)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYMGR_ALG)) { sslConfig.keyManagerFactoryAlgorithm( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYMGR_ALG)); } if (gsConfigProperties.containsKey(GEOSERVER_SSL_KEYMGR_PROVIDER)) { sslConfig.keyManagerFactoryProvider( getPropertyValue(gsConfigProperties, GEOSERVER_SSL_KEYMGR_PROVIDER)); } } } private String getPropertyValue(final Properties configProps, final String configKey) { return getPropertyValue(configProps, configKey, null); } private String getPropertyValue( final Properties configProps, final String configKey, final String defaultValue) { String configValue = defaultValue; if (configProps != null) { configValue = configProps.getProperty(configKey, defaultValue); if (BaseEncryption.isProperlyWrapped(configValue)) { try { final File resourceTokenFile = SecurityUtils.getFormattedTokenKeyFileForConfig(getConfig().getPropFile()); // if password in config props is encrypted, need to decrypt // it configValue = SecurityUtils.decryptHexEncodedValue( configValue, resourceTokenFile.getCanonicalPath(), console); return configValue; } catch (final Exception e) { LOGGER.error("An error occurred decrypting password: " + e.getLocalizedMessage(), e); return configValue; } } } return configValue; } /** * Convenience - add layer(s) for the given store to geoserver */ public Response addLayer( final String workspaceName, final String storeName, final String adapterId, final String defaultStyle) { // retrieve the adapter info list for the store boolean layerAdded = false; int retStatus = -1; final 
StringBuilder buf = new StringBuilder("{\"adapters\":["); final ArrayList adapterInfoList = getStoreAdapterInfo(storeName, adapterId); LOGGER.debug("Finished retrieving adapter list"); if ((adapterInfoList.size() > 1) && (adapterId == null)) { LOGGER.debug("addlayer doesn't know how to deal with multiple adapters"); final String descr = "Failed to add layer(s). Please use -a, or choose one of these layers with -id:"; final JSONObject jsonObj = getJsonFromAdapters(adapterInfoList, descr); LOGGER.debug(jsonObj.toString()); return Response.ok(jsonObj.toString(defaultIndentation)).build(); } // verify the workspace exists if (!workspaceExists(workspaceName)) { LOGGER.debug("addlayer needs to create the " + workspaceName + " workspace"); // If the WS cannot be created, return the error final Response addWsResponse = addWorkspace(workspaceName); if (addWsResponse.getStatus() != Status.CREATED.getStatusCode()) { return addWsResponse; } } final String cvgStoreName = storeName + GeoServerConfig.DEFAULT_CS; final String dataStoreName = storeName + GeoServerConfig.DEFAULT_DS; // iterate through data adapters for (final DataAdapterInfo dataAdapterInfo : adapterInfoList) { // handle coverage stores & coverages if (dataAdapterInfo.isRaster) { // verify coverage store exists final Response getCsResponse = getCoverageStore(workspaceName, cvgStoreName, true); if (getCsResponse.getStatus() == Status.NOT_FOUND.getStatusCode()) { final Response addCsResponse = addCoverageStore(workspaceName, cvgStoreName, storeName, null, null, null); if (addCsResponse.getStatus() != Status.CREATED.getStatusCode()) { final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + addCsResponse.getStatus() + ",\"Message\":\"Adding coverage store returned error: " + addCsResponse.readEntity(String.class) + "\"},"; buf.append(ret); if (retStatus == -1) { retStatus = addCsResponse.getStatus(); } else if (retStatus != addCsResponse.getStatus()) { retStatus = 400; } continue; } } // else if 
(getCsResponse.getStatus() != Status.OK.getStatusCode()) { // GeoServer get commands will almost always return a 200 or // 404 unless there is a sever error final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + getCsResponse.getStatus() + ",\"Message\":\"Checking Existence of coverage store returned error: " + getCsResponse.readEntity(String.class) + "\"},"; buf.append(ret); if (retStatus == -1) { retStatus = getCsResponse.getStatus(); } else if (retStatus != getCsResponse.getStatus()) { retStatus = 400; } continue; } // See if the coverage already exists final Response getCvResponse = getCoverage(workspaceName, cvgStoreName, dataAdapterInfo.typeName, true); if (getCvResponse.getStatus() == Status.OK.getStatusCode()) { LOGGER.debug(dataAdapterInfo.typeName + " layer already exists"); retStatus = 400; final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":400,\"Message\":\"Coverage already exists\"},"; buf.append(ret); continue; } // We have a coverage store. 
Add the layer per the adapter ID final Response addCvResponse = addCoverage(workspaceName, cvgStoreName, dataAdapterInfo.typeName); // If any layers get added, we will return a 200 if (addCvResponse.getStatus() == Status.CREATED.getStatusCode()) { final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + addCvResponse.getStatus() + ",\"Message\":\"Coverage added successfully\"},"; buf.append(ret); layerAdded = true; } else { final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + addCvResponse.getStatus() + ",\"Message\":\"Adding coverage returned error: " + addCvResponse.readEntity(String.class) + "\"},"; buf.append(ret); // If there are multiple different error codes, just return // a 400 if (retStatus == -1) { retStatus = addCvResponse.getStatus(); } else if (retStatus != addCvResponse.getStatus()) { retStatus = 400; } } } // handle datastores and feature layers else { // verify datastore exists final Response getDsResponse = getDatastore(workspaceName, dataStoreName, true); if (getDsResponse.getStatus() == Status.NOT_FOUND.getStatusCode()) { final Response addDsResponse = addDatastore(workspaceName, dataStoreName, storeName); if (addDsResponse.getStatus() != Status.CREATED.getStatusCode()) { final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + addDsResponse.getStatus() + ",\"Message\":\"Adding data store returned error: " + addDsResponse.readEntity(String.class) + "\"},"; buf.append(ret); if (retStatus == -1) { retStatus = addDsResponse.getStatus(); } else if (retStatus != addDsResponse.getStatus()) { retStatus = 400; } continue; } } else if (getDsResponse.getStatus() != Status.OK.getStatusCode()) { // GeoServer get commands will almost always return a 200 or // 404 unless there is a sever error final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + getDsResponse.getStatus() + ",\"Message\":\"Checking Existence of data store returned error: " + getDsResponse.readEntity(String.class) + "\"},"; 
buf.append(ret); if (retStatus == -1) { retStatus = getDsResponse.getStatus(); } else if (retStatus != getDsResponse.getStatus()) { retStatus = 400; } continue; } LOGGER.debug("Checking for existing feature layer: " + dataAdapterInfo.typeName); // See if the feature layer already exists final Response getFlResponse = getFeatureLayer(dataAdapterInfo.typeName, true); if (getFlResponse.getStatus() == Status.OK.getStatusCode()) { LOGGER.debug(dataAdapterInfo.typeName + " layer already exists"); retStatus = 400; final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":400,\"Message\":\"Feature Layer already exists\"},"; buf.append(ret); continue; } LOGGER.debug( "Get feature layer: " + dataAdapterInfo.typeName + " returned " + getFlResponse.getStatus()); // We have a datastore. Add the layer per the adapter ID final Response addFlResponse = addFeatureLayer(workspaceName, dataStoreName, dataAdapterInfo.typeName, defaultStyle); // If any layers get added, we will return a 200 if (addFlResponse.getStatus() == Status.CREATED.getStatusCode()) { final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + addFlResponse.getStatus() + ",\"Message\":\"Feature Layer added successfully\"},"; buf.append(ret); layerAdded = true; } else { final String ret = "{ \"Adapter\":\"" + adapterId + "\",\"Status\":" + addFlResponse.getStatus() + ",\"Message\":\"Adding data store error: " + addFlResponse.readEntity(String.class) + "\"},"; buf.append(ret); // If there are multiple different error codes, just return // a 400 if (retStatus == -1) { retStatus = addFlResponse.getStatus(); } else if (retStatus != addFlResponse.getStatus()) { retStatus = 400; } } } } // Report back to the caller the adapter IDs and the types that were // used to create the layers buf.deleteCharAt(buf.length() - 1); buf.append("]}"); if (layerAdded) { return Response.ok(buf.toString()).build(); } else { final String ret = buf.toString(); return Response.status(400).entity(ret).build(); } } /** * Get 
JSON object(s) from adapter list */ private JSONObject getJsonFromAdapters( final ArrayList adapterInfoList, final String description) { final StringBuffer buf = new StringBuffer(); // If we made it this far, let's just iterate through the adapter IDs // and build the JSON response data buf.append("{'description':'" + description + "', " + "'layers':["); for (int i = 0; i < adapterInfoList.size(); i++) { final DataAdapterInfo info = adapterInfoList.get(i); buf.append("{'id':'" + info.typeName + "',"); buf.append("'type':'" + (info.isRaster ? "raster" : "vector") + "'}"); if (i < (adapterInfoList.size() - 1)) { buf.append(","); } } buf.append("]}"); return JSONObject.fromObject(buf.toString()); } /** * Check if workspace exists * * @param workspace * @return true if workspace exists, false if not */ public boolean workspaceExists(String workspace) { if (workspace == null) { workspace = config.getWorkspace(); } final Response getWsResponse = getWorkspaces(); if (getWsResponse.getStatus() == Status.OK.getStatusCode()) { final JSONObject jsonResponse = JSONObject.fromObject(getWsResponse.getEntity()); final JSONArray workspaces = jsonResponse.getJSONArray("workspaces"); for (int i = 0; i < workspaces.size(); i++) { final String wsName = workspaces.getJSONObject(i).getString("name"); if (wsName.equals(workspace)) { return true; } } } else { LOGGER.error("Error retrieving GeoServer workspace list"); } return false; } /** * Get list of workspaces from geoserver */ public Response getWorkspaces() { final Response resp = getWebTarget().path("rest/workspaces.json").request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); // get the workspace names final JSONArray workspaceArray = getArrayEntryNames( JSONObject.fromObject(resp.readEntity(String.class)), "workspaces", "workspace"); final JSONObject workspacesObj = new JSONObject(); workspacesObj.put("workspaces", workspaceArray); return 
Response.ok(workspacesObj.toString(defaultIndentation)).build(); } return resp; } /** * Add workspace to geoserver */ public Response addWorkspace(final String workspace) { return getWebTarget().path("rest/workspaces").request().post( Entity.entity("{'workspace':{'name':'" + workspace + "'}}", MediaType.APPLICATION_JSON)); } /** * Delete workspace from geoserver */ public Response deleteWorkspace(final String workspace) { return getWebTarget().path("rest/workspaces/" + workspace).queryParam( "recurse", "true").request().delete(); } /** * Get the string version of a datastore JSONObject from geoserver */ public Response getDatastore( final String workspaceName, final String datastoreName, final boolean quietOnNotFound) { final Response resp = getWebTarget().path( "rest/workspaces/" + workspaceName + "/datastores/" + datastoreName + ".json").queryParam("quietOnNotFound", quietOnNotFound).request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); final JSONObject datastore = JSONObject.fromObject(resp.readEntity(String.class)); if (datastore != null) { return Response.ok(datastore.toString(defaultIndentation)).build(); } } return resp; } /** * Get list of Datastore names from geoserver */ public Response getDatastores(final String workspaceName) { final Response resp = getWebTarget().path( "rest/workspaces/" + workspaceName + "/datastores.json").request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); // get the datastore names final JSONArray datastoreArray = getArrayEntryNames( JSONObject.fromObject(resp.readEntity(String.class)), "dataStores", "dataStore"); final JSONObject dsObj = new JSONObject(); dsObj.put("dataStores", datastoreArray); return Response.ok(dsObj.toString(defaultIndentation)).build(); } return resp; } /** * Add a geowave datastore to geoserver */ public Response addDatastore( final String workspaceName, String datastoreName, final String gwStoreName) { final DataStorePluginOptions 
inputStoreOptions = getStorePlugin(gwStoreName); if ((datastoreName == null) || datastoreName.isEmpty()) { datastoreName = gwStoreName + GeoServerConfig.DEFAULT_DS; } final String lockMgmt = "memory"; final String authMgmtPrvdr = "empty"; final String authDataUrl = ""; final String queryIndexStrategy = GeoWavePluginConfig.DEFAULT_QUERY_INDEX_STRATEGY; final String dataStoreJson = createDatastoreJson( inputStoreOptions.getType(), inputStoreOptions.getOptionsAsMap(), datastoreName, lockMgmt, authMgmtPrvdr, authDataUrl, queryIndexStrategy, true); // create a new geoserver style return getWebTarget().path("rest/workspaces/" + workspaceName + "/datastores").request().post( Entity.entity(dataStoreJson, MediaType.APPLICATION_JSON)); } /** * Delete a geowave datastore from geoserver */ public Response deleteDatastore(final String workspaceName, final String datastoreName) { return getWebTarget().path( "rest/workspaces/" + workspaceName + "/datastores/" + datastoreName).queryParam( "recurse", "true").request().delete(); } /** * Get a layer from geoserver */ public Response getFeatureLayer(final String layerName, final boolean quietOnNotFound) { final Response resp = getWebTarget().path("rest/layers/" + layerName + ".json").queryParam( "quietOnNotFound", quietOnNotFound).request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { final JSONObject layer = JSONObject.fromObject(resp.readEntity(String.class)); if (layer != null) { return Response.ok(layer.toString(defaultIndentation)).build(); } } return resp; } /** * Get list of layers from geoserver * * @param workspaceName : if null, don't filter on workspace * @param datastoreName : if null, don't filter on datastore * @param geowaveOnly : if true, only return geowave layers * @return the list of layers */ public Response getFeatureLayers( final String workspaceName, final String datastoreName, final boolean geowaveOnly) { final boolean wsFilter = ((workspaceName != null) && !workspaceName.isEmpty()); final 
boolean dsFilter = ((datastoreName != null) && !datastoreName.isEmpty()); final Response resp = getWebTarget().path("rest/layers.json").request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); // get the datastore names final JSONArray layerArray = getArrayEntryNames( JSONObject.fromObject(resp.readEntity(String.class)), "layers", "layer"); // holder for simple layer info (when geowaveOnly = false) final JSONArray layerInfoArray = new JSONArray(); final Map> namespaceLayersMap = new HashMap<>(); final Pattern p = Pattern.compile("workspaces/(.*?)/datastores/(.*?)/"); for (int i = 0; i < layerArray.size(); i++) { final boolean include = !geowaveOnly && !wsFilter && !dsFilter; // no // filtering // of // any // kind if (include) { // just grab it... layerInfoArray.add(layerArray.getJSONObject(i)); continue; // and move on } // at this point, we are filtering somehow. get some more info // about the layer final String name = layerArray.getJSONObject(i).getString("name"); final String layer = (String) getFeatureLayer(name, false).getEntity(); // get the workspace and name for each datastore String ws = null; String ds = null; final Matcher m = p.matcher(layer); if (m.find()) { ws = m.group(1); ds = m.group(2); } // filter on datastore? if (!dsFilter || ((ds != null) && ds.equals(datastoreName))) { // filter on workspace? 
if (!wsFilter || ((ws != null) && ws.equals(workspaceName))) { final JSONObject datastore = JSONObject.fromObject(getDatastore(ds, ws, false).getEntity()).getJSONObject( "dataStore"); // only process GeoWave layers if (geowaveOnly) { if ((datastore != null) && datastore.containsKey("type") && datastore.getString("type").startsWith("GeoWave Datastore")) { JSONArray entryArray = null; if (datastore.get("connectionParameters") instanceof JSONObject) { entryArray = datastore.getJSONObject("connectionParameters").getJSONArray("entry"); } else if (datastore.get("connectionParameters") instanceof JSONArray) { entryArray = datastore.getJSONArray("connectionParameters").getJSONObject(0).getJSONArray( "entry"); } if (entryArray == null) { LOGGER.error( "entry Array is null - didn't find a connectionParameters datastore object that was a JSONObject or JSONArray"); } else { // group layers by namespace for (int j = 0; j < entryArray.size(); j++) { final JSONObject entry = entryArray.getJSONObject(j); final String key = entry.getString("@key"); final String value = entry.getString("$"); if (key.startsWith("gwNamespace")) { if (namespaceLayersMap.containsKey(value)) { namespaceLayersMap.get(value).add(name); } else { final ArrayList layers = new ArrayList<>(); layers.add(name); namespaceLayersMap.put(value, layers); } break; } } } } } else { // just get all the layers from this store layerInfoArray.add(layerArray.getJSONObject(i)); } } } } // Handle geowaveOnly response if (geowaveOnly) { // create the json object with layers sorted by namespace final JSONArray layersArray = new JSONArray(); for (final Map.Entry> kvp : namespaceLayersMap.entrySet()) { final JSONArray layers = new JSONArray(); for (int i = 0; i < kvp.getValue().size(); i++) { final JSONObject layerObj = new JSONObject(); layerObj.put("name", kvp.getValue().get(i)); layers.add(layerObj); } final JSONObject layersObj = new JSONObject(); layersObj.put("namespace", kvp.getKey()); layersObj.put("layers", layers); 
layersArray.add(layersObj); } final JSONObject layersObj = new JSONObject(); layersObj.put("layers", layersArray); return Response.ok(layersObj.toString(defaultIndentation)).build(); } else { final JSONObject layersObj = new JSONObject(); layersObj.put("layers", layerInfoArray); return Response.ok(layersObj.toString(defaultIndentation)).build(); } } return resp; } /** * Add feature layer to geoserver */ public Response addFeatureLayer( final String workspaceName, final String datastoreName, final String layerName, final String defaultStyle) { if (defaultStyle != null) { getWebTarget().path("rest/layers/" + layerName + ".json").request().put( Entity.entity( "{'layer':{'defaultStyle':{'name':'" + defaultStyle + "'}}}", MediaType.APPLICATION_JSON)); } return getWebTarget().path( "rest/workspaces/" + workspaceName + "/datastores/" + datastoreName + "/featuretypes").request().post( Entity.entity( "{'featureType':{'name':'" + layerName + "'}}", MediaType.APPLICATION_JSON)); } /** * Delete a feature layer from geoserver */ public Response deleteFeatureLayer(final String layerName) { return getWebTarget().path("rest/layers/" + layerName).request().delete(); } /** * Change the default style of a layer */ public Response setLayerStyle(final String layerName, final String styleName) { return getWebTarget().path("rest/layers/" + layerName + ".json").request().put( Entity.entity( "{'layer':{'defaultStyle':{'name':'" + styleName + "'}}}", MediaType.APPLICATION_JSON)); } /** * Get a geoserver style */ public Response getStyle( @PathParam("styleName") final String styleName, final boolean quietOnNotFound) { final Response resp = getWebTarget().path("rest/styles/" + styleName + ".sld").queryParam( "quietOnNotFound", quietOnNotFound).request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { final InputStream inStream = (InputStream) resp.getEntity(); return Response.ok(inStream, MediaType.APPLICATION_XML).header( "Content-Disposition", "attachment; filename=\"" + 
styleName + ".sld\"").build(); } return resp; } /** * Get a list of geoserver styles */ public Response getStyles() { final Response resp = getWebTarget().path("rest/styles.json").request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); // get the style names final JSONArray styleArray = getArrayEntryNames( JSONObject.fromObject(resp.readEntity(String.class)), "styles", "style"); final JSONObject stylesObj = new JSONObject(); stylesObj.put("styles", styleArray); return Response.ok(stylesObj.toString(defaultIndentation)).build(); } return resp; } /** * Add a style to geoserver */ public Response addStyle(final String styleName, final InputStream fileInStream) { final Response addStyleResponse = getWebTarget().path("rest/styles").request().post( Entity.entity( "{'style':{'name':'" + styleName + "','filename':'" + styleName + ".sld'}}", MediaType.APPLICATION_JSON)); // Return the reponse if this style is not correctly created. This // method actually makes 2 rest calls to GeoServer if (addStyleResponse.getStatus() != Status.CREATED.getStatusCode()) { return addStyleResponse; } return getWebTarget().path("rest/styles/" + styleName).request().put( Entity.entity(fileInStream, "application/vnd.ogc.sld+xml")); } /** * Delete a style from geoserver */ public Response deleteStyle(final String styleName) { return getWebTarget().path("rest/styles/" + styleName).request().delete(); } /** * Get coverage store from geoserver */ public Response getCoverageStore( final String workspaceName, final String coverageName, final boolean quietOnNotFound) { final Response resp = getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores/" + coverageName + ".json").queryParam("quietOnNotFound", quietOnNotFound).request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); final JSONObject cvgstore = JSONObject.fromObject(resp.readEntity(String.class)); if (cvgstore != null) { return 
Response.ok(cvgstore.toString(defaultIndentation)).build(); } } return resp; } /** * Get a list of coverage stores from geoserver */ public Response getCoverageStores(final String workspaceName) { final Response resp = getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores.json").request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); // get the datastore names final JSONArray coveragesArray = getArrayEntryNames( JSONObject.fromObject(resp.readEntity(String.class)), "coverageStores", "coverageStore"); final JSONObject dsObj = new JSONObject(); dsObj.put("coverageStores", coveragesArray); return Response.ok(dsObj.toString(defaultIndentation)).build(); } return resp; } /** * Add coverage store to geoserver */ public Response addCoverageStore( final String workspaceName, String cvgStoreName, final String gwStoreName, final Boolean equalizeHistogramOverride, final String interpolationOverride, final Boolean scaleTo8Bit) { final DataStorePluginOptions inputStoreOptions = getStorePlugin(gwStoreName); if ((cvgStoreName == null) || cvgStoreName.isEmpty()) { cvgStoreName = gwStoreName + GeoServerConfig.DEFAULT_CS; } // Get the store's db config final Map storeConfigMap = inputStoreOptions.getOptionsAsMap(); storeConfigMap.put("gwNamespace", inputStoreOptions.getGeoWaveNamespace()); final String cvgStoreXml = createCoverageXml( storeConfigMap, equalizeHistogramOverride, interpolationOverride, scaleTo8Bit, workspaceName, cvgStoreName); LOGGER.debug("Add coverage store - xml params:\n" + cvgStoreXml); // create a new geoserver style return getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores").request().post( Entity.entity(cvgStoreXml, MediaType.APPLICATION_XML)); } /** * Delete coverage store form geoserver */ public Response deleteCoverageStore(final String workspaceName, final String cvgstoreName) { return getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores/" + 
cvgstoreName).queryParam( "recurse", "true").request().delete(); } /** * Get a list of coverages (raster layers) from geoserver */ public Response getCoverages(final String workspaceName, final String cvsstoreName) { final Response resp = getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores/" + cvsstoreName + "/coverages.json").request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); // get the datastore names final JSONArray coveragesArray = getArrayEntryNames( JSONObject.fromObject(resp.readEntity(String.class)), "coverages", "coverage"); final JSONObject dsObj = new JSONObject(); dsObj.put("coverages", coveragesArray); return Response.ok(dsObj.toString(defaultIndentation)).build(); } return resp; } /** * Get coverage from geoserver */ public Response getCoverage( final String workspaceName, final String cvgStoreName, final String coverageName, final boolean quietOnNotFound) { final Response resp = getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores/" + cvgStoreName + "/coverages/" + coverageName + ".json").queryParam("quietOnNotFound", quietOnNotFound).request().get(); if (resp.getStatus() == Status.OK.getStatusCode()) { resp.bufferEntity(); final JSONObject cvg = JSONObject.fromObject(resp.readEntity(String.class)); if (cvg != null) { return Response.ok(cvg.toString(defaultIndentation)).build(); } } return resp; } /** * Add coverage to geoserver */ public Response addCoverage( final String workspaceName, final String cvgStoreName, final String coverageName) { final String jsonString = "{'coverage':" + "{'name':'" + coverageName + "'," + "'nativeCoverageName':'" + coverageName + "'}}"; LOGGER.debug("Posting JSON: " + jsonString + " to " + workspaceName + "/" + cvgStoreName); return getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores/" + cvgStoreName + "/coverages").request().post(Entity.entity(jsonString, MediaType.APPLICATION_JSON)); } /** * Delete coverage 
from geoserver */ public Response deleteCoverage( final String workspaceName, final String cvgstoreName, final String coverageName) { return getWebTarget().path( "rest/workspaces/" + workspaceName + "/coveragestores/" + cvgstoreName + "/coverages/" + coverageName).queryParam("recurse", "true").request().delete(); } // Internal methods protected String createFeatureTypeJson(final String featureTypeName) { final JSONObject featTypeJson = new JSONObject(); featTypeJson.put("name", featureTypeName); final JSONObject jsonObj = new JSONObject(); jsonObj.put("featureType", featTypeJson); return jsonObj.toString(); } protected JSONArray getArrayEntryNames( JSONObject jsonObj, final String firstKey, final String secondKey) { // get the top level object/array if (jsonObj.get(firstKey) instanceof JSONObject) { jsonObj = jsonObj.getJSONObject(firstKey); } else if (jsonObj.get(firstKey) instanceof JSONArray) { final JSONArray tempArray = jsonObj.getJSONArray(firstKey); if (tempArray.size() > 0) { if (tempArray.get(0) instanceof JSONObject) { jsonObj = tempArray.getJSONObject(0); } else { // empty list! 
return new JSONArray(); } } } // get the sub level object/array final JSONArray entryArray = new JSONArray(); if (jsonObj.get(secondKey) instanceof JSONObject) { final JSONObject entry = new JSONObject(); entry.put("name", jsonObj.getJSONObject(secondKey).getString("name")); entryArray.add(entry); } else if (jsonObj.get(secondKey) instanceof JSONArray) { final JSONArray entries = jsonObj.getJSONArray(secondKey); for (int i = 0; i < entries.size(); i++) { final JSONObject entry = new JSONObject(); entry.put("name", entries.getJSONObject(i).getString("name")); entryArray.add(entry); } } return entryArray; } protected String createDatastoreJson( final String geowaveStoreType, final Map geowaveStoreConfig, final String name, final String lockMgmt, final String authMgmtProvider, final String authDataUrl, final String queryIndexStrategy, final boolean enabled) { final JSONObject dataStore = new JSONObject(); dataStore.put("name", name); dataStore.put("type", GeoServerConfig.DISPLAY_NAME_PREFIX + geowaveStoreType); dataStore.put("enabled", Boolean.toString(enabled)); final JSONObject connParams = new JSONObject(); if (geowaveStoreConfig != null) { for (final Entry e : geowaveStoreConfig.entrySet()) { connParams.put(e.getKey(), e.getValue()); } } connParams.put("Lock Management", lockMgmt); connParams.put(GeoServerConfig.QUERY_INDEX_STRATEGY_KEY, queryIndexStrategy); connParams.put("Authorization Management Provider", authMgmtProvider); if (!authMgmtProvider.equals("empty")) { connParams.put("Authorization Data URL", authDataUrl); } dataStore.put("connectionParameters", connParams); final JSONObject jsonObj = new JSONObject(); jsonObj.put("dataStore", dataStore); return jsonObj.toString(); } private String createCoverageXml( final Map geowaveStoreConfig, final Boolean equalizeHistogramOverride, final String interpolationOverride, final Boolean scaleTo8Bit, final String workspace, final String cvgstoreName) { String coverageXml = null; StreamResult result = null; try { // 
create the post XML final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setFeature("http://xml.org/sax/features/external-general-entities", false); factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); final Document xmlDoc = factory.newDocumentBuilder().newDocument(); final Element rootEl = xmlDoc.createElement("coverageStore"); xmlDoc.appendChild(rootEl); final Element nameEl = xmlDoc.createElement("name"); nameEl.appendChild(xmlDoc.createTextNode(cvgstoreName)); rootEl.appendChild(nameEl); final Element wsEl = xmlDoc.createElement("workspace"); wsEl.appendChild(xmlDoc.createTextNode(workspace)); rootEl.appendChild(wsEl); final Element typeEl = xmlDoc.createElement("type"); typeEl.appendChild(xmlDoc.createTextNode("GeoWaveRasterFormat")); rootEl.appendChild(typeEl); final Element enabledEl = xmlDoc.createElement("enabled"); enabledEl.appendChild(xmlDoc.createTextNode("true")); rootEl.appendChild(enabledEl); final Element configEl = xmlDoc.createElement("configure"); configEl.appendChild(xmlDoc.createTextNode("all")); rootEl.appendChild(configEl); // Method using custom URL & handler: final String storeConfigUrl = createParamUrl( geowaveStoreConfig, equalizeHistogramOverride, interpolationOverride, scaleTo8Bit); final Element urlEl = xmlDoc.createElement("url"); urlEl.appendChild(xmlDoc.createTextNode(storeConfigUrl)); rootEl.appendChild(urlEl); // use a transformer to create the xml string for the rest call // HP Fortify "XML External Entity Injection" not relevant final TransformerFactory xformerFactory = TransformerFactory.newInstance(); final Transformer xformer = xformerFactory.newTransformer(); final DOMSource source = new DOMSource(xmlDoc); result = new StreamResult(new StringWriter()); xformer.transform(source, result); // HP Fortify "Improper Resource Shutdown or Release" false positive // coverageXml holds onto a string rather than the writer itself. 
// result.getWriter().close() is called explicitly in the finally // clause below coverageXml = result.getWriter().toString(); } catch (final TransformerException e) { LOGGER.error("Unable to create transformer", e); } catch (final ParserConfigurationException e1) { LOGGER.error("Unable to create DocumentBuilderFactory", e1); } finally { if ((result != null) && (result.getWriter() != null)) { try { result.getWriter().close(); } catch (final IOException e) { LOGGER.error(e.getLocalizedMessage(), e); } } } return coverageXml; } private String createParamUrl( final Map geowaveStoreConfig, final Boolean equalizeHistogramOverride, final String interpolationOverride, final Boolean scaleTo8Bit) { // Create the custom geowave url w/ params final StringBuffer buf = new StringBuffer(); boolean first = true; for (final Entry e : geowaveStoreConfig.entrySet()) { if (!first) { buf.append(";"); } else { first = false; } buf.append(e.getKey()).append("=").append(e.getValue()); } if (equalizeHistogramOverride != null) { buf.append(";equalizeHistogramOverride="); buf.append(equalizeHistogramOverride); } if (interpolationOverride != null) { buf.append(";interpolationOverride="); buf.append(interpolationOverride); } if (scaleTo8Bit != null) { buf.append(";scaleTo8Bit="); buf.append(scaleTo8Bit); } return buf.toString(); } public DataStorePluginOptions getStorePlugin(final String storeName) { return CLIUtils.loadStore(storeName, config.getPropFile(), console); } public ArrayList getStoreAdapters(final String storeName, final String adapterId) { final ArrayList adapterInfoList = getStoreAdapterInfo(storeName, adapterId); final ArrayList adapterIdList = new ArrayList<>(); for (final DataAdapterInfo info : adapterInfoList) { adapterIdList.add(info.typeName); } return adapterIdList; } private ArrayList getStoreAdapterInfo( final String storeName, final String adapterId) { final DataStorePluginOptions dsPlugin = getStorePlugin(storeName); final DataStore dataStore = 
dsPlugin.createDataStore(); final ArrayList adapterInfoList = new ArrayList<>(); LOGGER.debug("Adapter list for " + storeName + " with adapterId = " + adapterId + ": "); for (final DataTypeAdapter adapter : dataStore.getTypes()) { final DataAdapterInfo info = getAdapterInfo(adapterId, adapter); if (info != null) { adapterInfoList.add(info); LOGGER.debug("> '" + info.typeName + "' adapter passed filter"); } } LOGGER.debug("getStoreAdapterInfo(" + storeName + ") got " + adapterInfoList.size() + " ids"); if (dataStore instanceof Closeable) { try { ((Closeable) dataStore).close(); } catch (final IOException e) { LOGGER.error("Unable to close datastore"); } } return adapterInfoList; } private DataAdapterInfo getAdapterInfo(final String typeName, final DataTypeAdapter adapter) { LOGGER.debug("getAdapterInfo for id = " + typeName); final DataAdapterInfo info = new DataAdapterInfo(); info.typeName = adapter.getTypeName(); info.isRaster = false; if ((adapter instanceof RasterDataAdapter) || ((adapter instanceof InternalDataAdapter) && (((InternalDataAdapter) adapter).getAdapter() instanceof RasterDataAdapter))) { info.isRaster = true; } LOGGER.debug("> Adapter ID: " + info.typeName); LOGGER.debug("> Adapter Type: " + adapter.getClass().getSimpleName()); if ((typeName == null) || typeName.equals(AddOption.ALL.name())) { LOGGER.debug("id is null or all"); return info; } if (typeName.equals(adapter.getTypeName())) { LOGGER.debug("id matches adapter id"); return info; } if (typeName.equals(AddOption.RASTER.name()) && ((adapter instanceof RasterDataAdapter) || ((adapter instanceof InternalDataAdapter) && (((InternalDataAdapter) adapter).getAdapter() instanceof RasterDataAdapter)))) { LOGGER.debug("id is all-raster and adapter is raster type"); return info; } if (typeName.equals(AddOption.VECTOR.name()) && ((adapter instanceof GeotoolsFeatureDataAdapter) || ((adapter instanceof InternalDataAdapter) && (((InternalDataAdapter) adapter).getAdapter() instanceof 
GeotoolsFeatureDataAdapter)))) { LOGGER.debug("id is all-vector and adapter is vector type"); return info; } LOGGER.debug("No match!"); return null; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerSSLConfigurationOptions.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver;

import com.beust.jcommander.Parameters;

/**
 * SSL/TLS configuration options for connecting to GeoServer over HTTPS. All option fields are
 * inherited from {@link StoreSSLConfigurationOptions}; this subclass only fixes the configuration
 * prefix to "geoserver" so persisted keys take the form geoserver.ssl.*.
 */
@Parameters(
    commandDescription = "SSL Configuration Options that can be specified if connecting to geoserver over SSL")
public class GeoServerSSLConfigurationOptions extends StoreSSLConfigurationOptions {
  public GeoServerSSLConfigurationOptions() {
    // every property written by saveProperties() is prefixed with "geoserver"
    super("geoserver");
  }
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/GeoServerSection.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver;

import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import org.locationtech.geowave.core.cli.operations.GeoWaveTopLevelSection;
import com.beust.jcommander.Parameters;

/**
 * CLI section grouping the GeoServer commands under "geowave gs" (alias "geowave geoserver"). This
 * class carries no behavior of its own; subcommands register themselves against it via
 * {@link GeowaveOperation#parentOperation()}.
 */
@GeowaveOperation(name = {"gs", "geoserver"}, parentOperation = GeoWaveTopLevelSection.class)
@Parameters(commandDescription = "Commands that manage geoserver data stores and layers")
public class GeoServerSection extends DefaultOperation {
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/SSLOptionAnnotation.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver;

import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

/**
 * Annotation for specifying the base property name to use when persisting an SSL configuration
 * option. The owning options class prepends its configuration prefix (e.g. "geoserver") to this
 * base name to form the full property key. Retained at runtime so it can be read reflectively.
 */
@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
@Target({FIELD, METHOD})
public @interface SSLOptionAnnotation {
  // base (unprefixed) property name, e.g. "ssl.trustStore"
  String propertyBaseName();
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/StoreSSLConfigurationOptions.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver;

import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.Properties;
import org.locationtech.geowave.core.cli.converters.OptionalPasswordConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.Parameter;

/**
 * Base class holding the JCommander SSL/TLS command-line options shared by store-specific
 * configurations. Each option field is tagged with {@link SSLOptionAnnotation} carrying its base
 * property name; {@link #saveProperties(Properties)} reflectively writes every non-null option to
 * a Properties object under "<configPrefix>.<baseName>" keys.
 */
public abstract class StoreSSLConfigurationOptions {
  private static final Logger LOGGER = LoggerFactory.getLogger(StoreSSLConfigurationOptions.class);

  // prefix prepended to every persisted property key, e.g. "geoserver"
  private final String configPrefix;

  public StoreSSLConfigurationOptions(final String configPrefix) {
    this.configPrefix = configPrefix;
  }

  @SSLOptionAnnotation(propertyBaseName = "ssl.security.protocol")
  @Parameter(
      names = "--sslSecurityProtocol",
      description = "Specify the Transport Layer Security (TLS) protocol to use when connecting to the server. By default, the system will use TLS.")
  protected String sslSecurityProtocol;

  @SSLOptionAnnotation(propertyBaseName = "ssl.trustStore")
  @Parameter(
      names = "--sslTrustStorePath",
      description = "Specify the absolute path to where truststore file is located on system. The truststore file is used to validate client certificates.")
  protected String sslTrustStorePath;

  @SSLOptionAnnotation(propertyBaseName = "ssl.trustStorePassword")
  @Parameter(
      names = "--sslTrustStorePassword",
      description = "Specify the password to use to access the truststore file. - "
          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,
      converter = OptionalPasswordConverter.class)
  protected String sslTrustStorePassword;

  @SSLOptionAnnotation(propertyBaseName = "ssl.trustStoreType")
  @Parameter(
      names = "--sslTrustStoreType",
      description = "Specify the type of key store used for the truststore, i.e. JKS (Java KeyStore).")
  protected String sslTrustStoreType;

  @SSLOptionAnnotation(propertyBaseName = "ssl.trustStoreProvider")
  @Parameter(
      names = "--sslTrustStoreProvider",
      description = "Specify the name of the truststore provider to be used for the server certificate.")
  protected String sslTrustStoreProvider;

  @SSLOptionAnnotation(propertyBaseName = "ssl.trustStoreMgrFactoryAlgorithm")
  @Parameter(
      names = "--sslTrustManagerAlgorithm",
      description = "Specify the algorithm to use for the truststore.")
  protected String sslTrustManagerAlgorithm;

  @SSLOptionAnnotation(propertyBaseName = "ssl.trustStoreMgrFactoryProvider")
  @Parameter(
      names = "--sslTrustManagerProvider",
      description = "Specify the trust manager factory provider.")
  protected String sslTrustManagerProvider;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyStore")
  @Parameter(
      names = "--sslKeyStorePath",
      description = "Specify the absolute path to where the keystore file is located on system. The keystore contains the server certificate to be loaded.")
  protected String sslKeyStorePath;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyStorePassword")
  @Parameter(
      names = "--sslKeyStorePassword",
      description = "Specify the password to use to access the keystore file. - "
          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,
      converter = OptionalPasswordConverter.class)
  protected String sslKeyStorePassword;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyStoreProvider")
  @Parameter(
      names = "--sslKeyStoreProvider",
      description = "Specify the name of the keystore provider to be used for the server certificate.")
  protected String sslKeyStoreProvider;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyPassword")
  @Parameter(
      names = "--sslKeyPassword",
      description = "Specify the password to be used to access the server certificate from the specified keystore file. - "
          + OptionalPasswordConverter.DEFAULT_PASSWORD_DESCRIPTION,
      converter = OptionalPasswordConverter.class)
  protected String sslKeyPassword;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyStoreType")
  @Parameter(
      names = "--sslKeyStoreType",
      description = "The type of keystore file to be used for the server certificate.")
  protected String sslKeyStoreType;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyMgrFactoryAlgorithm")
  @Parameter(
      names = "--sslKeyManagerAlgorithm",
      description = "Specify the algorithm to use for the keystore.")
  protected String sslKeyManagerAlgorithm;

  @SSLOptionAnnotation(propertyBaseName = "ssl.keyMgrFactoryProvider")
  @Parameter(
      names = "--sslKeyManagerProvider",
      description = "Specify the key manager factory provider.")
  protected String sslKeyManagerProvider;

  /** @return the sslSecurityProtocol */
  public String getSslSecurityProtocol() {
    return sslSecurityProtocol;
  }

  /** @param sslSecurityProtocol the sslSecurityProtocol to set */
  public void setSslSecurityProtocol(final String sslSecurityProtocol) {
    this.sslSecurityProtocol = sslSecurityProtocol;
  }

  /** @return the sslTrustStorePath */
  public String getSslTrustStorePath() {
    return sslTrustStorePath;
  }

  /** @param sslTrustStorePath the sslTrustStorePath to set */
  public void setSslTrustStorePath(final String sslTrustStorePath) {
    this.sslTrustStorePath = sslTrustStorePath;
  }

  /** @return the sslTrustStorePassword */
  public String getSslTrustStorePassword() {
    return sslTrustStorePassword;
  }

  /** @param sslTrustStorePassword the sslTrustStorePassword to set */
  public void setSslTrustStorePassword(final String sslTrustStorePassword) {
    this.sslTrustStorePassword = sslTrustStorePassword;
  }

  /** @return the sslTrustStoreType */
  public String getSslTrustStoreType() {
    return sslTrustStoreType;
  }

  /** @param sslTrustStoreType the sslTrustStoreType to set */
  public void setSslTrustStoreType(final String sslTrustStoreType) {
    this.sslTrustStoreType = sslTrustStoreType;
  }

  /** @return the sslTrustStoreProvider */
  public String getSslTrustStoreProvider() {
    return sslTrustStoreProvider;
  }

  /** @param sslTrustStoreProvider the sslTrustStoreProvider to set */
  public void setSslTrustStoreProvider(final String sslTrustStoreProvider) {
    this.sslTrustStoreProvider = sslTrustStoreProvider;
  }

  /** @return the sslTrustManagerAlgorithm */
  public String getSslTrustManagerAlgorithm() {
    return sslTrustManagerAlgorithm;
  }

  /** @param sslTrustManagerAlgorithm the sslTrustManagerAlgorithm to set */
  public void setSslTrustManagerAlgorithm(final String sslTrustManagerAlgorithm) {
    this.sslTrustManagerAlgorithm = sslTrustManagerAlgorithm;
  }

  /** @return the sslTrustManagerProvider */
  public String getSslTrustManagerProvider() {
    return sslTrustManagerProvider;
  }

  /** @param sslTrustManagerProvider the sslTrustManagerProvider to set */
  public void setSslTrustManagerProvider(final String sslTrustManagerProvider) {
    this.sslTrustManagerProvider = sslTrustManagerProvider;
  }

  /** @return the sslKeyStorePath */
  public String getSslKeyStorePath() {
    return sslKeyStorePath;
  }

  /** @param sslKeyStorePath the sslKeyStorePath to set */
  public void setSslKeyStorePath(final String sslKeyStorePath) {
    this.sslKeyStorePath = sslKeyStorePath;
  }

  /** @return the sslKeyStorePassword */
  public String getSslKeyStorePassword() {
    return sslKeyStorePassword;
  }

  /** @param sslKeyStorePassword the sslKeyStorePassword to set */
  public void setSslKeyStorePassword(final String sslKeyStorePassword) {
    this.sslKeyStorePassword = sslKeyStorePassword;
  }

  /** @return the sslKeyStoreProvider */
  public String getSslKeyStoreProvider() {
    return sslKeyStoreProvider;
  }

  /** @param sslKeyStoreProvider the sslKeyStoreProvider to set */
  public void setSslKeyStoreProvider(final String sslKeyStoreProvider) {
    this.sslKeyStoreProvider = sslKeyStoreProvider;
  }

  /** @return the sslKeyPassword */
  public String getSslKeyPassword() {
    return sslKeyPassword;
  }

  /** @param sslKeyPassword the sslKeyPassword to set */
  public void setSslKeyPassword(final String sslKeyPassword) {
    this.sslKeyPassword = sslKeyPassword;
  }

  /** @return the sslKeyStoreType */
  public String getSslKeyStoreType() {
    return sslKeyStoreType;
  }

  /** @param sslKeyStoreType the sslKeyStoreType to set */
  public void setSslKeyStoreType(final String sslKeyStoreType) {
    this.sslKeyStoreType = sslKeyStoreType;
  }

  /** @return the sslKeyManagerAlgorithm */
  public String getSslKeyManagerAlgorithm() {
    return sslKeyManagerAlgorithm;
  }

  /** @param sslKeyManagerAlgorithm the sslKeyManagerAlgorithm to set */
  public void setSslKeyManagerAlgorithm(final String sslKeyManagerAlgorithm) {
    this.sslKeyManagerAlgorithm = sslKeyManagerAlgorithm;
  }

  /** @return the sslKeyManagerProvider */
  public String getSslKeyManagerProvider() {
    return sslKeyManagerProvider;
  }

  /** @param sslKeyManagerProvider the sslKeyManagerProvider to set */
  public void setSslKeyManagerProvider(final String sslKeyManagerProvider) {
    this.sslKeyManagerProvider = sslKeyManagerProvider;
  }

  /**
   * Reflectively persist every non-null @SSLOptionAnnotation-tagged field into existingProps,
   * keyed by "<configPrefix>.<propertyBaseName>".
   *
   * @param existingProps properties object to write into (mutated in place)
   * @return true when at least one property was written
   */
  public boolean saveProperties(final Properties existingProps) {
    boolean updated = false;
    final Field[] fields = StoreSSLConfigurationOptions.class.getDeclaredFields();
    if ((fields != null) && (fields.length != 0)) {
      for (final Field field : fields) {
        field.setAccessible(true);
        // HPFortify
        // "Access Specifier Manipulation"
        // False Positive: These fields are being modified by trusted
        // code,
        // in a way that is not influenced by user input
        final Annotation[] annotations = field.getAnnotations();
        for (final Annotation annotation : annotations) {
          if (annotation instanceof SSLOptionAnnotation) {
            final SSLOptionAnnotation sslOptionAnnotation = (SSLOptionAnnotation) annotation;

            Object value = null;
            try {
              value = field.get(this);
            } catch (IllegalArgumentException | IllegalAccessException e) {
              LOGGER.error(e.getLocalizedMessage(), e);
            }

            // only write to properties the values which have been
            // specified
            if ((value != null) && (sslOptionAnnotation.propertyBaseName() != null)) {
              final String propertyKey =
                  String.format("%s.%s", configPrefix, sslOptionAnnotation.propertyBaseName());
              existingProps.put(propertyKey, value);
              updated = true;
            }
          }
        }
      }
    }
    return updated;
  }
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/constants/GeoServerConstants.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

 * See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.constants;

/**
 * GeoServer connection constants: the property keys (all prefixed with "geoserver.") under which
 * the CLI persists connection, default workspace/store, and SSL settings. The ".ssl.*" suffixes
 * intentionally mirror the SSLOptionAnnotation base names used by StoreSSLConfigurationOptions.
 */
public interface GeoServerConstants {
  public static final String GEOSERVER_NAMESPACE_PREFIX = "geoserver";
  public static final String GEOSERVER_URL = GEOSERVER_NAMESPACE_PREFIX + ".url";
  public static final String GEOSERVER_USER = GEOSERVER_NAMESPACE_PREFIX + ".user";
  public static final String GEOSERVER_PASS = GEOSERVER_NAMESPACE_PREFIX + ".pass";
  public static final String GEOSERVER_WORKSPACE = GEOSERVER_NAMESPACE_PREFIX + ".workspace";
  public static final String GEOSERVER_CS = GEOSERVER_NAMESPACE_PREFIX + ".coverageStore";
  public static final String GEOSERVER_DS = GEOSERVER_NAMESPACE_PREFIX + ".dataStore";
  public static final String GEOSERVER_SSL_SECURITY_PROTOCOL =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.security.protocol";
  public static final String GEOSERVER_SSL_TRUSTSTORE_FILE =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.trustStore";
  public static final String GEOSERVER_SSL_TRUSTSTORE_PASS =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.trustStorePassword";
  public static final String GEOSERVER_SSL_TRUSTSTORE_TYPE =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.trustStoreType";
  public static final String GEOSERVER_SSL_TRUSTSTORE_PROVIDER =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.trustStoreProvider";
  public static final String GEOSERVER_SSL_TRUSTMGR_ALG =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.trustStoreMgrFactoryAlgorithm";
  public static final String GEOSERVER_SSL_TRUSTMGR_PROVIDER =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.trustStoreMgrFactoryProvider";
  public static final String GEOSERVER_SSL_KEYSTORE_FILE =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyStore";
  public static final String GEOSERVER_SSL_KEYSTORE_PASS =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyStorePassword";
  public static final String GEOSERVER_SSL_KEYSTORE_PROVIDER =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyStoreProvider";
  public static final String GEOSERVER_SSL_KEY_PASS =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyPassword";
  public static final String GEOSERVER_SSL_KEYSTORE_TYPE =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyStoreType";
  public static final String GEOSERVER_SSL_KEYMGR_ALG =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyMgrFactoryAlgorithm";
  public static final String GEOSERVER_SSL_KEYMGR_PROVIDER =
      GEOSERVER_NAMESPACE_PREFIX + ".ssl.keyMgrFactoryProvider";
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/CoverageOperationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.coverage; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class CoverageOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { CoverageSection.class, GeoServerListCoveragesCommand.class, GeoServerGetCoverageCommand.class, GeoServerAddCoverageCommand.class, GeoServerRemoveCoverageCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/CoverageSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.coverage; import org.locationtech.geowave.cli.geoserver.GeoServerSection; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import com.beust.jcommander.Parameters; @GeowaveOperation(name = {"cv", "coverage"}, parentOperation = GeoServerSection.class) @Parameters(commandDescription = "Commands for configuring GeoServer coverages") public class CoverageSection extends DefaultOperation { } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerAddCoverageCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.coverage; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "add", parentOperation = CoverageSection.class) @Parameters(commandDescription = "Add a GeoServer coverage") public class GeoServerAddCoverageCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter(names = {"-cs", "--cvgstore"}, required = true, description = "coverage store name") private String cvgstore = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String cvgName = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } cvgName = parameters.get(0); final Response addLayerResponse = geoserverClient.addCoverage(workspace, cvgstore, cvgName); if (addLayerResponse.getStatus() == 
Status.OK.getStatusCode()) { return "Add coverage '" + cvgName + "' to '" + workspace + "/" + cvgstore + "' on GeoServer: OK"; } final String errorMessage = "Error adding GeoServer coverage " + cvgName + ": " + addLayerResponse.readEntity(String.class) + "\nGeoServer Response Code = " + addLayerResponse.getStatus(); return handleError(addLayerResponse, errorMessage); } public void setCvgstore(String cvgstore) { this.cvgstore = cvgstore; } public void setParameters(List parameters) { this.parameters = parameters; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerGetCoverageCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.coverage; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import net.sf.json.JSONObject; @GeowaveOperation(name = "get", parentOperation = CoverageSection.class) @Parameters(commandDescription = "Get a GeoServer coverage's info") public class GeoServerGetCoverageCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter(names = {"-cs", "--cvgstore"}, required = true, description = "coverage store name") private String cvgstore = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String cvgName = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } cvgName = parameters.get(0); final Response getCvgResponse = geoserverClient.getCoverage(workspace, cvgstore, cvgName, false); 
if (getCvgResponse.getStatus() == Status.OK.getStatusCode()) { final JSONObject jsonResponse = JSONObject.fromObject(getCvgResponse.getEntity()); return "\nGeoServer coverage info for '" + cvgName + "': " + jsonResponse.toString(2); } final String errorMessage = "Error getting GeoServer coverage info for " + cvgName + ": " + getCvgResponse.readEntity(String.class) + "\nGeoServer Response Code = " + getCvgResponse.getStatus(); return handleError(getCvgResponse, errorMessage); } public void setCvgstore(final String cvgstore) { this.cvgstore = cvgstore; } public void setParameters(final List parameters) { this.parameters = parameters; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerListCoveragesCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.coverage; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import net.sf.json.JSONArray; import net.sf.json.JSONObject; @GeowaveOperation(name = "list", parentOperation = CoverageSection.class) @Parameters(commandDescription = "List GeoServer Coverages") public class GeoServerListCoveragesCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace; @Parameter(description = "") private List parameters = new ArrayList<>(); private String csName = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } csName = parameters.get(0); final Response getCvgStoreResponse = geoserverClient.getCoverages(workspace, csName); if (getCvgStoreResponse.getStatus() == Status.OK.getStatusCode()) { final JSONObject jsonResponse = 
JSONObject.fromObject(getCvgStoreResponse.getEntity()); final JSONArray cvgArray = jsonResponse.getJSONArray("coverages"); return "\nGeoServer coverage list for '" + csName + "': " + cvgArray.toString(2); } final String errorMessage = "Error getting GeoServer coverage list for '" + csName + "': " + getCvgStoreResponse.readEntity(String.class) + "\nGeoServer Response Code = " + getCvgStoreResponse.getStatus(); return handleError(getCvgStoreResponse, errorMessage); } public void setParameters(List parameters) { this.parameters = parameters; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/coverage/GeoServerRemoveCoverageCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.coverage; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "rm", parentOperation = CoverageSection.class) @Parameters(commandDescription = "Remove a GeoServer coverage") public class GeoServerRemoveCoverageCommand extends GeoServerRemoveCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter(names = {"-cs", "--cvgstore"}, required = true, description = "coverage store name") private String cvgstore = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String cvgName = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } cvgName = parameters.get(0); final Response getCvgResponse = geoserverClient.deleteCoverage(workspace, cvgstore, cvgName); if 
(getCvgResponse.getStatus() == Status.OK.getStatusCode()) { return "\nRemove GeoServer coverage '" + cvgName + "': OK"; } final String errorMessage = "Error removing GeoServer coverage '" + cvgName + "': " + getCvgResponse.readEntity(String.class) + "\nGeoServer Response Code = " + getCvgResponse.getStatus(); return handleError(getCvgResponse, errorMessage); } public void setCvgstore(String cvgstore) { this.cvgstore = cvgstore; } public void setParameters(List parameters) { this.parameters = parameters; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/CoverageStoreOperationProvider.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.cvstore; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class CoverageStoreOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { CoverageStoreSection.class, GeoServerAddCoverageStoreCommand.class, GeoServerGetCoverageStoreCommand.class, GeoServerListCoverageStoresCommand.class, GeoServerRemoveCoverageStoreCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/CoverageStoreSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.cvstore; import org.locationtech.geowave.cli.geoserver.GeoServerSection; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import com.beust.jcommander.Parameters; @GeowaveOperation(name = {"cs", "coveragestore"}, parentOperation = GeoServerSection.class) @Parameters(commandDescription = "Commands for configuring GeoServer coverage stores") public class CoverageStoreSection extends DefaultOperation { } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerAddCoverageStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.cvstore; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "add", parentOperation = CoverageStoreSection.class) @Parameters(commandDescription = "Add a GeoServer coverage store") public class GeoServerAddCoverageStoreCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter( names = {"-cs", "--coverageStore"}, required = false, description = "coverage store name") private String coverageStore = null; @Parameter( names = {"-histo", "--equalizeHistogramOverride"}, required = false, description = "This parameter will override the behavior to always perform histogram equalization if a histogram exists. Valid values are true and false.", arity = 1) private Boolean equalizeHistogramOverride = null; @Parameter( names = {"-interp", "--interpolationOverride"}, required = false, description = "This will override the default interpolation stored for each layer. Valid values are 0, 1, 2, 3 for NearestNeighbor, Bilinear, Bicubic, and Bicubic (polynomial variant) resepctively. 
") private String interpolationOverride = null; @Parameter( names = {"-scale", "--scaleTo8Bit"}, required = false, description = "By default, integer values will automatically be scaled to 8-bit and floating point values will not. This can be overridden setting this value to true or false.", arity = 1) private Boolean scaleTo8Bit = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String gwStore = null; public void setCoverageStore(final String coverageStore) { this.coverageStore = coverageStore; } public void setEqualizeHistogramOverride(final Boolean equalizeHistogramOverride) { this.equalizeHistogramOverride = equalizeHistogramOverride; } public void setInterpolationOverride(final String interpolationOverride) { this.interpolationOverride = interpolationOverride; } public void setScaleTo8Bit(final Boolean scaleTo8Bit) { this.scaleTo8Bit = scaleTo8Bit; } public void setParameters(final List parameters) { this.parameters = parameters; } @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } gwStore = parameters.get(0); if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } final Response addStoreResponse = geoserverClient.addCoverageStore( workspace, coverageStore, gwStore, equalizeHistogramOverride, interpolationOverride, scaleTo8Bit); if ((addStoreResponse.getStatus() == Status.OK.getStatusCode()) || (addStoreResponse.getStatus() == Status.CREATED.getStatusCode())) { return "Add coverage store for '" + gwStore + "' to workspace '" + workspace + "' on GeoServer: OK"; } final String errorMessage = "Error adding coverage store for '" + gwStore + "' to workspace '" + workspace + "' on GeoServer: " + 
addStoreResponse.readEntity(String.class) + "\nGeoServer Response Code = " + addStoreResponse.getStatus(); return handleError(addStoreResponse, errorMessage); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerGetCoverageStoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.cvstore; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import net.sf.json.JSONObject; @GeowaveOperation(name = "get", parentOperation = CoverageStoreSection.class) @Parameters(commandDescription = "Get GeoServer CoverageStore info") public class GeoServerGetCoverageStoreCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace; @Parameter(description = "") private List parameters = new ArrayList<>(); private String csName = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } public void setParameters(List parameters) { this.parameters = parameters; } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } csName = parameters.get(0); final Response getCvgStoreResponse = geoserverClient.getCoverageStore(workspace, csName, false); if (getCvgStoreResponse.getStatus() == 
Status.OK.getStatusCode()) { final JSONObject jsonResponse = JSONObject.fromObject(getCvgStoreResponse.getEntity()); final JSONObject cvgstore = jsonResponse.getJSONObject("coverageStore"); return "\nGeoServer coverage store info for '" + csName + "': " + cvgstore.toString(2); } final String errorMessage = "Error getting GeoServer coverage store info for '" + csName + "': " + getCvgStoreResponse.readEntity(String.class) + "\nGeoServer Response Code = " + getCvgStoreResponse.getStatus(); return handleError(getCvgStoreResponse, errorMessage); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerListCoverageStoresCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.cvstore;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.locationtech.geowave.cli.geoserver.GeoServerCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

/** Lists the coverage stores of a GeoServer workspace. */
@GeowaveOperation(name = "list", parentOperation = CoverageStoreSection.class)
@Parameters(commandDescription = "List GeoServer coverage stores")
public class GeoServerListCoverageStoresCommand extends GeoServerCommand<String> {
  @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name")
  private String workspace;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  /**
   * Lists all coverage stores of the workspace (defaulted from the configured client when not
   * given), rendering the "coverageStores" JSON array with 2-space indent.
   *
   * @return the coverage store list, or the error text produced by {@code handleError}
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if ((workspace == null) || workspace.isEmpty()) {
      // fall back to the workspace configured on the GeoServer connection
      workspace = geoserverClient.getConfig().getWorkspace();
    }

    final Response listCvgStoresResponse = geoserverClient.getCoverageStores(workspace);
    if (listCvgStoresResponse.getStatus() == Status.OK.getStatusCode()) {
      final JSONObject jsonResponse = JSONObject.fromObject(listCvgStoresResponse.getEntity());
      final JSONArray cvgStores = jsonResponse.getJSONArray("coverageStores");
      return "\nGeoServer coverage stores list for '" + workspace + "': " + cvgStores.toString(2);
    }
    final String errorMessage =
        "Error getting GeoServer coverage stores list for '"
            + workspace
            + "': "
            + listCvgStoresResponse.readEntity(String.class)
            + "\nGeoServer Response Code = "
            + listCvgStoresResponse.getStatus();
    return handleError(listCvgStoresResponse, errorMessage);
  }
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/cvstore/GeoServerRemoveCoverageStoreCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.cvstore;

import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

/** Removes a coverage store from a GeoServer workspace. */
@GeowaveOperation(name = "rm", parentOperation = CoverageStoreSection.class)
@Parameters(commandDescription = "Remove GeoServer Coverage Store")
public class GeoServerRemoveCoverageStoreCommand extends GeoServerRemoveCommand {
  @Parameter(names = {"-ws", "--workspace"}, required = false, description = "Workspace Name")
  private String workspace;

  // single positional argument: the coverage store name
  @Parameter(description = "<coverage store name>")
  private List<String> parameters = new ArrayList<>();

  private String cvgstoreName = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  /**
   * Deletes the named coverage store, defaulting the workspace from the configured client when
   * not given.
   *
   * @return a success message, or the error text produced by {@code handleError}
   * @throws ParameterException if the coverage store name argument is missing
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if (parameters.size() != 1) {
      throw new ParameterException("Requires argument: <coverage store name>");
    }
    if ((workspace == null) || workspace.isEmpty()) {
      // fall back to the workspace configured on the GeoServer connection
      workspace = geoserverClient.getConfig().getWorkspace();
    }
    cvgstoreName = parameters.get(0);

    final Response deleteCvgStoreResponse =
        geoserverClient.deleteCoverageStore(workspace, cvgstoreName);
    if (deleteCvgStoreResponse.getStatus() == Status.OK.getStatusCode()) {
      return "Delete store '"
          + cvgstoreName
          + "' from workspace '"
          + workspace
          + "' on GeoServer: OK";
    }
    final String errorMessage =
        "Error deleting store '"
            + cvgstoreName
            + "' from workspace '"
            + workspace
            + "' on GeoServer: "
            + deleteCvgStoreResponse.readEntity(String.class)
            + "\nGeoServer Response Code = "
            + deleteCvgStoreResponse.getStatus();
    return handleError(deleteCvgStoreResponse, errorMessage);
  }
}

================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/DatastoreOperationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.datastore; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class DatastoreOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { DatastoreSection.class, GeoServerAddDatastoreCommand.class, GeoServerGetDatastoreCommand.class, GeoServerListDatastoresCommand.class, GeoServerRemoveDatastoreCommand.class, GeoServerGetStoreAdapterCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/DatastoreSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.datastore; import org.locationtech.geowave.cli.geoserver.GeoServerSection; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.DefaultOperation; import com.beust.jcommander.Parameters; @GeowaveOperation(name = {"ds", "datastore"}, parentOperation = GeoServerSection.class) @Parameters(commandDescription = "Commands for configuring GeoServer datastores") public class DatastoreSection extends DefaultOperation { } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerAddDatastoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.datastore; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "add", parentOperation = DatastoreSection.class) @Parameters(commandDescription = "Add a GeoServer datastore") public class GeoServerAddDatastoreCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter(names = {"-ds", "--datastore"}, required = false, description = "datastore name") private String datastore = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String gwStore = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } public void setDatastore(final String datastore) { this.datastore = datastore; } public void setParameters(final List parameters) { this.parameters = parameters; } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } gwStore = parameters.get(0); if ((workspace == null) || workspace.isEmpty()) { workspace = 
geoserverClient.getConfig().getWorkspace(); } final Response addStoreResponse = geoserverClient.addDatastore(workspace, datastore, gwStore); if ((addStoreResponse.getStatus() == Status.OK.getStatusCode()) || (addStoreResponse.getStatus() == Status.CREATED.getStatusCode())) { return "Add datastore for '" + gwStore + "' to workspace '" + workspace + "' on GeoServer: OK"; } final String errorMessage = "Error adding datastore for '" + gwStore + "' to workspace '" + workspace + "' on GeoServer: " + addStoreResponse.readEntity(String.class) + "\nGeoServer Response Code = " + addStoreResponse.getStatus(); return handleError(addStoreResponse, errorMessage); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerGetDatastoreCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.datastore; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import net.sf.json.JSONObject; @GeowaveOperation(name = "get", parentOperation = DatastoreSection.class) @Parameters(commandDescription = "Get GeoServer DataStore info") public class GeoServerGetDatastoreCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String datastore = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } public void setParameters(List parameters) { this.parameters = parameters; } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } datastore = parameters.get(0); if ((workspace == null) || workspace.isEmpty()) { workspace = geoserverClient.getConfig().getWorkspace(); } final Response getStoreResponse = geoserverClient.getDatastore(workspace, datastore, false); if (getStoreResponse.getStatus() == 
Status.OK.getStatusCode()) { final JSONObject jsonResponse = JSONObject.fromObject(getStoreResponse.getEntity()); final JSONObject datastore = jsonResponse.getJSONObject("dataStore"); return "\nGeoServer store info for '" + datastore + "': " + datastore.toString(2); } final String errorMessage = "Error getting GeoServer store info for '" + datastore + "': " + getStoreResponse.readEntity(String.class) + "\nGeoServer Response Code = " + getStoreResponse.getStatus(); return handleError(getStoreResponse, errorMessage); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerGetStoreAdapterCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.datastore;

import java.util.ArrayList;
import java.util.List;
import org.locationtech.geowave.cli.geoserver.GeoServerCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

/**
 * Lists the adapter (type) IDs available in a GeoWave store. Takes one positional argument
 * (the store name) and prints one adapter ID per line.
 */
@GeowaveOperation(name = {"getsa", "getstoreadapters"}, parentOperation = DatastoreSection.class)
@Parameters(commandDescription = "Get GeoWave store adapters")
public class GeoServerGetStoreAdapterCommand extends GeoServerCommand<List<String>> {
  // Positional arguments; exactly one (the GeoWave store name) is expected.
  @Parameter(description = "")
  private List<String> parameters = new ArrayList<>();

  // Store name extracted from the positional arguments.
  private String storeName = null;

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  @Override
  public void execute(final OperationParams params) throws Exception {
    final List<String> adapterList = computeResults(params);
    params.getConsole().println("Store " + storeName + " has these adapters:");
    for (final String adapterId : adapterList) {
      params.getConsole().println(adapterId);
    }
  }

  /**
   * Queries the GeoServer client for the store's adapters.
   *
   * @param params CLI operation context
   * @return the adapter IDs reported for the store
   * @throws ParameterException if the positional store-name argument is missing
   * @throws Exception if the lookup fails
   */
  @Override
  public List<String> computeResults(final OperationParams params) throws Exception {
    if (parameters.size() != 1) {
      throw new ParameterException("Requires argument: ");
    }
    storeName = parameters.get(0);
    final List<String> adapterList = geoserverClient.getStoreAdapters(storeName, null);
    return adapterList;
  }
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerListDatastoresCommand.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.datastore;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.locationtech.geowave.cli.geoserver.GeoServerCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

/**
 * Lists the datastores in a GeoServer workspace as pretty-printed JSON. The workspace falls
 * back to the configured default when not given on the command line.
 */
@GeowaveOperation(name = "list", parentOperation = DatastoreSection.class)
@Parameters(commandDescription = "List GeoServer datastores")
public class GeoServerListDatastoresCommand extends GeoServerCommand<String> {
  @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name")
  private String workspace;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  /**
   * Issues the list-datastores REST request and renders the JSON payload.
   *
   * @param params CLI operation context (provides the console)
   * @return pretty-printed store list on HTTP 200, otherwise the result of {@code handleError}
   * @throws Exception if the REST call fails
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if ((workspace == null) || workspace.isEmpty()) {
      // fall back to the workspace configured for the GeoServer connection
      workspace = geoserverClient.getConfig().getWorkspace();
    }
    final Response listStoresResponse = geoserverClient.getDatastores(workspace);
    if (listStoresResponse.getStatus() == Status.OK.getStatusCode()) {
      final JSONObject jsonResponse = JSONObject.fromObject(listStoresResponse.getEntity());
      final JSONArray datastores = jsonResponse.getJSONArray("dataStores");
      return "\nGeoServer stores list for '" + workspace + "': " + datastores.toString(2);
    }
    final String errorMessage =
        "Error getting GeoServer stores list for '"
            + workspace
            + "': "
            + listStoresResponse.readEntity(String.class)
            + "\nGeoServer Response Code = "
            + listStoresResponse.getStatus();
    return handleError(listStoresResponse, errorMessage);
  }
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/datastore/GeoServerRemoveDatastoreCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.datastore;

import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;

/**
 * Deletes a datastore from a GeoServer workspace. Takes one positional argument (the datastore
 * name); the workspace falls back to the configured default.
 */
@GeowaveOperation(name = "rm", parentOperation = DatastoreSection.class)
@Parameters(commandDescription = "Remove GeoServer DataStore")
public class GeoServerRemoveDatastoreCommand extends GeoServerRemoveCommand<String> {
  @Parameter(names = {"-ws", "--workspace"}, required = false, description = "Workspace Name")
  private String workspace;

  // Positional arguments; exactly one (the datastore name) is expected.
  @Parameter(description = "")
  private List<String> parameters = new ArrayList<>();

  // Datastore name extracted from the positional arguments.
  private String datastoreName = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  /**
   * Issues the delete-datastore REST request and renders the outcome.
   *
   * @param params CLI operation context (provides the console)
   * @return success message on HTTP 200, otherwise the result of {@code handleError}
   * @throws ParameterException if the positional datastore-name argument is missing
   * @throws Exception if the REST call fails
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if (parameters.size() != 1) {
      throw new ParameterException("Requires argument: ");
    }
    if ((workspace == null) || workspace.isEmpty()) {
      // fall back to the workspace configured for the GeoServer connection
      workspace = geoserverClient.getConfig().getWorkspace();
    }
    datastoreName = parameters.get(0);
    final Response deleteStoreResponse =
        geoserverClient.deleteDatastore(workspace, datastoreName);
    if (deleteStoreResponse.getStatus() == Status.OK.getStatusCode()) {
      return "Delete store '"
          + datastoreName
          + "' from workspace '"
          + workspace
          + "' on GeoServer: OK";
    }
    final String errorMessage =
        "Error deleting store '"
            + datastoreName
            + "' from workspace '"
            + workspace
            + "' on GeoServer: "
            + deleteStoreResponse.readEntity(String.class)
            + "\nGeoServer Response Code = "
            + deleteStoreResponse.getStatus();
    return handleError(deleteStoreResponse, errorMessage);
  }
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/FeatureLayerOperationProvider.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.featurelayer; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class FeatureLayerOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] { FeatureLayerSection.class, GeoServerAddFeatureLayerCommand.class, GeoServerGetFeatureLayerCommand.class, GeoServerListFeatureLayersCommand.class, GeoServerRemoveFeatureLayerCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/FeatureLayerSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.featurelayer;

import org.locationtech.geowave.cli.geoserver.GeoServerSection;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import com.beust.jcommander.Parameters;

/**
 * CLI section node that groups the GeoServer feature-layer subcommands under
 * {@code geoserver fl} / {@code geoserver featurelayer}. It carries no behavior of its own;
 * child commands register via {@code parentOperation = FeatureLayerSection.class}.
 */
@GeowaveOperation(name = {"fl", "featurelayer"}, parentOperation = GeoServerSection.class)
@Parameters(commandDescription = "Commands for configuring GeoServer feature layers")
public class FeatureLayerSection extends DefaultOperation {
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerAddFeatureLayerCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.featurelayer; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; import net.sf.json.JSONObject; @GeowaveOperation(name = "add", parentOperation = FeatureLayerSection.class) @Parameters(commandDescription = "Add a GeoServer feature layer") public class GeoServerAddFeatureLayerCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name") private String workspace = null; @Parameter(names = {"-ds", "--datastore"}, required = true, description = "datastore name") private String datastore = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String layerName = null; @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } public void setDatastore(final String datastore) { this.datastore = datastore; } public void setParameters(final List parameters) { this.parameters = parameters; } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: "); } if ((workspace == null) || workspace.isEmpty()) { workspace = 
geoserverClient.getConfig().getWorkspace(); } layerName = parameters.get(0); final Response addLayerResponse = geoserverClient.addFeatureLayer(workspace, datastore, layerName, null); if (addLayerResponse.getStatus() == Status.CREATED.getStatusCode()) { final JSONObject listObj = JSONObject.fromObject(addLayerResponse.getEntity()); return "\nGeoServer add layer response " + layerName + ":" + listObj.toString(2); } final String errorMessage = "Error adding GeoServer layer " + layerName + ": " + addLayerResponse.readEntity(String.class) + "\nGeoServer Response Code = " + addLayerResponse.getStatus(); return handleError(addLayerResponse, errorMessage); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerGetFeatureLayerCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.featurelayer;

import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.locationtech.geowave.cli.geoserver.GeoServerCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import net.sf.json.JSONObject;

/**
 * Fetches and pretty-prints a single GeoServer feature layer's configuration. Takes one
 * positional argument: the layer name.
 */
@GeowaveOperation(name = "get", parentOperation = FeatureLayerSection.class)
@Parameters(commandDescription = "Get GeoServer feature layer info")
public class GeoServerGetFeatureLayerCommand extends GeoServerCommand<String> {
  // Positional arguments; exactly one (the layer name) is expected.
  @Parameter(description = "")
  private List<String> parameters = new ArrayList<>();

  // Layer name extracted from the positional arguments.
  private String layerName = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  /**
   * Issues the get-feature-layer REST request and renders the JSON payload.
   *
   * @param params CLI operation context (provides the console)
   * @return pretty-printed layer JSON on HTTP 200, otherwise the result of {@code handleError}
   * @throws ParameterException if the positional layer-name argument is missing
   * @throws Exception if the REST call fails
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if (parameters.size() != 1) {
      throw new ParameterException("Requires argument: ");
    }
    layerName = parameters.get(0);
    final Response getLayerResponse = geoserverClient.getFeatureLayer(layerName, false);
    if (getLayerResponse.getStatus() == Status.OK.getStatusCode()) {
      final JSONObject jsonResponse = JSONObject.fromObject(getLayerResponse.getEntity());
      return "\nGeoServer layer info for '" + layerName + "': " + jsonResponse.toString(2);
    }
    final String errorMessage =
        "Error getting GeoServer layer info for '"
            + layerName
            + "': "
            + getLayerResponse.readEntity(String.class)
            + "\nGeoServer Response Code = "
            + getLayerResponse.getStatus();
    return handleError(getLayerResponse, errorMessage);
  }
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerListFeatureLayersCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.featurelayer; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; import net.sf.json.JSONObject; @GeowaveOperation(name = "list", parentOperation = FeatureLayerSection.class) @Parameters(commandDescription = "List GeoServer feature layers") public class GeoServerListFeatureLayersCommand extends GeoServerCommand { @Parameter(names = {"-ws", "--workspace"}, required = false, description = "Workspace Name") private String workspace = null; public void setWorkspace(final String workspace) { this.workspace = workspace; } @Parameter(names = {"-ds", "--datastore"}, required = false, description = "Datastore Name") private String datastore = null; @Parameter( names = {"-g", "--geowaveOnly"}, required = false, description = "Show only GeoWave feature layers (default: false)") private Boolean geowaveOnly = false; public void setGeowaveOnly(final Boolean geowaveOnly) { this.geowaveOnly = geowaveOnly; } @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } public void setDatastore(final String datastore) { this.datastore = datastore; } @Override public String computeResults(final OperationParams params) throws Exception { final Response listLayersResponse = geoserverClient.getFeatureLayers(workspace, datastore, 
geowaveOnly); if (listLayersResponse.getStatus() == Status.OK.getStatusCode()) { final JSONObject listObj = JSONObject.fromObject(listLayersResponse.getEntity()); return "\nGeoServer layer list: " + listObj.toString(2); } final String errorMessage = "Error getting GeoServer layer list: " + listLayersResponse.readEntity(String.class) + "\nGeoServer Response Code = " + listLayersResponse.getStatus(); return handleError(listLayersResponse, errorMessage); } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/featurelayer/GeoServerRemoveFeatureLayerCommand.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.featurelayer;

import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.locationtech.geowave.cli.geoserver.GeoServerRemoveCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import net.sf.json.JSONObject;

/**
 * Deletes a feature layer from GeoServer. Takes one positional argument: the layer name.
 */
@GeowaveOperation(name = "rm", parentOperation = FeatureLayerSection.class)
@Parameters(commandDescription = "Remove GeoServer feature Layer")
public class GeoServerRemoveFeatureLayerCommand extends GeoServerRemoveCommand<String> {
  // Positional arguments; exactly one (the layer name) is expected.
  @Parameter(description = "")
  private List<String> parameters = new ArrayList<>();

  // Layer name extracted from the positional arguments.
  private String layerName = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  /**
   * Issues the delete-feature-layer REST request and renders the outcome.
   *
   * @param params CLI operation context (provides the console)
   * @return pretty-printed response JSON on HTTP 200, otherwise the result of
   *         {@code handleError}
   * @throws ParameterException if the positional layer-name argument is missing
   * @throws Exception if the REST call fails
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if (parameters.size() != 1) {
      throw new ParameterException("Requires argument: ");
    }
    layerName = parameters.get(0);
    final Response deleteLayerResponse = geoserverClient.deleteFeatureLayer(layerName);
    if (deleteLayerResponse.getStatus() == Status.OK.getStatusCode()) {
      final JSONObject listObj = JSONObject.fromObject(deleteLayerResponse.getEntity());
      return "\nGeoServer delete layer response " + layerName + ": " + listObj.toString(2);
    }
    final String errorMessage =
        "Error deleting GeoServer layer '"
            + layerName
            + "': "
            + deleteLayerResponse.readEntity(String.class)
            + "\nGeoServer Response Code = "
            + deleteLayerResponse.getStatus();
    return handleError(deleteLayerResponse, errorMessage);
  }
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/layer/GeoServerAddLayerCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.layer;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.lang3.StringUtils;
import org.locationtech.geowave.cli.geoserver.GeoServerCommand;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.OperationParams;
import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import net.sf.json.JSONObject;

/**
 * Publishes GeoServer layers directly from a GeoWave store. Takes one positional argument (the
 * GeoWave store name); layers may be selected individually via {@code --typeName} or in bulk
 * via {@code --add all|raster|vector}.
 */
@GeowaveOperation(name = "add", parentOperation = LayerSection.class)
@Parameters(commandDescription = "Add a GeoServer layer from the given GeoWave store")
public class GeoServerAddLayerCommand extends GeoServerCommand<String> {
  /** Bulk-add selector: every layer, raster layers only, or vector layers only. */
  public static enum AddOption {
    ALL, RASTER, VECTOR;
  }

  @Parameter(names = {"-ws", "--workspace"}, required = false, description = "workspace name")
  private String workspace = null;

  @Parameter(
      names = {"-a", "--add"},
      converter = AddOptionConverter.class,
      description = "For multiple layers, add (all | raster | vector)")
  private AddOption addOption = null;

  @Parameter(names = {"-t", "--typeName"}, description = "The type to add to GeoServer")
  private String adapterId = null;

  @Parameter(names = {"-sld", "--setStyle"}, description = "default style sld")
  private String style = null;

  // Positional arguments; exactly one (the GeoWave store name) is expected.
  @Parameter(description = "")
  private List<String> parameters = new ArrayList<>();

  public void setAddOption(final AddOption addOption) {
    this.addOption = addOption;
  }

  public void setStyle(final String style) {
    this.style = style;
  }

  public void setParameters(final List<String> parameters) {
    this.parameters = parameters;
  }

  // GeoWave store name extracted from the positional arguments.
  private String gwStore = null;

  @Override
  public void execute(final OperationParams params) throws Exception {
    params.getConsole().println(computeResults(params));
  }

  /** Converts the {@code --add} argument (case-insensitively) into an {@link AddOption}. */
  public static class AddOptionConverter implements IStringConverter<AddOption> {
    @Override
    public AddOption convert(final String value) {
      final AddOption convertedValue = AddOption.valueOf(value.toUpperCase());
      if ((convertedValue != AddOption.ALL)
          && (convertedValue != AddOption.RASTER)
          && (convertedValue != AddOption.VECTOR)) {
        throw new ParameterException(
            "Value "
                + value
                + " can not be converted to an add option. "
                + "Available values are: "
                + StringUtils.join(AddOption.values(), ", ").toLowerCase(Locale.ENGLISH));
      }
      return convertedValue;
    }
  }

  /**
   * Issues the add-layer REST request and renders the outcome.
   *
   * @param params CLI operation context (provides the console)
   * @return pretty-printed response JSON on HTTP 200, otherwise the result of
   *         {@code handleError}
   * @throws ParameterException if the positional store-name argument is missing
   * @throws Exception if the REST call fails
   */
  @Override
  public String computeResults(final OperationParams params) throws Exception {
    if (parameters.size() != 1) {
      throw new ParameterException("Requires argument: ");
    }
    gwStore = parameters.get(0);
    if ((workspace == null) || workspace.isEmpty()) {
      // fall back to the workspace configured for the GeoServer connection
      workspace = geoserverClient.getConfig().getWorkspace();
    }
    if (addOption != null) {
      // add all supercedes specific adapter selection
      adapterId = addOption.name();
    }
    final Response addLayerResponse =
        geoserverClient.addLayer(workspace, gwStore, adapterId, style);
    if (addLayerResponse.getStatus() == Status.OK.getStatusCode()) {
      final JSONObject jsonResponse = JSONObject.fromObject(addLayerResponse.getEntity());
      return "Add GeoServer layer for '" + gwStore + ": OK : " + jsonResponse.toString(2);
    }
    final String errorMessage =
        "Error adding GeoServer layer for store '"
            + gwStore
            + "': "
            + addLayerResponse.getEntity()
            + "\nGeoServer Response Code = "
            + addLayerResponse.getStatus();
    return handleError(addLayerResponse, errorMessage);
  }
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/layer/LayerOperationProvider.java
================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.layer; import org.locationtech.geowave.core.cli.spi.CLIOperationProviderSpi; public class LayerOperationProvider implements CLIOperationProviderSpi { private static final Class[] OPERATIONS = new Class[] {LayerSection.class, GeoServerAddLayerCommand.class}; @Override public Class[] getOperations() { return OPERATIONS; } } ================================================ FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/layer/LayerSection.java ================================================ /** * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation * *

See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.cli.geoserver.layer;

import org.locationtech.geowave.cli.geoserver.GeoServerSection;
import org.locationtech.geowave.core.cli.annotations.GeowaveOperation;
import org.locationtech.geowave.core.cli.api.DefaultOperation;
import com.beust.jcommander.Parameters;

/**
 * CLI section node that groups the GeoWave-store layer subcommands under
 * {@code geoserver layer}. It carries no behavior of its own; child commands register via
 * {@code parentOperation = LayerSection.class}.
 */
@GeowaveOperation(name = "layer", parentOperation = GeoServerSection.class)
@Parameters(commandDescription = "Commands for configuring GeoServer layers from GeoWave stores")
public class LayerSection extends DefaultOperation {
}
================================================
FILE: extensions/cli/geoserver/src/main/java/org/locationtech/geowave/cli/geoserver/style/GeoServerAddStyleCommand.java
================================================
/**
 * Copyright (c) 2013-2022 Contributors to the Eclipse Foundation
 *
 *

See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.cli.geoserver.style; import java.io.File; import java.io.FileInputStream; import java.util.ArrayList; import java.util.List; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.locationtech.geowave.cli.geoserver.GeoServerCommand; import org.locationtech.geowave.core.cli.annotations.GeowaveOperation; import org.locationtech.geowave.core.cli.api.OperationParams; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.Parameters; @GeowaveOperation(name = "add", parentOperation = StyleSection.class) @Parameters(commandDescription = "Add a GeoServer style") public class GeoServerAddStyleCommand extends GeoServerCommand { @Parameter(names = {"-sld", "--stylesld"}, required = true, description = "style sld file") private String stylesld = null; @Parameter(description = "") private List parameters = new ArrayList<>(); private String gwStyle = null; public void setStylesld(final String stylesld) { this.stylesld = stylesld; } public void setParameters(final List parameters) { this.parameters = parameters; } @Override public void execute(final OperationParams params) throws Exception { params.getConsole().println(computeResults(params)); } @Override public String computeResults(final OperationParams params) throws Exception { if (parameters.size() != 1) { throw new ParameterException("Requires argument: